delete unneeded `eof` rules

v6.3-exception
Matthew Butterick 6 years ago
parent 32f915a844
commit f5b300f6d1

@@ -7,7 +7,6 @@
 (define basic-lexer
   (lexer-srcloc
-   [(eof) (return-without-srcloc eof)]
    ["\n" (token 'NEWLINE lexeme)]
    [whitespace (token lexeme #:skip? #t)]
    [(from/stop-before "rem" "\n") (token 'REM lexeme)]

@@ -5,7 +5,6 @@
 (define basic-lexer
   (lexer-srcloc
-   [(eof) (return-without-srcloc eof)]
    ["\n" (token 'NEWLINE lexeme)]
    [whitespace (token lexeme #:skip? #t)]
    [(from/stop-before "rem" "\n") (token 'REM lexeme)]

@@ -10,7 +10,6 @@
 (define basic-lexer
   (lexer-srcloc
-   [(eof) (return-without-srcloc eof)]
    ["\n" (token 'NEWLINE lexeme)]
    [whitespace (token lexeme #:skip? #t)]
    [(from/stop-before "rem" "\n") (token 'REM lexeme)]

@@ -10,7 +10,6 @@
 (define basic-lexer
   (lexer-src-pos
-   [(eof) eof]
    [whitespace (token 'WHITE lexeme #:skip? #t
                       #:position (pos lexeme-start)
                       #:line (line lexeme-start)

@@ -5,7 +5,6 @@
 (define basic-lexer
   (lexer-srcloc
-   [(eof) (return-without-srcloc eof)]
    ["\n" (token 'NEWLINE lexeme)]
    [whitespace (token lexeme #:skip? #t)]
    [(from/stop-before "rem" "\n") (token 'REM lexeme)]

@@ -13,7 +13,6 @@
 (define (next-token)
   (define bf-lexer
     (lexer
-     [(eof) eof]
      [(char-set "><-.,+[]") lexeme]
      [any-char (next-token)]))
   (bf-lexer port))

@@ -10,8 +10,7 @@
    #;[(:: "#" (:* (complement "\n")) "\n") (token 'comment #:skip? #t)]
    [whitespace (token 'white #:skip? #t)]
    ;; treat other characters as comments
-   [(char-range #\nul #\~) (token 'ascii #:skip? #t)]
-   [(eof) eof]))
+   [(char-range #\nul #\~) (token 'ascii #:skip? #t)]))
 (define (next-token) (get-token ip))

@@ -7,7 +7,6 @@
 (define (next-token)
   (define get-token
     (lexer-src-pos
-     [(eof) eof]
      [(union
        (:seq "/*" (complement (:seq any-string "*/" any-string)) "*/")
        (:seq "//" (repetition 1 +inf.0 (char-complement #\newline)) #\newline))

@@ -4,7 +4,6 @@
 (define hdl-test-lexer
   (lexer-srcloc
-   [(eof) eof]
    [(:or (from/to "/*" "*/")
          (from/to "//" #\newline)) (token 'COMMENT lexeme #:skip? #t)]
    [whitespace (token lexeme #:skip? #t)]

@@ -18,7 +18,6 @@
 (define (next-token)
   (define jsonic-lexer
     (lexer
-     [(eof) eof]
      [(from/to "//" "\n") (next-token)]
      [(from/to "@$" "$@")
       (token 'SEXP-TOK (trim-ends "@$" lexeme "$@")

@@ -18,7 +18,6 @@
 (define (next-token)
   (define jsonic-lexer
     (lexer
-     [(eof) eof]
      [(from/to "//" "\n") (next-token)]
      [(from/to "@$" "$@")
       (token 'SEXP-TOK (trim-ends "@$" lexeme "$@")

@@ -5,7 +5,6 @@
 (define (next-token)
   (define jsonic-lexer
     (lexer
-     [(eof) eof]
      [(from/to "//" "\n") (next-token)]
      [(from/to "@$" "$@")
       (token 'SEXP-TOK (trim-ends "@$" lexeme "$@"))]

@@ -7,7 +7,6 @@
 (define (next-token)
   (define get-token
     (lexer
-     [(eof) eof]
      [(union
        (:seq "/*" (complement (:seq any-string "*/" any-string)) "*/")
        (:seq "//" (repetition 1 +inf.0 (char-complement #\newline)) #\newline))
