delete unneeded `eof` rules

v6.3-exception
Matthew Butterick 7 years ago
parent 32f915a844
commit f5b300f6d1

@@ -7,7 +7,6 @@
 (define basic-lexer
   (lexer-srcloc
-   [(eof) (return-without-srcloc eof)]
    ["\n" (token 'NEWLINE lexeme)]
    [whitespace (token lexeme #:skip? #t)]
    [(from/stop-before "rem" "\n") (token 'REM lexeme)]

@@ -5,7 +5,6 @@
 (define basic-lexer
   (lexer-srcloc
-   [(eof) (return-without-srcloc eof)]
    ["\n" (token 'NEWLINE lexeme)]
    [whitespace (token lexeme #:skip? #t)]
    [(from/stop-before "rem" "\n") (token 'REM lexeme)]

@@ -10,7 +10,6 @@
 (define basic-lexer
   (lexer-srcloc
-   [(eof) (return-without-srcloc eof)]
    ["\n" (token 'NEWLINE lexeme)]
    [whitespace (token lexeme #:skip? #t)]
    [(from/stop-before "rem" "\n") (token 'REM lexeme)]

@@ -10,7 +10,6 @@
 (define basic-lexer
   (lexer-src-pos
-   [(eof) eof]
    [whitespace (token 'WHITE lexeme #:skip? #t
                       #:position (pos lexeme-start)
                       #:line (line lexeme-start)

@@ -5,7 +5,6 @@
 (define basic-lexer
   (lexer-srcloc
-   [(eof) (return-without-srcloc eof)]
    ["\n" (token 'NEWLINE lexeme)]
    [whitespace (token lexeme #:skip? #t)]
    [(from/stop-before "rem" "\n") (token 'REM lexeme)]
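
For reference, here is a minimal runnable sketch of the basic-lexer pattern these hunks touch, shown with the eof rule that this commit deletes; the driver loop and the sample input are illustrative assumptions, not code from the repo:

#lang racket
(require brag/support) ; lexer-srcloc, token, from/stop-before, etc.

(define basic-lexer
  (lexer-srcloc
   [(eof) (return-without-srcloc eof)] ; the rule this commit removes
   ["\n" (token 'NEWLINE lexeme)]
   [whitespace (token lexeme #:skip? #t)]
   [(from/stop-before "rem" "\n") (token 'REM lexeme)]))

;; Hypothetical driver: collect tokens until the lexer returns eof.
(define (lex-all port)
  (define tok (basic-lexer port))
  (if (eof-object? tok)
      '()
      (cons tok (lex-all port))))

(lex-all (open-input-string "rem a comment\n"))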

@@ -13,7 +13,6 @@
 (define (next-token)
   (define bf-lexer
     (lexer
-     [(eof) eof]
      [(char-set "><-.,+[]") lexeme]
      [any-char (next-token)]))
   (bf-lexer port))
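
A similar sketch for the bf lexer in the hunk above; the make-tokenizer wrapper and the port binding are assumed from surrounding context (they are not part of the hunk), and the [(eof) eof] rule is again the one this commit deletes:

#lang racket
(require brag/support)

;; Assumed wrapper: `port` is closed over so next-token can be called repeatedly.
(define (make-tokenizer port)
  (define (next-token)
    (define bf-lexer
      (lexer
       [(eof) eof]                    ; rule removed by this commit
       [(char-set "><-.,+[]") lexeme] ; the eight bf operator characters
       [any-char (next-token)]))      ; anything else is skipped by recursing
    (bf-lexer port))
  next-token)

;; Usage: pull tokens until the lexer signals end of input.
(define next (make-tokenizer (open-input-string "++[->+<] ignored")))
(let loop ([tok (next)])
  (unless (eof-object? tok)
    (displayln tok)
    (loop (next))))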

@@ -10,8 +10,7 @@
 #;[(:: "#" (:* (complement "\n")) "\n") (token 'comment #:skip? #t)]
 [whitespace (token 'white #:skip? #t)]
 ;; treat other characters as comments
-[(char-range #\nul #\~) (token 'ascii #:skip? #t)]
-[(eof) eof]))
+[(char-range #\nul #\~) (token 'ascii #:skip? #t)]))
 (define (next-token) (get-token ip))

@@ -7,7 +7,6 @@
 (define (next-token)
   (define get-token
     (lexer-src-pos
-     [(eof) eof]
      [(union
        (:seq "/*" (complement (:seq any-string "*/" any-string)) "*/")
        (:seq "//" (repetition 1 +inf.0 (char-complement #\newline)) #\newline))

@@ -4,7 +4,6 @@
 (define hdl-test-lexer
   (lexer-srcloc
-   [(eof) eof]
    [(:or (from/to "/*" "*/")
          (from/to "//" #\newline)) (token 'COMMENT lexeme #:skip? #t)]
    [whitespace (token lexeme #:skip? #t)]

@@ -18,7 +18,6 @@
 (define (next-token)
   (define jsonic-lexer
     (lexer
-     [(eof) eof]
      [(from/to "//" "\n") (next-token)]
      [(from/to "@$" "$@")
       (token 'SEXP-TOK (trim-ends "@$" lexeme "$@")

@@ -18,7 +18,6 @@
 (define (next-token)
   (define jsonic-lexer
     (lexer
-     [(eof) eof]
      [(from/to "//" "\n") (next-token)]
      [(from/to "@$" "$@")
       (token 'SEXP-TOK (trim-ends "@$" lexeme "$@")

@@ -5,7 +5,6 @@
 (define (next-token)
   (define jsonic-lexer
     (lexer
-     [(eof) eof]
      [(from/to "//" "\n") (next-token)]
      [(from/to "@$" "$@")
       (token 'SEXP-TOK (trim-ends "@$" lexeme "$@"))]
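
And a sketch of the jsonic pattern from these hunks; the make-tokenizer wrapper and the catch-all any-char rule are assumptions (the hunks show only the comment, S-expression, and eof rules), with the eof rule being the one this commit deletes:

#lang racket
(require brag/support)

(define (make-tokenizer port)
  (define (next-token)
    (define jsonic-lexer
      (lexer
       [(eof) eof]                           ; rule removed by this commit
       [(from/to "//" "\n") (next-token)]    ; skip line comments by recursing
       [(from/to "@$" "$@")                  ; embedded s-expression
        (token 'SEXP-TOK (trim-ends "@$" lexeme "$@"))]
       [any-char (token 'CHAR-TOK lexeme)])) ; assumed catch-all rule
    (jsonic-lexer port))
  next-token)

;; Usage: comments disappear, @$...$@ becomes a SEXP-TOK, the rest are CHAR-TOKs.
(define next (make-tokenizer (open-input-string "// note\n{@$(* 6 7)$@}")))
(let loop ([tok (next)])
  (unless (eof-object? tok)
    (println tok)
    (loop (next))))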

@@ -7,7 +7,6 @@
 (define (next-token)
   (define get-token
     (lexer
-     [(eof) eof]
      [(union
        (:seq "/*" (complement (:seq any-string "*/" any-string)) "*/")
        (:seq "//" (repetition 1 +inf.0 (char-complement #\newline)) #\newline))
