update reader & tokenizer

pull/10/head
Matthew Butterick 7 years ago
parent 3347a86c0d
commit 7a7e162988

@@ -2,18 +2,23 @@
  (require "parser.rkt" "tokenizer.rkt" brag/support rackunit)
  (check-equal?
-  (parse-tree (apply-tokenizer tokenize "// line commment\n"))
+  (parse-tree
+   (apply-tokenizer-maker make-tokenizer "// line commment\n"))
  '(jsonic-program))
  (check-equal?
-  (parse-tree (apply-tokenizer tokenize "@$ 42 $@"))
+  (parse-tree
+   (apply-tokenizer-maker make-tokenizer "@$ 42 $@"))
  '(jsonic-program (s-exp " 42 ")))
  (check-equal?
-  (parse-tree (apply-tokenizer tokenize "hi"))
+  (parse-tree
+   (apply-tokenizer-maker make-tokenizer "hi"))
  '(jsonic-program
    (json-char "h")
    (json-char "i")))
  (check-equal?
-  (parse-tree (apply-tokenizer tokenize "hi\n// comment\n@$ 42 $@"))
+  (parse-tree
+   (apply-tokenizer-maker make-tokenizer
+                          "hi\n// comment\n@$ 42 $@"))
  '(jsonic-program
    (json-char "h")
    (json-char "i")

@@ -2,8 +2,8 @@
  (require "tokenizer.rkt" "parser.rkt" racket/contract)
  (define (read-syntax path port)
-   (define parse-tree (parse path (tokenize port)))
-   (define module-datum `(module jsonic-module jsonic-demo-2/expander
+   (define parse-tree (parse path (make-tokenizer port)))
+   (define module-datum `(module jsonic-module jsonic-demo/expander
                           ,parse-tree))
    (datum->syntax #f module-datum))
  (provide (contract-out
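
(Not part of the commit: a minimal sketch of what the updated read-syntax yields at a REPL. The input string is arbitrary, "main.rkt" is an assumed name for the reader module above, and the expected datum follows the parse-tree shapes exercised by the tests.)

;; Sketch only, assuming the reader module above lives in "main.rkt".
(require (only-in "main.rkt" read-syntax))
(syntax->datum
 (read-syntax 'repl (open-input-string "// note\n@$ (* 6 7) $@")))
;; expected shape, given the grammar the tests exercise:
;; '(module jsonic-module jsonic-demo/expander
;;    (jsonic-program (s-exp " (* 6 7) ")))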

@@ -13,10 +13,10 @@
  (check-true (token? (token 'A-TOKEN-STRUCT "hi")))
  (check-false (token? 42)))
- (define (tokenize port)
+ (define (make-tokenizer port)
   (port-count-lines! port)
   (define (next-token)
-   (define our-lexer
+   (define jsonic-lexer
     (lexer
      [(eof) eof]
      [(from/to "//" "\n") (next-token)]
@@ -33,22 +33,24 @@
        #:column (col lexeme-start)
        #:span (- (pos lexeme-end)
                  (pos lexeme-start)))]))
-   (our-lexer port))
+   (jsonic-lexer port))
   next-token)
  (provide (contract-out
-  [tokenize (input-port? . -> . (-> token?))]))
+  [make-tokenizer (input-port? . -> . (-> token?))]))
  (module+ test
-  (check-equal? (apply-tokenizer tokenize "// comment\n") empty)
   (check-equal?
-   (apply-tokenizer tokenize "@$ (+ 6 7) $@")
+   (apply-tokenizer-maker make-tokenizer "// comment\n")
+   empty)
+  (check-equal?
+   (apply-tokenizer-maker make-tokenizer "@$ (+ 6 7) $@")
   (list (token 'SEXP-TOK " (+ 6 7) "
         #:position 3
         #:line 1
         #:column 2
         #:span 9)))
  (check-equal?
-  (apply-tokenizer tokenize "hi")
+  (apply-tokenizer-maker make-tokenizer "hi")
   (list (token 'CHAR-TOK "h"
         #:position 1
         #:line 1
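
(Also not part of the commit: make-tokenizer follows brag's tokenizer-maker convention, returning a next-token thunk that is called repeatedly until the [(eof) eof] rule fires. A rough sketch of that loop follows; drain-tokens is a hypothetical helper that mirrors what apply-tokenizer-maker from brag/support does in the tests above.)

#lang racket/base
(require "tokenizer.rkt")   ; assumes the tokenizer module shown above

;; Hypothetical helper, for illustration: call the tokenizer maker on a
;; string and collect tokens until the lexer returns eof.
(define (drain-tokens tokenizer-maker source-string)
  (define next-token (tokenizer-maker (open-input-string source-string)))
  (let loop ([tokens-so-far '()])
    (define tok (next-token))
    (if (eof-object? tok)
        (reverse tokens-so-far)
        (loop (cons tok tokens-so-far)))))

;; (drain-tokens make-tokenizer "hi") should produce the same CHAR-TOK
;; structs the test submodule above checks for.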

@@ -2,18 +2,23 @@
  (require "parser.rkt" "tokenizer.rkt" brag/support rackunit)
  (check-equal?
-  (parse-tree (apply-tokenizer tokenize "// line commment\n"))
+  (parse-tree
+   (apply-tokenizer-maker make-tokenizer "// line commment\n"))
  '(jsonic-program))
  (check-equal?
-  (parse-tree (apply-tokenizer tokenize "@$ 42 $@"))
+  (parse-tree
+   (apply-tokenizer-maker make-tokenizer "@$ 42 $@"))
  '(jsonic-program (s-exp " 42 ")))
  (check-equal?
-  (parse-tree (apply-tokenizer tokenize "hi"))
+  (parse-tree
+   (apply-tokenizer-maker make-tokenizer "hi"))
  '(jsonic-program
    (json-char "h")
    (json-char "i")))
  (check-equal?
-  (parse-tree (apply-tokenizer tokenize "hi\n// comment\n@$ 42 $@"))
+  (parse-tree
+   (apply-tokenizer-maker make-tokenizer
+                          "hi\n// comment\n@$ 42 $@"))
  '(jsonic-program
    (json-char "h")
    (json-char "i")

@@ -2,8 +2,8 @@
  (require "tokenizer.rkt" "parser.rkt" racket/contract)
  (define (read-syntax path port)
-   (define parse-tree (parse path (tokenize port)))
-   (define module-datum `(module jsonic-module jsonic-demo-3/expander
+   (define parse-tree (parse path (make-tokenizer port)))
+   (define module-datum `(module jsonic-module jsonic-demo/expander
                           ,parse-tree))
    (datum->syntax #f module-datum))
  (provide (contract-out

@@ -13,10 +13,10 @@
  (check-true (token? (token 'A-TOKEN-STRUCT "hi")))
  (check-false (token? 42)))
- (define (tokenize port)
+ (define (make-tokenizer port)
   (port-count-lines! port)
   (define (next-token)
-   (define our-lexer
+   (define jsonic-lexer
     (lexer
      [(eof) eof]
      [(from/to "//" "\n") (next-token)]
@@ -33,22 +33,24 @@
        #:column (col lexeme-start)
        #:span (- (pos lexeme-end)
                  (pos lexeme-start)))]))
-   (our-lexer port))
+   (jsonic-lexer port))
   next-token)
  (provide (contract-out
-  [tokenize (input-port? . -> . (-> token?))]))
+  [make-tokenizer (input-port? . -> . (-> token?))]))
  (module+ test
-  (check-equal? (apply-tokenizer tokenize "// comment\n") empty)
   (check-equal?
-   (apply-tokenizer tokenize "@$ (+ 6 7) $@")
+   (apply-tokenizer-maker make-tokenizer "// comment\n")
+   empty)
+  (check-equal?
+   (apply-tokenizer-maker make-tokenizer "@$ (+ 6 7) $@")
   (list (token 'SEXP-TOK " (+ 6 7) "
         #:position 3
         #:line 1
         #:column 2
         #:span 9)))
  (check-equal?
-  (apply-tokenizer tokenize "hi")
+  (apply-tokenizer-maker make-tokenizer "hi")
   (list (token 'CHAR-TOK "h"
         #:position 1
         #:line 1
