updates
parent 87009e367b
commit f25679622a
@@ -1,23 +1,13 @@
 #lang brag
 
-tst-program : tst-load-expr tst-output-file-expr tst-compare-to-expr tst-output-list-expr /";" tst-test-expr*
-
-tst-load-expr : /"load" ID /","
-
-tst-output-file-expr : /"output-file" ID /","
-
-tst-compare-to-expr : /"compare-to" ID /","
-
-tst-output-list-expr : /"output-list" tst-column [tst-column]+
-
-/tst-column : ID FORMAT-STRING
-
-@tst-test-expr : tst-step-expr+ /";"
-
-@tst-step-expr : (tst-set-expr | tst-eval-expr | tst-output-expr) [/","]
-
-tst-set-expr : /"set" ID VAL
-
-tst-eval-expr : /"eval"
-
-tst-output-expr : /"output"
+tst-program : load-expr output-file-expr compare-to-expr output-list-expr /";" test-expr*
+load-expr : /"load" ID /","
+output-file-expr : /"output-file" ID /","
+compare-to-expr : /"compare-to" ID /","
+output-list-expr : /"output-list" column [column]+
+/column : ID FORMAT-STRING
+@test-expr : step-expr+ /";"
+@step-expr : (set-expr | eval-expr | output-expr) [/","]
+set-expr : /"set" ID VAL
+eval-expr : /"eval"
+output-expr : /"output"
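For reference, an illustrative test script that the revised grammar accepts; the chip name And, the file names And.out and And.cmp, and the %B3.1.3 format strings are made-up examples, not part of this commit:

load And.hdl, output-file And.out, compare-to And.cmp,
output-list a%B3.1.3 b%B3.1.3 out%B3.1.3;
set a 0, set b 0, eval, output;
set a 1, set b 1, eval, output;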
@@ -1,21 +1,18 @@
-#lang br
-(require brag/support
-         racket/string)
+#lang br/quicklang
+(require brag/support)
+(provide make-tokenizer)
 
-(provide tokenize)
-(define (tokenize input-port)
-  (define (next-token)
-    (define get-token
-      (lexer-src-pos
-       [(eof) eof]
-       [(union
-         (:seq "/*" (complement (:seq any-string "*/" any-string)) "*/")
-         (:seq "//" (repetition 1 +inf.0 (char-complement #\newline)) #\newline))
-        (token 'COMMENT lexeme #:skip? #t)]
-       [(union #\tab #\space #\newline) (return-without-pos (get-token input-port))]
-       [(union "load" "output-list" "output-file" "compare-to" "set" "eval" "output" (char-set ",;")) lexeme]
-       [(:seq "%" (repetition 1 +inf.0 (union alphabetic numeric (char-set ".")))) (token 'FORMAT-STRING lexeme)]
-       [(repetition 1 +inf.0 numeric) (token 'VAL (string->number lexeme))]
-       [(repetition 1 +inf.0 (union alphabetic numeric (char-set "-."))) (token 'ID lexeme)]))
-    (get-token input-port))
-  next-token)
+(define hdl-test-lexer
+  (lexer-srcloc
+   [(eof) eof]
+   [(:or (from/to "/*" "*/")
+         (from/to "//" #\newline)) (token 'COMMENT lexeme #:skip? #t)]
+   [whitespace (token lexeme #:skip? #t)]
+   [(:or "load" "output-list" "output-file" "compare-to" "set" "eval" "output" "," ";") lexeme]
+   [(:seq "%" (:+ alphabetic numeric ".")) (token 'FORMAT-STRING lexeme)]
+   [(:+ numeric) (token 'VAL (string->number lexeme))]
+   [(:+ alphabetic numeric "-" ".") (token 'ID lexeme)]))
+
+(define (make-tokenizer ip)
+  (define (next-token) (hdl-test-lexer ip))
+  next-token)
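A minimal wiring sketch, assuming the grammar module above is saved as grammar.rkt and this lexer module as tokenizer.rkt (neither file name appears in this commit); brag-generated grammar modules export parse-to-datum, which accepts the token-producing thunk returned by make-tokenizer:

#lang br
;; Hypothetical file names; adjust to the actual module paths in the repo.
(require "grammar.rkt"     ; the #lang brag grammar shown above
         "tokenizer.rkt")  ; provides make-tokenizer

;; Lex and parse a small test script into a nested-list parse tree.
(parse-to-datum
 (make-tokenizer
  (open-input-string
   "load And.hdl, output-file And.out, compare-to And.cmp,
    output-list a%B3.1.3 out%B3.1.3;
    set a 0, eval, output;")))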