Correct location calculations for non-terminals.

The prior code constructed the location of nonterminal matches out of
the state of the stream after parsing.  This isn't right for a few
reasons:

    1.  It doesn't get the starting location correctly.

    2.  It doesn't behave correctly when the non-terminal production did not
        actually consume tokens for its parse.

This patch modifies the parsers to also pass along a
"last-consumed-token"; it, along with a few other changes, provides
the parsers enough information to accurately construct the locations,
even when no tokens have been consumed during the parse.  We
synthesize a sentinel last-consumed-token whose location is taken from
the head of the stream.

original commit: 6e21e34ec7c7a3e9cf23a3f24bfafd6155e1f14a
tokens
Danny Yoo 12 years ago
parent 33ebcd7a78
commit 2fd35df83a

@ -27,6 +27,8 @@
;; grammar to tokens specific to this parser. In other words, this ;; grammar to tokens specific to this parser. In other words, this
;; parser uses `parser' so that it doesn't have to know anything about ;; parser uses `parser' so that it doesn't have to know anything about
;; tokens. ;; tokens.
;;
(require parser-tools/yacc (require parser-tools/yacc
@ -84,19 +86,19 @@
;; then after parse-a succeeds once, we parallelize parse-b ;; then after parse-a succeeds once, we parallelize parse-b
;; and trying a second result for parse-a. ;; and trying a second result for parse-a.
(define (parse-and simple-a? parse-a parse-b (define (parse-and simple-a? parse-a parse-b
stream depth end success-k fail-k stream last-consumed-token depth end success-k fail-k
max-depth tasks) max-depth tasks)
(letrec ([mk-got-k (letrec ([mk-got-k
(lambda (success-k fail-k) (lambda (success-k fail-k)
(lambda (val stream depth max-depth tasks next1-k) (lambda (val stream last-consumed-token depth max-depth tasks next1-k)
(if simple-a? (if simple-a?
(parse-b val stream depth end (parse-b val stream last-consumed-token depth end
(mk-got2-k success-k fail-k next1-k) (mk-got2-k success-k fail-k next1-k)
(mk-fail2-k success-k fail-k next1-k) (mk-fail2-k success-k fail-k next1-k)
max-depth tasks) max-depth tasks)
(parallel-or (parallel-or
(lambda (success-k fail-k max-depth tasks) (lambda (success-k fail-k max-depth tasks)
(parse-b val stream depth end (parse-b val stream last-consumed-token depth end
success-k fail-k success-k fail-k
max-depth tasks)) max-depth tasks))
(lambda (success-k fail-k max-depth tasks) (lambda (success-k fail-k max-depth tasks)
@ -105,8 +107,8 @@
success-k fail-k max-depth tasks))))] success-k fail-k max-depth tasks))))]
[mk-got2-k [mk-got2-k
(lambda (success-k fail-k next1-k) (lambda (success-k fail-k next1-k)
(lambda (val stream depth max-depth tasks next-k) (lambda (val stream last-consumed-token depth max-depth tasks next-k)
(success-k val stream depth max-depth tasks (success-k val stream last-consumed-token depth max-depth tasks
(lambda (success-k fail-k max-depth tasks) (lambda (success-k fail-k max-depth tasks)
(next-k (mk-got2-k success-k fail-k next1-k) (next-k (mk-got2-k success-k fail-k next1-k)
(mk-fail2-k success-k fail-k next1-k) (mk-fail2-k success-k fail-k next1-k)
@ -118,28 +120,28 @@
fail-k fail-k
max-depth max-depth
tasks)))]) tasks)))])
(parse-a stream depth end (parse-a stream last-consumed-token depth end
(mk-got-k success-k fail-k) (mk-got-k success-k fail-k)
fail-k fail-k
max-depth tasks))) max-depth tasks)))
;; Parallel or for non-terminal alternatives ;; Parallel or for non-terminal alternatives
(define (parse-parallel-or parse-a parse-b stream depth end success-k fail-k max-depth tasks) (define (parse-parallel-or parse-a parse-b stream last-consumed-token depth end success-k fail-k max-depth tasks)
(parallel-or (lambda (success-k fail-k max-depth tasks) (parallel-or (lambda (success-k fail-k max-depth tasks)
(parse-a stream depth end success-k fail-k max-depth tasks)) (parse-a stream last-consumed-token depth end success-k fail-k max-depth tasks))
(lambda (success-k fail-k max-depth tasks) (lambda (success-k fail-k max-depth tasks)
(parse-b stream depth end success-k fail-k max-depth tasks)) (parse-b stream last-consumed-token depth end success-k fail-k max-depth tasks))
success-k fail-k max-depth tasks)) success-k fail-k max-depth tasks))
;; Generic parallel-or ;; Generic parallel-or
(define (parallel-or parse-a parse-b success-k fail-k max-depth tasks) (define (parallel-or parse-a parse-b success-k fail-k max-depth tasks)
(define answer-key (gensym)) (define answer-key (gensym))
(letrec ([gota-k (letrec ([gota-k
(lambda (val stream depth max-depth tasks next-k) (lambda (val stream last-consumed-token depth max-depth tasks next-k)
(report-answer answer-key (report-answer answer-key
max-depth max-depth
tasks tasks
(list val stream depth next-k)))] (list val stream last-consumed-token depth next-k)))]
[faila-k [faila-k
(lambda (max-depth tasks) (lambda (max-depth tasks)
(report-answer answer-key (report-answer answer-key
@ -166,11 +168,11 @@
max-depth tasks))))]) max-depth tasks))))])
(letrec ([mk-got-one (letrec ([mk-got-one
(lambda (immediate-next? get-nth success-k) (lambda (immediate-next? get-nth success-k)
(lambda (val stream depth max-depth tasks next-k) (lambda (val stream last-consumed-token depth max-depth tasks next-k)
(let ([tasks (if immediate-next? (let ([tasks (if immediate-next?
(queue-next next-k tasks) (queue-next next-k tasks)
tasks)]) tasks)])
(success-k val stream depth max-depth (success-k val stream last-consumed-token depth max-depth
tasks tasks
(lambda (success-k fail-k max-depth tasks) (lambda (success-k fail-k max-depth tasks)
(let ([tasks (if immediate-next? (let ([tasks (if immediate-next?
@ -194,11 +196,11 @@
;; Non-terminal alternatives where the first is "simple" can be done ;; Non-terminal alternatives where the first is "simple" can be done
;; sequentially, which is simpler ;; sequentially, which is simpler
(define (parse-or parse-a parse-b (define (parse-or parse-a parse-b
stream depth end success-k fail-k max-depth tasks) stream last-consumed-token depth end success-k fail-k max-depth tasks)
(letrec ([mk-got-k (letrec ([mk-got-k
(lambda (success-k fail-k) (lambda (success-k fail-k)
(lambda (val stream depth max-depth tasks next-k) (lambda (val stream last-consumed-token depth max-depth tasks next-k)
(success-k val stream depth (success-k val stream last-consumed-token depth
max-depth tasks max-depth tasks
(lambda (success-k fail-k max-depth tasks) (lambda (success-k fail-k max-depth tasks)
(next-k (mk-got-k success-k fail-k) (next-k (mk-got-k success-k fail-k)
@ -207,8 +209,8 @@
[mk-fail-k [mk-fail-k
(lambda (success-k fail-k) (lambda (success-k fail-k)
(lambda (max-depth tasks) (lambda (max-depth tasks)
(parse-b stream depth end success-k fail-k max-depth tasks)))]) (parse-b stream last-consumed-token depth end success-k fail-k max-depth tasks)))])
(parse-a stream depth end (parse-a stream last-consumed-token depth end
(mk-got-k success-k fail-k) (mk-got-k success-k fail-k)
(mk-fail-k success-k fail-k) (mk-fail-k success-k fail-k)
max-depth tasks))) max-depth tasks)))
@ -265,13 +267,13 @@
(if val (if val
(if (null? val) (if (null? val)
(fail-k max-depth tasks) (fail-k max-depth tasks)
(let-values ([(val stream depth next-k) (apply values val)]) (let-values ([(val stream last-consumed-token depth next-k) (apply values val)])
(success-k val stream depth max-depth tasks next-k))) (success-k val stream last-consumed-token depth max-depth tasks next-k)))
(deadlock-k max-depth tasks))))]) (deadlock-k max-depth tasks))))])
(if multi? (if multi?
(hash-set! (tasks-multi-waits tasks) answer-key (hash-set! (tasks-multi-waits tasks) answer-key
(cons wait (hash-ref (tasks-multi-waits tasks) answer-key (cons wait (hash-ref (tasks-multi-waits tasks) answer-key
(lambda () null)))) (lambda () null))))
(hash-set! (tasks-waits tasks) answer-key wait)) (hash-set! (tasks-waits tasks) answer-key wait))
(let ([tasks (make-tasks (tasks-active tasks) (let ([tasks (make-tasks (tasks-active tasks)
(tasks-active-back tasks) (tasks-active-back tasks)
@ -300,8 +302,8 @@
(make-tasks (apply (make-tasks (apply
append append
(hash-map (tasks-multi-waits tasks) (hash-map (tasks-multi-waits tasks)
(lambda (k l) (lambda (k l)
(map (lambda (v) (v #f)) l)))) (map (lambda (v) (v #f)) l))))
(tasks-active-back tasks) (tasks-active-back tasks)
(tasks-waits tasks) (tasks-waits tasks)
(make-hasheq) (make-hasheq)
@ -334,15 +336,15 @@
(let loop ([pat pat] (let loop ([pat pat]
[pos 1]) [pos 1])
(if (null? pat) (if (null? pat)
#`(success-k #,handle stream depth max-depth tasks #`(success-k #,handle stream last-consumed-token depth max-depth tasks
(lambda (success-k fail-k max-depth tasks) (lambda (success-k fail-k max-depth tasks)
(fail-k max-depth tasks))) (fail-k max-depth tasks)))
(let ([id (datum->syntax (car pat) (let ([id (datum->syntax (car pat)
(string->symbol (format "$~a" pos)))] (string->symbol (format "$~a" pos)))]
[id-start-pos (datum->syntax (car pat) [id-start-pos (datum->syntax (car pat)
(string->symbol (format "$~a-start-pos" pos)))] (string->symbol (format "$~a-start-pos" pos)))]
[id-end-pos (datum->syntax (car pat) [id-end-pos (datum->syntax (car pat)
(string->symbol (format "$~a-end-pos" pos)))] (string->symbol (format "$~a-end-pos" pos)))]
[n-end-pos (and (null? (cdr pat)) [n-end-pos (and (null? (cdr pat))
(datum->syntax (car pat) '$n-end-pos))]) (datum->syntax (car pat) '$n-end-pos))])
(cond (cond
@ -354,14 +356,21 @@
(or (not l) (or (not l)
(andmap values (caddr l)))) (andmap values (caddr l))))
#,(car pat) #,(car pat)
(lambda (#,id stream depth end success-k fail-k max-depth tasks) (let ([original-stream stream])
(let-syntax ([#,id-start-pos (at-tok-pos #'tok-start #'(and (pair? stream) (car stream)))] (lambda (#,id stream last-consumed-token depth end success-k fail-k max-depth tasks)
[#,id-end-pos (at-tok-pos #'tok-end #'(and (pair? stream) (car stream)))] (let-syntax ([#,id-start-pos (at-tok-pos #'(if (eq? original-stream stream)
#,@(if n-end-pos tok-end
#`([#,n-end-pos (at-tok-pos #'tok-end #'(and (pair? stream) (car stream)))]) tok-start)
null)) #'(if (eq? original-stream stream)
#,(loop (cdr pat) (add1 pos)))) last-consumed-token
stream depth (and (pair? original-stream)
(car original-stream))))]
[#,id-end-pos (at-tok-pos #'tok-end #'last-consumed-token)]
#,@(if n-end-pos
#`([#,n-end-pos (at-tok-pos #'tok-end #'last-consumed-token)])
null))
#,(loop (cdr pat) (add1 pos)))))
stream last-consumed-token depth
#,(let ([cnt (apply + #,(let ([cnt (apply +
(map (lambda (item) (map (lambda (item)
(cond (cond
@ -378,6 +387,7 @@
(eq? '#,tok-id (tok-name (car stream)))) (eq? '#,tok-id (tok-name (car stream))))
(let* ([stream-a (car stream)] (let* ([stream-a (car stream)]
[#,id (tok-val stream-a)] [#,id (tok-val stream-a)]
[last-consumed-token (car stream)]
[stream (cdr stream)] [stream (cdr stream)]
[depth (add1 depth)]) [depth (add1 depth)])
(let ([max-depth (max max-depth depth)]) (let ([max-depth (max max-depth depth)])
@ -396,7 +406,7 @@
;; The cache maps nontermial+startingpos+iteration to a result, where ;; The cache maps nontermial+startingpos+iteration to a result, where
;; the iteration is 0 for the first match attempt, 1 for the second, ;; the iteration is 0 for the first match attempt, 1 for the second,
;; etc. ;; etc.
(define (parse-nt/share key min-cnt init-tokens stream depth end max-depth tasks success-k fail-k k) (define (parse-nt/share key min-cnt init-tokens stream last-consumed-token depth end max-depth tasks success-k fail-k k)
(if (and (positive? min-cnt) (if (and (positive? min-cnt)
(pair? stream) (pair? stream)
(not (memq (tok-name (car stream)) init-tokens))) (not (memq (tok-name (car stream)) init-tokens)))
@ -422,16 +432,16 @@
[else [else
#;(printf "Try ~a ~a\n" table-key (map tok-name stream)) #;(printf "Try ~a ~a\n" table-key (map tok-name stream))
(hash-set! (tasks-cache tasks) table-key (hash-set! (tasks-cache tasks) table-key
(lambda (success-k fail-k max-depth tasks) (lambda (success-k fail-k max-depth tasks)
#;(printf "Wait ~a ~a\n" table-key answer-key) #;(printf "Wait ~a ~a\n" table-key answer-key)
(wait-for-answer #t max-depth tasks answer-key success-k fail-k (wait-for-answer #t max-depth tasks answer-key success-k fail-k
(lambda (max-depth tasks) (lambda (max-depth tasks)
#;(printf "Deadlock ~a ~a\n" table-key answer-key) #;(printf "Deadlock ~a ~a\n" table-key answer-key)
(fail-k max-depth tasks))))) (fail-k max-depth tasks)))))
(let result-loop ([max-depth max-depth][tasks tasks][k k]) (let result-loop ([max-depth max-depth][tasks tasks][k k])
(letrec ([orig-stream stream] (letrec ([orig-stream stream]
[new-got-k [new-got-k
(lambda (val stream depth max-depth tasks next-k) (lambda (val stream last-consumed-token depth max-depth tasks next-k)
;; Check whether we already have a result that consumed the same amount: ;; Check whether we already have a result that consumed the same amount:
(let ([result-key (vector #f key old-depth depth)]) (let ([result-key (vector #f key old-depth depth)])
(cond (cond
@ -457,20 +467,20 @@
(next-k success-k fail-k max-depth tasks))))]) (next-k success-k fail-k max-depth tasks))))])
(hash-set! (tasks-cache tasks) result-key #t) (hash-set! (tasks-cache tasks) result-key #t)
(hash-set! (tasks-cache tasks) table-key (hash-set! (tasks-cache tasks) table-key
(lambda (success-k fail-k max-depth tasks) (lambda (success-k fail-k max-depth tasks)
(success-k val stream depth max-depth tasks next-k))) (success-k val stream last-consumed-token depth max-depth tasks next-k)))
(report-answer-all answer-key (report-answer-all answer-key
max-depth max-depth
tasks tasks
(list val stream depth next-k) (list val stream last-consumed-token depth next-k)
(lambda (max-depth tasks) (lambda (max-depth tasks)
(success-k val stream depth max-depth tasks next-k))))])))] (success-k val stream last-consumed-token depth max-depth tasks next-k))))])))]
[new-fail-k [new-fail-k
(lambda (max-depth tasks) (lambda (max-depth tasks)
#;(printf "Failure ~a\n" table-key) #;(printf "Failure ~a\n" table-key)
(hash-set! (tasks-cache tasks) table-key (hash-set! (tasks-cache tasks) table-key
(lambda (success-k fail-k max-depth tasks) (lambda (success-k fail-k max-depth tasks)
(fail-k max-depth tasks))) (fail-k max-depth tasks)))
(report-answer-all answer-key (report-answer-all answer-key
max-depth max-depth
tasks tasks
@ -483,7 +493,7 @@
(syntax-case stx () (syntax-case stx ()
[(_ clause ...) [(_ clause ...)
(let ([clauses (syntax->list #'(clause ...))]) (let ([clauses (syntax->list #'(clause ...))])
(let-values ([(start grammar cfg-error parser-clauses) (let-values ([(start grammar cfg-error parser-clauses src-pos?)
(let ([all-toks (apply (let ([all-toks (apply
append append
(map (lambda (clause) (map (lambda (clause)
@ -524,7 +534,8 @@
(values cfg-start (values cfg-start
cfg-grammar cfg-grammar
cfg-error cfg-error
(reverse parser-clauses)) (reverse parser-clauses)
src-pos?)
(syntax-case (car clauses) (start error grammar src-pos) (syntax-case (car clauses) (start error grammar src-pos)
[(start tok) [(start tok)
(loop (cdr clauses) #'tok cfg-grammar cfg-error src-pos? parser-clauses)] (loop (cdr clauses) #'tok cfg-grammar cfg-error src-pos? parser-clauses)]
@ -647,9 +658,9 @@
(define info (bound-identifier-mapping-get nts nt)) (define info (bound-identifier-mapping-get nts nt))
(list nt (list nt
#`(let ([key (gensym '#,nt)]) #`(let ([key (gensym '#,nt)])
(lambda (stream depth end success-k fail-k max-depth tasks) (lambda (stream last-consumed-token depth end success-k fail-k max-depth tasks)
(parse-nt/share (parse-nt/share
key #,(car info) '#,(cadr info) stream depth end key #,(car info) '#,(cadr info) stream last-consumed-token depth end
max-depth tasks max-depth tasks
success-k fail-k success-k fail-k
(lambda (end max-depth tasks success-k fail-k) (lambda (end max-depth tasks success-k fail-k)
@ -663,18 +674,18 @@
(car simple?s)) (car simple?s))
#'parse-or #'parse-or
#'parse-parallel-or) #'parse-parallel-or)
(lambda (stream depth end success-k fail-k max-depth tasks) (lambda (stream last-consumed-token depth end success-k fail-k max-depth tasks)
#,(build-match nts #,(build-match nts
toks toks
(car pats) (car pats)
(car handles) (car handles)
(car $ctxs))) (car $ctxs)))
(lambda (stream depth end success-k fail-k max-depth tasks) (lambda (stream last-consumed-token depth end success-k fail-k max-depth tasks)
#,(loop (cdr pats) #,(loop (cdr pats)
(cdr handles) (cdr handles)
(cdr $ctxs) (cdr $ctxs)
(cdr simple?s))) (cdr simple?s)))
stream depth end success-k fail-k max-depth tasks))))))))) stream last-consumed-token depth end success-k fail-k max-depth tasks)))))))))
nt-ids nt-ids
patss patss
(syntax->list #'(((begin handle0 handle ...) ...) ...)) (syntax->list #'(((begin handle0 handle ...) ...) ...))
@ -728,7 +739,7 @@
(lambda (get-tok) (lambda (get-tok)
(let ([tok-list (orig-parse get-tok)]) (let ([tok-list (orig-parse get-tok)])
(letrec ([success-k (letrec ([success-k
(lambda (val stream depth max-depth tasks next) (lambda (val stream last-consumed-token depth max-depth tasks next)
(if (null? stream) (if (null? stream)
val val
(next success-k fail-k max-depth tasks)))] (next success-k fail-k max-depth tasks)))]
@ -746,18 +757,87 @@
'cfg-parse 'cfg-parse
"failed at ~a" "failed at ~a"
(tok-val bad-tok)))))]) (tok-val bad-tok)))))])
(#,start tok-list 0 (#,start tok-list
;; we simulate a token at the very beginning with zero width
;; for use with the position-generating code (*-start-pos, *-end-pos).
(if (null? tok-list)
(tok #f #f #f
(position 1
#,(if src-pos? #'1 #'#f)
#,(if src-pos? #'0 #'#f))
(position 1
#,(if src-pos? #'1 #'#f)
#,(if src-pos? #'0 #'#f)))
(tok (tok-name (car tok-list))
(tok-orig-name (car tok-list))
(tok-val (car tok-list))
(tok-start (car tok-list))
(tok-start (car tok-list))))
0
(length tok-list) (length tok-list)
success-k success-k
fail-k fail-k
0 (make-tasks null null 0
(make-hasheq) (make-hasheq) (make-tasks null null
(make-hash) #t)))))))))])) (make-hasheq) (make-hasheq)
(make-hash) #t)))))))))]))
(module* test racket/base (module* test racket/base
(require (submod "..") (require (submod "..")
parser-tools/lex) parser-tools/lex
racket/block
rackunit)
;; Test: parsing regular expressions.
;; Here is a test case on locations:
(block
(define-tokens regexp-tokens (ANCHOR STAR OR LIT LPAREN RPAREN EOF))
(define lex (lexer-src-pos ["|" (token-OR lexeme)]
["^" (token-ANCHOR lexeme)]
["*" (token-STAR lexeme)]
[(repetition 1 +inf.0 alphabetic) (token-LIT lexeme)]
["(" (token-LPAREN lexeme)]
[")" (token-RPAREN lexeme)]
[whitespace (return-without-pos (lex input-port))]
[(eof) (token-EOF 'eof)]))
(define -parse (cfg-parser
(tokens regexp-tokens)
(start top)
(end EOF)
(src-pos)
(grammar [top [(maybe-anchor regexp)
(cond [$1
`(anchored ,$2 ,(pos->sexp $1-start-pos) ,(pos->sexp $2-end-pos))]
[else
`(unanchored ,$2 ,(pos->sexp $1-start-pos) ,(pos->sexp $2-end-pos))])]]
[maybe-anchor [(ANCHOR) #t]
[() #f]]
[regexp [(regexp STAR) `(star ,$1 ,(pos->sexp $1-start-pos) ,(pos->sexp $2-end-pos))]
[(regexp OR regexp) `(or ,$1 ,$3 ,(pos->sexp $1-start-pos) ,(pos->sexp $3-end-pos))]
[(LPAREN regexp RPAREN) `(group ,$2 ,(pos->sexp $1-start-pos) ,(pos->sexp $3-end-pos))]
[(LIT) `(lit ,$1 ,(pos->sexp $1-start-pos) ,(pos->sexp $1-end-pos))]])))
(define (pos->sexp pos)
(position-offset pos))
(define (parse s)
(define ip (open-input-string s))
(port-count-lines! ip)
(-parse (lambda () (lex ip))))
(check-equal? (parse "abc")
'(unanchored (lit "abc" 1 4) 1 4))
(check-equal? (parse "a | (b*) | c")
'(unanchored (or (or (lit "a" 1 2)
(group (star (lit "b" 6 7) 6 8) 5 9)
1 9)
(lit "c" 12 13)
1 13)
1 13)))
;; Tests used during development ;; Tests used during development
(define-tokens non-terminals (PLUS MINUS STAR BAR COLON EOF)) (define-tokens non-terminals (PLUS MINUS STAR BAR COLON EOF))
@ -772,7 +852,6 @@
[whitespace (lex input-port)] [whitespace (lex input-port)]
[(eof) (token-EOF 'eof)])) [(eof) (token-EOF 'eof)]))
(define parse (define parse
(cfg-parser (cfg-parser
(tokens non-terminals) (tokens non-terminals)
@ -792,14 +871,39 @@
[(<random> PLUS) (add1 $1)] [(<random> PLUS) (add1 $1)]
[(<random> PLUS) (add1 $1)]]))) [(<random> PLUS) (add1 $1)]])))
(define (result) (let ([p (open-input-string #;"+*|-|-*|+**" #;"-|+*|+**"
(let ([p (open-input-string #;"+*|-|-*|+**" #;"-|+*|+**" #;"+*|+**|-" #;"-|-*|-|-*"
#;"+*|+**|-" #;"-|-*|-|-*" #;"-|-*|-|-**|-|-*|-|-**"
#;"-|-*|-|-**|-|-*|-|-**" "-|-*|-|-**|-|-*|-|-***|-|-*|-|-**|-|-*|-|-****|-|-*|-|-**|-|-*|-|-***
"-|-*|-|-**|-|-*|-|-***|-|-*|-|-**|-|-*|-|-****|-|-*|-|-**|-|-*|-|-***
|-|-*|-|-**|-|-*|-|-*****|-|-*|-|-**|-|-*|-|-***|-|-*|-|-**|-|-*|-|-****| |-|-*|-|-**|-|-*|-|-*****|-|-*|-|-**|-|-*|-|-***|-|-*|-|-**|-|-*|-|-****|
-|-*|-|-**|-|-*|-|-***|-|-*|-|-**|-|-*|-|-*****" -|-*|-|-**|-|-*|-|-***|-|-*|-|-**|-|-*|-|-*****"
;; This one fails: ;; This one fails:
#;"+*")]) #;"+*")])
(time (parse (lambda () (lex p)))))) (check-equal? (parse (lambda () (lex p)))
(result)) '((((((((((("minus" || "minus") . *) || (("minus" || "minus") . *)) . *) || (((("minus" || "minus") . *) || (("minus" || "minus") . *)) . *)) . *)
||
(((((("minus" || "minus") . *) || (("minus" || "minus") . *)) . *) || (((("minus" || "minus") . *) || (("minus" || "minus") . *)) . *)) . *))
.
*)
||
(((((((("minus" || "minus") . *) || (("minus" || "minus") . *)) . *) || (((("minus" || "minus") . *) || (("minus" || "minus") . *)) . *)) . *)
||
(((((("minus" || "minus") . *) || (("minus" || "minus") . *)) . *) || (((("minus" || "minus") . *) || (("minus" || "minus") . *)) . *)) . *))
.
*))
.
*)
||
(((((((((("minus" || "minus") . *) || (("minus" || "minus") . *)) . *) || (((("minus" || "minus") . *) || (("minus" || "minus") . *)) . *)) . *)
||
(((((("minus" || "minus") . *) || (("minus" || "minus") . *)) . *) || (((("minus" || "minus") . *) || (("minus" || "minus") . *)) . *)) . *))
.
*)
||
(((((((("minus" || "minus") . *) || (("minus" || "minus") . *)) . *) || (((("minus" || "minus") . *) || (("minus" || "minus") . *)) . *)) . *)
||
(((((("minus" || "minus") . *) || (("minus" || "minus") . *)) . *) || (((("minus" || "minus") . *) || (("minus" || "minus") . *)) . *)) . *))
.
*))
.
*)))))

Loading…
Cancel
Save