pull/28/head
Matthew Butterick 3 years ago
parent 555e2e0193
commit 066ef315e2

@@ -18,7 +18,7 @@
 (define-for-syntax (rules->token-types rules)
   (define-values (implicit-tokens explicit-tokens) (rules-collect-token-types rules))
   (remove-duplicates (append (for/list ([it (in-list implicit-tokens)])
                                        (string->symbol (syntax-e it)))
                              (map syntax-e explicit-tokens)) eq?))

 (define-syntax (brag-module-begin rules-stx)
@@ -38,7 +38,7 @@
     (with-syntax ([START-ID (first rule-ids)] ; The first rule, by default, is the start rule.
                   [((TOKEN-TYPE . TOKEN-TYPE-CONSTRUCTOR) ...)
                    (for/list ([tt (in-list (rules->token-types rules))])
                              (cons tt (string->symbol (format "token-~a" tt))))]
                   ;; Flatten rules to use the yacc-style ruleset that br-parser-tools supports
                   [GENERATED-RULE-CODES (map flat-rule->yacc-rule (flatten-rules rules))]
                   ;; main exports. Break hygiene so they're also available at top-level / repl
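For orientation: `rules->token-types`, shown in the first hunk above, merges the two kinds of token types a grammar can mention. Implicit tokens (quoted literals appearing in the rules) are converted to symbols with `string->symbol`, the explicitly named token types are appended, and duplicates are removed; the second hunk then names a constructor `token-~a` for each collected type. A minimal grammar sketch exercising both kinds, with illustrative rule and token names that are not part of this diff: the literal "+" contributes an implicit token type (the symbol +), while INTEGER is an explicit token type, so the constructor names would come out as token-+ and token-INTEGER.

#lang brag
expr : INTEGER "+" INTEGER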
@@ -71,44 +71,38 @@
                    (cons eof token-EOF)
                    (cons 'TOKEN-TYPE TOKEN-TYPE-CONSTRUCTOR) ...)))
-      (define-syntax (MAKE-RULE-PARSER rule-id-stx)
-        (syntax-case rule-id-stx ()
-          [(_ start-rule)
-           (and (identifier? #'start-rule)
-                (member (syntax-e #'start-rule) 'RULE-IDS))
+      (define-syntax (MAKE-RULE-PARSER stx)
+        (syntax-case stx ()
+          [(_ START-RULE-ID)
+           (and (identifier? #'START-RULE-ID) (member (syntax-e #'START-RULE-ID) 'RULE-IDS))
           ;; The cfg-parser depends on the start-rule provided in (start ...) to have the same
           ;; context as the rest of this body. Hence RECOLORED-START-RULE
-           (with-syntax ([RECOLORED-START-RULE (datum->syntax #'RULES-STX (syntax-e #'start-rule))]
-                         [PARSE-NAME (datum->syntax #'RULES-STX (string->symbol (format "~a-rule-parser" 'start-rule)))])
-             #'(let* ([THE-GRAMMAR (cfg-parser (tokens enumerated-tokens)
-                                               (src-pos)
-                                               (start RECOLORED-START-RULE)
-                                               (end EOF)
-                                               (error THE-ERROR-HANDLER)
-                                               (grammar . GENERATED-RULE-CODES))]
-                      [THE-BODY (lambda (tokenizer)
-                                  (define next-token
-                                    (make-permissive-tokenizer tokenizer all-tokens-hash/mutable))
-                                  ;; little post-processor to support cuts on top rule name
-                                  (define parse-tree-stx (THE-GRAMMAR next-token))
-                                  (define top-rule-name-stx (syntax-case parse-tree-stx ()
-                                                              [(TRN . REST) #'TRN]
-                                                              [_ (error 'malformed-parse-tree)]))
-                                  (if (eq? (syntax-property top-rule-name-stx 'hide-or-splice?) 'hide)
-                                      ;; use `remove-rule-name` so we get the same housekeeping
-                                      (remove-rule-name parse-tree-stx)
-                                      parse-tree-stx))])
-                 (procedure-rename
-                  (case-lambda [(tokenizer)
-                                (THE-BODY tokenizer)]
-                               [(source tokenizer)
-                                (parameterize ([current-source source])
-                                  (THE-BODY tokenizer))])
-                  (string->symbol (format "~a-rule-parser" 'start-rule)))))]
+           (with-syntax ([RECOLORED-START-RULE (datum->syntax #'RULES-STX (syntax-e #'START-RULE-ID))])
+             #'(let ()
+                 (define (rule-parser tokenizer)
+                   (define rule-grammar (cfg-parser (tokens enumerated-tokens)
+                                                    (src-pos)
+                                                    (start RECOLORED-START-RULE)
+                                                    (end EOF)
+                                                    (error the-error-handler)
+                                                    (grammar . GENERATED-RULE-CODES)))
+                   (define next-token (make-permissive-tokenizer tokenizer all-tokens-hash/mutable))
+                   ;; here's how we support grammar "cuts" on top rule name
+                   (define parse-tree-stx (rule-grammar next-token))
+                   (syntax-case parse-tree-stx ()
+                     [(TOP-RULE-NAME . _)
+                      (if (eq? (syntax-property #'TOP-RULE-NAME 'hide-or-splice?) 'hide)
+                          (remove-rule-name parse-tree-stx) ; use `remove-rule-name` so we get the same housekeeping
+                          parse-tree-stx)]
+                     [_ (error 'malformed-parse-tree)]))
+                 (case-lambda [(tokenizer) (rule-parser tokenizer)]
+                              [(source tokenizer)
+                               (parameterize ([current-source source])
+                                 (rule-parser tokenizer))])))]
          [(_ not-a-rule-id)
           (raise-syntax-error #f
                               (format "Rule ~a is not defined in the grammar" (syntax-e #'not-a-rule-id))
-                              rule-id-stx)]))
+                              stx)]))
       ;; start-id has to be a value, not an expr, because make-rule-parser is a macro
       (define PARSE (procedure-rename (MAKE-RULE-PARSER START-ID) 'PARSE))
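The upshot of the rewrite above: each `(MAKE-RULE-PARSER some-rule)` now expands to a plain two-arity `case-lambda` instead of a `procedure-rename`d body, while the calling conventions stay the same. A usage sketch, assuming a grammar module such as `brag/examples/subrule` (used by the revised test below) whose rules include `next`; the source name `'my-source` is illustrative:

#lang racket/base
(require brag/support
         brag/examples/subrule)

(define parse-next (make-rule-parser next))

;; one-argument form: pass the token source directly
;; (the permissive tokenizer also accepts a plain string, as the tests show)
(parse-next "0")             ; => syntax whose datum is '(next "0")

;; two-argument form: a source name first, which the generated case-lambda
;; installs via current-source before parsing, so error srclocs carry it
(parse-next 'my-source "0")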

@@ -7,7 +7,7 @@
          brag/private/internal-support)
-(provide THE-ERROR-HANDLER
+(provide the-error-handler
          make-permissive-tokenizer
          atomic-datum->syntax
          positions->srcloc
@@ -19,7 +19,7 @@
 ;; The level of indirection here is necessary since the yacc grammar wants a
 ;; function value for the error handler up front. We want to delay that decision
 ;; till parse time.
-(define (THE-ERROR-HANDLER tok-ok? tok-name tok-value start-pos end-pos)
+(define (the-error-handler tok-ok? tok-name tok-value start-pos end-pos)
   (match (positions->srcloc start-pos end-pos)
     [(list src line col offset span)
      ((current-parser-error-handler) tok-name
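The change here is only the rename of `THE-ERROR-HANDLER` to `the-error-handler`, matching the codegen hunk above; the indirection described in the comment is untouched. As a generic sketch of that pattern, with made-up names that are not brag's API: the parser generator needs a concrete handler procedure when the grammar is defined, so a fixed wrapper is handed over that defers to a parameter consulted at parse time.

#lang racket/base
;; sketch only: illustrative names, not brag exports
(define current-handler
  (make-parameter (lambda (tok-name tok-value)
                    (error 'parse "unexpected token: ~a" tok-name))))

;; the fixed procedure handed to the parser generator up front...
(define (fixed-error-handler tok-name tok-value)
  ;; ...which defers to whatever handler is installed at parse time
  ((current-handler) tok-name tok-value))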

@@ -1,44 +1,17 @@
 #lang racket/base
 (require rackunit
          brag/support
-         br-parser-tools/lex
-         brag/rules/parser
-         brag/rules/lexer
-         )
-
-(require "make-rule-parser-grammar.rkt")
-
-(define parse-next
-  (make-rule-parser next))
-
-(define parse-start
-  (make-rule-parser start))
-
-(define (lex ip)
-  (port-count-lines! ip)
-  (lambda ()
-    (define next-char (read-char ip))
-    (cond [(eof-object? next-char)
-           (token eof)]
-          [(char=? next-char #\0)
-           (token "0" "0")]
-          [(char=? next-char #\1)
-           (token "1" "1")])))
-
-(check-equal? (syntax->datum (parse #f (lex (open-input-string "0"))))
-              '(start (next "0")))
-
-(check-equal? (syntax->datum (parse #f (lex (open-input-string "0"))))
-              (syntax->datum (parse (lex (open-input-string "0")))))
-
-(check-equal? (syntax->datum (parse-start #f (lex (open-input-string "0"))))
-              '(start (next "0")))
-
-(check-equal? (syntax->datum (parse-start #f (lex (open-input-string "0"))))
-              (syntax->datum (parse-start (lex (open-input-string "0")))))
-
-(check-equal? (syntax->datum (parse-next #f (lex (open-input-string "0"))))
-              '(next "0"))
-
-(check-equal? (syntax->datum (parse-next #f (lex (open-input-string "0"))))
-              (syntax->datum (parse-next (lex (open-input-string "0")))))
+         brag/examples/subrule)
+
+(define parse-next (make-rule-parser next))
+(define parse-start (make-rule-parser start))
+
+(check-equal? (syntax->datum (parse #f "0")) '(start (next "0")))
+(check-equal? (syntax->datum (parse #f "0")) (syntax->datum (parse "0")))
+
+(check-equal? (syntax->datum (parse-start #f "0")) '(start (next "0")))
+(check-equal? (syntax->datum (parse-start #f "0")) (syntax->datum (parse-start "0")))
+
+(check-equal? (syntax->datum (parse-next #f "0")) '(next "0"))
+(check-equal? (syntax->datum (parse-next #f "0")) (syntax->datum (parse-next "0")))
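For context on the expected trees in these checks: they are consistent with a two-rule grammar along the following lines. This is only a sketch inferred from the assertions; see `brag/examples/subrule` in the repo for the actual module.

#lang brag
start : next
next : "0"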