diff --git a/beautiful-racket/br/demo/jsonic/expander.rkt b/beautiful-racket/br/demo/jsonic/expander.rkt
new file mode 100644
index 0000000..b93d58c
--- /dev/null
+++ b/beautiful-racket/br/demo/jsonic/expander.rkt
@@ -0,0 +1,33 @@
+#lang br/quicklang
+(require json (for-syntax br/datum racket/string))
+(provide (rename-out [jsonic-mb #%module-begin])
+         jsonic-program
+         char
+         s-val)
+
+(define-macro (jsonic-mb PARSE-TREE)
+  #'(#%module-begin
+     (define json-string (string-trim PARSE-TREE))
+     (when (string->jsexpr json-string)
+       (display json-string))))
+
+(define-macro (jsonic-program STR ...)
+  #'(string-append STR ...))
+
+(define-macro (char STR)
+  #'STR)
+
+(define (stringify result)
+  (cond
+    [(number? result) (number->string result)]
+    [(string? result) (format "~v" result)]
+    [(list? result) (format "[~a]" (string-join (map stringify result) ", "))]
+    [(hash? result) (format "{~a}" (string-join (for/list ([(k v) (in-hash result)])
+                                                  (format "~a: ~a" (stringify k) (stringify v))) ", "))]
+    [else ""]))
+
+(define-macro (s-val STR ...)
+  (define s-exp-string
+    (string-join (map syntax->datum (syntax->list #'(STR ...))) ""))
+  (with-pattern ([DATUM (format-datum '~a s-exp-string)])
+    #'(stringify DATUM)))
diff --git a/beautiful-racket/br/demo/jsonic/reader.rkt b/beautiful-racket/br/demo/jsonic/reader.rkt
index a6458c5..60f1361 100644
--- a/beautiful-racket/br/demo/jsonic/reader.rkt
+++ b/beautiful-racket/br/demo/jsonic/reader.rkt
@@ -2,7 +2,7 @@
 (require "tokenizer.rkt" "parser.rkt")
 (define (read-syntax path port)
   (define parse-tree (parse path (tokenize port)))
-  (define module-datum `(module jsonic-module "expander.rkt"
+  (define module-datum `(module jsonic-module br/demo/jsonic/expander
                           ,parse-tree))
   (datum->syntax #f module-datum))
 (provide read-syntax)
\ No newline at end of file
diff --git a/beautiful-racket/br/demo/jsonic/tokenizer.rkt b/beautiful-racket/br/demo/jsonic/tokenizer.rkt
index 8d7a893..e36b4b1 100644
--- a/beautiful-racket/br/demo/jsonic/tokenizer.rkt
+++ b/beautiful-racket/br/demo/jsonic/tokenizer.rkt
@@ -1,5 +1,5 @@
 #lang br/quicklang
-(require parser-tools/lex parser-tools/lex-sre brag/support)
+(require brag/lexer-support)
 (define (tokenize port)
   (define (next-token)
     (define our-lexer
@@ -7,9 +7,9 @@
       [(eof) eof]
       ;; (char-complement "\n") means any char but "\n"
      ;; (complement "\n") means any whole string except "\n"
-      [(seq "//" (* (char-complement "\n"))) (next-token)]
-      ["@$" (token 'OPEN lexeme)]
-      ["$@" (token 'CLOSE lexeme)]
+      [(:seq "//" (:* (char-complement "\n"))) (next-token)]
+      ["@$" (token 'OPEN)]
+      ["$@" (token 'CLOSE)]
       [any-char (token 'CHAR lexeme)]))
     (our-lexer port))
  next-token)
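
Not part of the patch: a rough usage sketch to show what the new expander and tokenizer accept, assuming the collection exposes the reader above as #lang br/demo/jsonic (that lang line is an assumption; only the reader/expander/tokenizer modules appear in this diff). A source file might look like:

    #lang br/demo/jsonic
    // illustration only; line comments like this are skipped by the lexer
    [
      @$ (* 6 7) $@,
      @$ (string-upcase "hi") $@,
      @$ (list 1 2 3) $@
    ]

Each @$ ... $@ span is gathered by s-val, converted to a datum with format-datum, evaluated at run time, and rendered by stringify, so after string-trim and the string->jsexpr check in jsonic-mb the module should display JSON along the lines of [42, "HI", [1, 2, 3]]. Dropping the lexeme from the OPEN and CLOSE tokens keeps the delimiters out of the parse tree, which is what lets jsonic-program splice only chars and s-vals into the output string.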