diff --git a/beautiful-racket/br/demo/jsonic-b.rkt b/beautiful-racket/br/demo/jsonic-b.rkt
new file mode 100644
index 0000000..4ae83d6
--- /dev/null
+++ b/beautiful-racket/br/demo/jsonic-b.rkt
@@ -0,0 +1,4 @@
+#lang br/quicklang
+(module reader br/quicklang
+  (require (submod "jsonic-b/main.rkt" reader))
+  (provide (all-from-out (submod "jsonic-b/main.rkt" reader))))
diff --git a/beautiful-racket/br/demo/jsonic-b/expander.rkt b/beautiful-racket/br/demo/jsonic-b/expander.rkt
index 2d9ff03..fe5d673 100644
--- a/beautiful-racket/br/demo/jsonic-b/expander.rkt
+++ b/beautiful-racket/br/demo/jsonic-b/expander.rkt
@@ -1,37 +1,20 @@
 #lang br/quicklang
-
 (require json)
 (define-macro (js-module-begin PARSE-TREE)
   #'(#%module-begin
      (define result-string PARSE-TREE)
-     (when (string->jsexpr result-string)
-       (display result-string))))
+     (define validated-jsexpr (string->jsexpr result-string))
+     (display (jsexpr->string validated-jsexpr))))
 (provide (rename-out [js-module-begin #%module-begin]))
 
 (define-macro (jsonic-program S-EXP-OR-JSON-CHAR ...)
   #'(string-trim (string-append S-EXP-OR-JSON-CHAR ...)))
 (provide jsonic-program)
 
-(define-macro (json-char TOKEN)
-  #'TOKEN)
+(define-macro (json-char CHAR-STR) #'CHAR-STR)
 (provide json-char)
 
-(define (list->json x)
-  (format "[~a]" (string-join (map ->string x) ", ")))
-
-(define (hash->json x)
-  (format "{~a}" (string-join (for/list ([(k v) (in-hash x)])
-                                (format "~a: ~a" (->string k) (->string v))) ", ")))
-
-(define (->string x)
-  (cond
-    [(number? x) (number->string x)]
-    [(string? x) (format "~v" x)]
-    [(list? x) (list->json x)]
-    [(hash? x) (hash->json x)]
-    [else ""]))
-
-(define-macro (s-exp EXP-STRING)
-  (with-pattern ([EXP-DATUM (format-datum '~a (syntax->datum #'EXP-STRING))])
-    #'(->string EXP-DATUM)))
+(define-macro (s-exp SEXP-STR)
+  (with-pattern ([SEXP-DATUM (format-datum '~a (syntax->datum #'SEXP-STR))])
+    #'(jsexpr->string SEXP-DATUM)))
 (provide s-exp)
\ No newline at end of file
diff --git a/beautiful-racket/br/demo/jsonic-b/jsonic-test.rkt b/beautiful-racket/br/demo/jsonic-b/jsonic-test.rkt
index 8972dbe..33613e9 100644
--- a/beautiful-racket/br/demo/jsonic-b/jsonic-test.rkt
+++ b/beautiful-racket/br/demo/jsonic-b/jsonic-test.rkt
@@ -1,10 +1,12 @@
-#lang br/demo/jsonic
+#lang br/demo/jsonic-b
 // a line comment
 [
+@$ 'null $@,
+@$ #f $@,
 @$ (* 6 7) $@,
 @$ "string" $@,
 @$ (list "array" "of" "strings") $@,
-@$ (hash "key-1" 42
-         "key-2" "value"
-         "key-3" (hash "subkey" 21)) $@
+@$ (hash 'key-1 42
+         'key-2 "value"
+         'key-3 (hash 'subkey 21)) $@
 ]
\ No newline at end of file
diff --git a/beautiful-racket/br/demo/jsonic-b/parser.rkt b/beautiful-racket/br/demo/jsonic-b/parser.rkt
index 0f7183f..2a282ba 100644
--- a/beautiful-racket/br/demo/jsonic-b/parser.rkt
+++ b/beautiful-racket/br/demo/jsonic-b/parser.rkt
@@ -1,4 +1,4 @@
 #lang brag
 jsonic-program: (s-exp | json-char)*
-s-exp: SEXP
-json-char: CHAR
+s-exp: SEXP-TOK
+json-char: CHAR-TOK
diff --git a/beautiful-racket/br/demo/jsonic-b/reader.rkt b/beautiful-racket/br/demo/jsonic-b/reader.rkt
index 60f1361..41434ce 100644
--- a/beautiful-racket/br/demo/jsonic-b/reader.rkt
+++ b/beautiful-racket/br/demo/jsonic-b/reader.rkt
@@ -2,7 +2,7 @@
 (require "tokenizer.rkt" "parser.rkt")
 (define (read-syntax path port)
   (define parse-tree (parse path (tokenize port)))
-  (define module-datum `(module jsonic-module br/demo/jsonic/expander
+  (define module-datum `(module jsonic-module br/demo/jsonic-b/expander
                           ,parse-tree))
   (datum->syntax #f module-datum))
 (provide read-syntax)
\ No newline at end of file
diff --git a/beautiful-racket/br/demo/jsonic-b/tokenizer.rkt b/beautiful-racket/br/demo/jsonic-b/tokenizer.rkt
index c46a762..13aa18c 100644
--- a/beautiful-racket/br/demo/jsonic-b/tokenizer.rkt
+++ b/beautiful-racket/br/demo/jsonic-b/tokenizer.rkt
@@ -5,15 +5,10 @@
     (define our-lexer
       (lexer
        [(eof) eof]
-       [(:seq "//" (:* (char-complement "\n"))) (next-token)]
-       [(:seq "@$" (complement (:seq any-string "$@" any-string)) "$@")
-        (let ([trimmed-lexeme (string-trim (string-trim lexeme "$@") "@$")])
-          (token 'SEXP trimmed-lexeme))]
-       [any-char (token 'CHAR lexeme)]))
+       [(delimited-by "//" "\n") (next-token)]
+       [(delimited-by "@$" "$@")
+        (token 'SEXP-TOK (trim-delimiters "@$" lexeme "$@"))]
+       [any-char (token 'CHAR-TOK lexeme)]))
     (our-lexer port))
   next-token)
-(provide tokenize)
-
-
-;; (char-complement "\n") means any char but "\n"
-;; (complement "\n") means any whole string except "\n"
\ No newline at end of file
+(provide tokenize)
\ No newline at end of file
diff --git a/brag/brag/brag.scrbl b/brag/brag/brag.scrbl
index 34ac266..342b34f 100755
--- a/brag/brag/brag.scrbl
+++ b/brag/brag/brag.scrbl
@@ -986,6 +986,15 @@ In addition to the exports shown below, the @racketmodname[brag/lexer-support] m
 Repeatedly apply @racket[tokenizer] to @racket[source-string], gathering the resulting tokens into a list. Useful for testing or debugging a tokenizer.
 }
 
+
+@defproc[(trim-delimiters [left-delimiter string?]
+[str string?]
+[right-delimiter string?])
+         string?]{
+Remove @racket[left-delimiter] from the left side of @racket[str], and @racket[right-delimiter] from its right side. Intended as a helper function for @racket[delimited-by].
+}
+
+
 @defform[(:* re ...)]{
 
 Repetition of @racket[re] sequence 0 or more times.}
@@ -1045,5 +1054,9 @@ one character.}
 
 Character ranges, matching characters between successive pairs of characters.}
 
+@defform[(delimited-by open close)]{
+
+A string that is bounded by the @racket[open] and @racket[close] delimiters. Matching is non-greedy (meaning, it stops at the first occurrence of @racket[close]). The resulting lexeme includes the delimiters. To remove them, see @racket[trim-delimiters].}
+
 @close-eval[my-eval]
 
diff --git a/brag/brag/lexer-support.rkt b/brag/brag/lexer-support.rkt
index 3ea4fa3..d8a3bf5 100755
--- a/brag/brag/lexer-support.rkt
+++ b/brag/brag/lexer-support.rkt
@@ -1,7 +1,9 @@
 #lang racket/base
 (require "support.rkt"
          parser-tools/lex
-         (prefix-in : parser-tools/lex-sre))
+         racket/string
+         (prefix-in : parser-tools/lex-sre)
+         (for-syntax racket/base))
 (provide (all-from-out "support.rkt")
          (all-from-out parser-tools/lex)
          (all-from-out parser-tools/lex-sre)
@@ -13,4 +15,13 @@
           in))
   (define token-producer (tokenize input-port))
   (for/list ([token (in-producer token-producer eof)])
-    token))
\ No newline at end of file
+    token))
+
+(define (trim-delimiters left lexeme right)
+  (string-trim (string-trim lexeme left #:right? #f) right #:left? #f))
+
+(define-lex-trans delimited-by
+  (λ(stx)
+    (syntax-case stx ()
+      [(_ OPEN CLOSE)
+       #'(:seq OPEN (complement (:seq any-string CLOSE any-string)) CLOSE)])))
\ No newline at end of file
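
The sketch below is not part of the patch. It assumes, per the brag.scrbl additions above, that the brag/lexer-support module exports the new delimited-by lex transformer and trim-delimiters helper alongside the lexer and token bindings it re-exports. The file itself, its test string, and the module+ submodule are hypothetical; the token names SEXP-TOK and CHAR-TOK are taken from jsonic-b/tokenizer.rkt above.

#lang racket/base
;; Hypothetical standalone sketch of the new lexer helpers; not part of the patch.
(require brag/lexer-support) ; assumed to export delimited-by, trim-delimiters, lexer, token

(define (tokenize port)
  ;; Return a thunk that yields one token per call, which is what a brag parser expects.
  (define (next-token)
    (define our-lexer
      (lexer
       [(eof) eof]
       ;; A "//" line comment runs to the newline; skip it and try again.
       [(delimited-by "//" "\n") (next-token)]
       ;; "@$ ... $@" wraps an embedded s-expression; keep it, minus the delimiters.
       [(delimited-by "@$" "$@")
        (token 'SEXP-TOK (trim-delimiters "@$" lexeme "$@"))]
       ;; Every other character passes through one at a time.
       [any-char (token 'CHAR-TOK lexeme)]))
    (our-lexer port))
  next-token)

(module+ main
  ;; Drain the producer by hand, mirroring the in-producer loop in lexer-support.rkt above.
  (define next-tok (tokenize (open-input-string "[1, // note\n@$ (* 6 7) $@]")))
  (for ([tok (in-producer next-tok eof)])
    (println tok)))

A related note on the jsonic-test.rkt change above: because the expander now round-trips the program through string->jsexpr and jsexpr->string, hash keys must be symbols — (hash 'key-1 42) is a valid jsexpr while (hash "key-1" 42) is not — which is why the test switches from string keys to quoted symbols.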