improve jsonic

dev-srcloc
Matthew Butterick 8 years ago
parent da2bdce15a
commit 65b34b8409

@@ -1,4 +0,0 @@
#lang br/quicklang
(module reader br/quicklang
(require (submod "jsonic-b/main.rkt" reader))
(provide (all-from-out (submod "jsonic-b/main.rkt" reader))))

@@ -1,20 +0,0 @@
#lang br/quicklang
(require json)
(define-macro (js-module-begin PARSE-TREE)
#'(#%module-begin
(define result-string PARSE-TREE)
(define validated-jsexpr (string->jsexpr result-string))
(display (jsexpr->string validated-jsexpr))))
(provide (rename-out [js-module-begin #%module-begin]))
(define-macro (jsonic-program S-EXP-OR-JSON-CHAR ...)
#'(string-trim (string-append S-EXP-OR-JSON-CHAR ...)))
(provide jsonic-program)
(define-macro (json-char CHAR-STR) #'CHAR-STR)
(provide json-char)
(define-macro (s-exp SEXP-STR)
(with-pattern ([SEXP-DATUM (format-datum '~a (syntax->datum #'SEXP-STR))])
#'(jsexpr->string SEXP-DATUM)))
(provide s-exp)

@@ -1,12 +0,0 @@
#lang br/demo/jsonic-b
// a line comment
[
@$ 'null $@,
@$ #f $@,
@$ (* 6 7) $@,
@$ "string" $@,
@$ (list "array" "of" "strings") $@,
@$ (hash 'key-1 42
'key-2 "value"
'key-3 (hash 'subkey 21)) $@
]

@@ -1,4 +0,0 @@
#lang br/quicklang
(module reader br
(require "reader.rkt")
(provide read-syntax))

@@ -1,4 +0,0 @@
#lang brag
jsonic-program: (s-exp | json-char)*
s-exp: SEXP-TOK
json-char: CHAR-TOK

@@ -1,8 +0,0 @@
#lang br/quicklang
(require "tokenizer.rkt" "parser.rkt")
(define (read-syntax path port)
(define parse-tree (parse path (tokenize port)))
(define module-datum `(module jsonic-module br/demo/jsonic-b/expander
,parse-tree))
(datum->syntax #f module-datum))
(provide read-syntax)

@@ -1,14 +0,0 @@
#lang br/quicklang
(require brag/lexer-support)
(define (tokenize port)
(define (next-token)
(define our-lexer
(lexer
[(eof) eof]
[(delimited-by "//" "\n") (next-token)]
[(delimited-by "@$" "$@")
(token 'SEXP-TOK (trim-delimiters "@$" lexeme "$@"))]
[any-char (token 'CHAR-TOK lexeme)]))
(our-lexer port))
next-token)
(provide tokenize)

@@ -3,35 +3,18 @@
(define-macro (js-module-begin PARSE-TREE)
#'(#%module-begin
(define result-string PARSE-TREE)
(when (string->jsexpr result-string)
(display result-string))))
(define validated-jsexpr (string->jsexpr result-string))
(display (jsexpr->string validated-jsexpr))))
(provide (rename-out [js-module-begin #%module-begin]))
(define-macro (jsonic-program S-EXP-OR-JSON-CHAR ...)
#'(string-trim (string-append S-EXP-OR-JSON-CHAR ...)))
#'(string-append S-EXP-OR-JSON-CHAR ...))
(provide jsonic-program)
(define-macro (json-char TOKEN)
#'TOKEN)
(define-macro (json-char CHAR-STR) #'CHAR-STR)
(provide json-char)
(define-macro (s-exp TOKEN ...)
(define token-stxs (syntax->list #'(TOKEN ...)))
(define token-strs (map syntax->datum token-stxs))
(define s-exp-str (apply string-append token-strs))
(with-pattern ([S-EXP-DATUM (format-datum '~a s-exp-str)])
#'(->json S-EXP-DATUM)))
(provide s-exp)
(define (->json x)
(cond
[(number? x) (number->string x)]
[(string? x) (format "~v" x)]
[(list? x)
(format "[~a]" (string-join (map ->json x) ", "))]
[(hash? x)
(define pair-strs (for/list ([(k v) (in-hash x)])
(format "~a: ~a"
(->json k) (->json v))))
(format "{~a}" (string-join pair-strs ", "))]
[else ""]))
(define-macro (s-exp SEXP-STR)
(with-pattern ([SEXP-DATUM (format-datum '~a #'SEXP-STR)])
#'(jsexpr->string SEXP-DATUM)))
(provide s-exp)

@@ -1,10 +1,13 @@
#lang br/demo/jsonic
// a line comment
[
@$ (* 6 7) $@,
@$ "string" $@,
@$ (list "array" "of" "strings") $@,
@$ (hash "key-1" 42
"key-2" "value"
"key-3" (hash "subkey" 21)) $@
@$ 'null $@,
@$ #f $@,
@$ #t $@,
@$ (* 6 7) $@,
@$ "string" $@,
@$ (list "array" "of" "strings") $@,
@$ (hash 'key-1 42
'key-2 "value"
'key-3 (hash 'subkey 21)) $@
]

@@ -1,4 +1,4 @@
#lang brag
jsonic-program: (s-exp | json-char)*
s-exp: /OPEN CHAR* /CLOSE
json-char: CHAR
s-exp: SEXP-TOK
json-char: CHAR-TOK

@@ -5,14 +5,10 @@
(define our-lexer
(lexer
[(eof) eof]
[(:seq "//" (:* (char-complement "\n"))) (next-token)]
["@$" (token 'OPEN)]
["$@" (token 'CLOSE)]
[any-char (token 'CHAR lexeme)]))
[(delimited-by "//" "\n") (next-token)]
[(delimited-by "@$" "$@")
(token 'SEXP-TOK (trim-delimiters "@$" lexeme "$@"))]
[any-char (token 'CHAR-TOK lexeme)]))
(our-lexer port))
next-token)
(provide tokenize)
;; (char-complement "\n") means any char but "\n"
;; (complement "\n") means any whole string except "\n"
(provide tokenize)