split jsonic & jsonic-pro

dev-srcloc
Matthew Butterick 8 years ago
parent 0cc0929ebc
commit 9e00c6dcf8

@@ -0,0 +1,4 @@
#lang br/quicklang
;; Thin installer shim: expose jsonic-pro's reader under this module path.
;; The `reader` submodule simply re-exports everything from the `reader`
;; submodule of "jsonic-pro/main.rkt", so `#lang` lookup lands there.
(module reader br/quicklang
(require (submod "jsonic-pro/main.rkt" reader))
(provide (all-from-out (submod "jsonic-pro/main.rkt" reader))))

@@ -0,0 +1,9 @@
#lang br/demo/jsonic
// Sample jsonic source: bare values plus an object whose keys are not
// limited to strings — numbers and arrays appear as keys too.
42
"string"
["array", "of", "strings"]
{
"key": 42,
25: "value",
[1, 2, 3]: {"subkey": 21}
}

@@ -0,0 +1,51 @@
#lang at-exp br/quicklang
;; Reader + tokenizer for the jsonic-pro demo language.
(require "parser.rkt")
#|
Demonstrate:
+ color lexing
+ indentation
+ toolbar buttons
+ pinpoint errors
+ unit tests
|#
;; `reader` submodule: the hook the module system / DrRacket uses to read
;; a source file written in this #lang.
(module+ reader
;; read-syntax : path input-port -> syntax
;; Tokenizes and parses the whole port, then wraps the parse tree in a
;; module form whose body is expanded by br/demo/jsonic-pro/expander.
;; NOTE(review): the inner module name `bf-mod` looks like a leftover
;; from the bf demo — confirm the expander ignores it, or rename.
(define (read-syntax path port)
(define parse-tree (parse path (tokenize port)))
(define module-datum `(module bf-mod br/demo/jsonic-pro/expander
,parse-tree))
(datum->syntax #f module-datum))
(provide read-syntax get-info))
(require parser-tools/lex parser-tools/lex-sre brag/support)
;; tokenize : input-port -> (-> token-or-eof)
;; Returns a thunk; each call runs the lexer once on `port` and yields
;; the next token (or eof).
(define (tokenize port)
(define (next-token)
(define our-lexer
(lexer
[(eof) eof]
;; (char-complement "\n") means any char but "\n"
;; (complement "\n") means any whole string except "\n"
;; `//` line comments yield no token: recurse for the next real one
[(seq "//" (* (char-complement "\n"))) (next-token)]
["@$" (token 'OPEN lexeme)]
["$@" (token 'CLOSE lexeme)]
[any-char (token 'CHAR lexeme)]))
(our-lexer port))
next-token)
;; get-info : ignores its arguments and returns a key/default lookup
;; procedure used by tool integration; each hook is loaded lazily via
;; dynamic-require, falling back to #f when the module is absent.
;; NOTE(review): these paths point at br/demo/jsonic, not jsonic-pro —
;; confirm the pro dialect really means to share jsonic's tools.
(define (get-info . _)
(λ (key default)
(case key
[(color-lexer)
(dynamic-require 'br/demo/jsonic/color-lexer 'color-lexer (λ () #f))]
[(drracket:indentation)
(dynamic-require 'br/demo/jsonic/indenter 'indenter (λ () #f))]
[(drracket:toolbar-buttons)
(dynamic-require 'br/demo/jsonic/toolbar 'buttons (λ () #f))]
[else default])))
;; test-tokenize : string -> (listof token)
;; Unit-test helper: drains the tokenizer over an in-memory string port.
(define (test-tokenize str)
(define ip (open-input-string str))
(define token-producer (tokenize ip))
(for/list ([token (in-producer token-producer eof)])
token))

@@ -0,0 +1,7 @@
#lang brag
; A jsonic program is any interleaving of literal characters and
; embedded escape sequences.
jsonic-program: (char | s-val)*
; one literal character token, passed through
char: CHAR
; an @$ ... $@ escape; the leading / cuts OPEN/CLOSE from the parse tree
s-val: /OPEN CHAR* /CLOSE

@@ -1,9 +0,0 @@
#lang br/demo/jsonic
// Sample jsonic source exercising @$ ... $@ escapes; the enclosed Racket
// expressions are presumably handled by the expander — see expander.rkt.
{
"string": @$(string-append "foo" "bar")$@,
{
"array": @$(range 5)$@,
"object": @$(hash "k1" "valstring" (format "~a" 42) (hash "k1" (range 10) "k2" 42))$@
}
// "bar" :
}

@@ -1,51 +1,4 @@
#lang at-exp br/quicklang
;; Reader + tokenizer for the jsonic demo language (pre-split version).
(require "parser.rkt")
#|
Demonstrate:
+ color lexing
+ indentation
+ toolbar buttons
+ pinpoint errors
+ unit tests
|#
;; `reader` submodule: the hook the module system / DrRacket uses to read
;; a source file written in this #lang.
(module+ reader
;; read-syntax : path input-port -> syntax
;; Tokenizes and parses the whole port, then wraps the parse tree in a
;; module form whose body is expanded by br/demo/jsonic/expander.
;; NOTE(review): the inner module name `bf-mod` looks like a leftover
;; from the bf demo — confirm the expander ignores it, or rename.
(define (read-syntax path port)
(define parse-tree (parse path (tokenize port)))
(define module-datum `(module bf-mod br/demo/jsonic/expander
,parse-tree))
(datum->syntax #f module-datum))
(provide read-syntax get-info))
(require parser-tools/lex parser-tools/lex-sre brag/support)
;; tokenize : input-port -> (-> token-or-eof)
;; Returns a thunk; each call runs the lexer once on `port` and yields
;; the next token (or eof).
(define (tokenize port)
(define (next-token)
(define our-lexer
(lexer
[(eof) eof]
;; (char-complement "\n") means any char but "\n"
;; (complement "\n") means any whole string except "\n"
;; `//` line comments yield no token: recurse for the next real one
[(seq "//" (* (char-complement "\n"))) (next-token)]
["@$" (token 'OPEN lexeme)]
["$@" (token 'CLOSE lexeme)]
[any-char (token 'CHAR lexeme)]))
(our-lexer port))
next-token)
;; get-info : ignores its arguments and returns a key/default lookup
;; procedure used by tool integration; each hook is loaded lazily via
;; dynamic-require, falling back to #f when the module is absent.
(define (get-info . _)
(λ (key default)
(case key
[(color-lexer)
(dynamic-require 'br/demo/jsonic/color-lexer 'color-lexer (λ () #f))]
[(drracket:indentation)
(dynamic-require 'br/demo/jsonic/indenter 'indenter (λ () #f))]
[(drracket:toolbar-buttons)
(dynamic-require 'br/demo/jsonic/toolbar 'buttons (λ () #f))]
[else default])))
;; test-tokenize : string -> (listof token)
;; Unit-test helper: drains the tokenizer over an in-memory string port.
(define (test-tokenize str)
(define ip (open-input-string str))
(define token-producer (tokenize ip))
(for/list ([token (in-producer token-producer eof)])
token))
#lang br/quicklang
;; Replacement main.rkt: the reader logic now lives in "reader.rkt"; this
;; `reader` submodule just re-provides its read-syntax.
;; NOTE(review): the submodule language is `br` here, while the sibling
;; shim uses `br/quicklang` — confirm the difference is intentional.
(module reader br
(require "reader.rkt")
(provide read-syntax))

@@ -1,7 +1,4 @@
#lang brag
; A jsonic program is any interleaving of literal characters and
; embedded escape sequences.
jsonic-program: (char | s-val)*
; one literal character token, passed through
char: CHAR
; an @$ ... $@ escape; the leading / cuts OPEN/CLOSE from the parse tree
s-val: /OPEN CHAR* /CLOSE

@@ -0,0 +1,8 @@
#lang br/quicklang
(require "tokenizer.rkt" "parser.rkt")

;; read-syntax : any input-port -> syntax
;; Reader entry point: tokenize the port, parse the token stream, and
;; wrap the resulting parse tree in a module form that is expanded by
;; "expander.rkt".
(define (read-syntax src-path in-port)
  (define tree (parse src-path (tokenize in-port)))
  (datum->syntax #f (list 'module 'jsonic-module "expander.rkt" tree)))
(provide read-syntax)

@@ -0,0 +1,16 @@
#lang br/quicklang
(require parser-tools/lex parser-tools/lex-sre brag/support)
;; tokenize : input-port -> (-> token-or-eof)
;; Returns a thunk; each call runs the lexer once on `port` and yields
;; the next token (or eof when the port is exhausted).
(define (tokenize port)
;; next-token builds the lexer fresh each call so that the comment rule
;; can recurse into next-token itself to skip `//` lines.
(define (next-token)
(define our-lexer
(lexer
[(eof) eof]
;; (char-complement "\n") means any char but "\n"
;; (complement "\n") means any whole string except "\n"
;; `//` line comments yield no token: recurse for the next real one
[(seq "//" (* (char-complement "\n"))) (next-token)]
["@$" (token 'OPEN lexeme)]
["$@" (token 'CLOSE lexeme)]
[any-char (token 'CHAR lexeme)]))
(our-lexer port))
next-token)
(provide tokenize)
Loading…
Cancel
Save