# parser-transform — streaming + async lexer and parser.
# Test suite exercising a starter JSON lexer/parser through
# Node transform streams (Markdown/docco-style literate source).
{expect} = require 'chai'
fs = require 'fs'
json_lexer = fs.readFileSync 'test/json.l','utf8'
json_parser = fs.readFileSync 'test/json.y', 'utf8'
asyncjson_parser = fs.readFileSync 'test/json-async.y', 'utf8'
Stream = require 'stream'
# Debugging pass-through: logs every chunk it sees and forwards it
# unchanged. Pipe it between the lexer and parser stages to inspect
# the object-mode chunks flowing through.
class Tee extends Stream.Transform

  constructor: (options = {}) ->
    # Chunks are tokens/objects, not byte buffers.
    options.objectMode = true
    super options

  _transform: (chunk, encoding, next) ->
    console.log chunk
    # Forward the chunk unchanged downstream (the bare `chunk`
    # statement in the garbled source did nothing).
    @push chunk
    next()
    return
# Readable that emits a small JSON array over time: '[' immediately,
# then twenty delayed '"yes",' chunks, then '4', ']' and EOF.
# Exercises the lexer/parser with asynchronous, fragmented input so it
# interleaves with the other concurrent test streams.
class LongReadable extends Stream.Readable

  constructor: (options) ->
    super options
    # Number of _read calls serviced so far.
    @count = 0

  _read: (size) ->
    # Post-increment so the very first read (count 0) hits `when 0`.
    switch @count++
      when 0
        @push '['
      when 21
        @push '4'
      when 22
        @push ']'
      when 23
        # Signal EOF asynchronously.
        setTimeout (=> @push null), 60
      else
        # Reads 1..20: emit the bulk of the array items with a delay
        # to force interleaving with the concurrent file streams.
        setTimeout (=> @push '"yes",'), 50
    return
# End-to-end suite: build lexer DFAs and two grammars, then run three
# independent lexer→parser pipelines concurrently over distinct inputs.
describe 'The module', ->

  {LexerParser, LexerTransform, Grammar, ParserTransform} = require '..'

  # Shared fixtures built by the first three tests, consumed by the last.
  dfas = null
  grammar = null
  grammar2 = null

  it 'should build a lexer', ->
    dfas = LexerParser.parse json_lexer

  it 'should build a grammar', ->
    grammar = Grammar.fromString json_parser, mode:'LALR1', 'bnf'

  it 'should build another grammar', ->
    grammar2 = Grammar.fromString asyncjson_parser, mode:'LALR1', 'bnf'

  it 'should parse two concurrent streams', (done) ->
    # Test three concurrent streams

    # A stream is read from `package.json`
    s = fs.createReadStream('./package.json','utf8')
    s.setEncoding('utf8')
    streamLexer = new LexerTransform dfas
    streamParser = new ParserTransform grammar

    # The "background" stream is generated by code.
    w = new LongReadable
    w.setEncoding 'utf8'
    streamLexer2 = new LexerTransform dfas
    streamParser2 = new ParserTransform grammar

    # A third stream is read from `rows.json`
    r = fs.createReadStream('./test/rows.json','utf8')
    r.setEncoding('utf8')
    streamLexer3 = new LexerTransform dfas
    streamParser3 = new ParserTransform grammar2

    # All three streams are expected to complete.
    counted = 3

    s
      .pipe streamLexer
      .pipe streamParser
      .on 'data', (data) ->
        expect(data).to.have.property 'name', 'parser-transform'
        expect(data).to.have.property('scripts').with.property('test')
        done() if --counted is 0

    w
      .pipe streamLexer2
      # .pipe new Tee # use this to see chunks generated by the lexer
      .pipe streamParser2
      # .pipe new Tee # use this to see chunks generated by the parser
      .on 'data', (data) ->
        expect(data).to.have.length 21
        expect(data).to.have.property 0, 'yes'
        expect(data).to.have.property 1, 'yes'
        expect(data).to.have.property 20, 4
        done() if --counted is 0

    expected_rows = 3
    r
      .pipe streamLexer3
      .pipe streamParser3
      .on 'data', (data) ->
        expect(data).to.have.property('prefix').with.length(2)
        expect(data).to.have.property('prefix').with.property(0,'rows')
        expect(data).to.have.property('value').with.property('_id')
        # Only finish after all three expected rows have arrived.
        return unless --expected_rows is 0
        done() if --counted is 0
    return