From 19439583c9d263ff618f0e225f17e37af42d5e2b Mon Sep 17 00:00:00 2001
From: Nathan
Date: Fri, 17 Mar 2017 14:18:08 -0400
Subject: [PATCH 1/3] Add Lexer#stream()

---
 moo.js       | 17 +++++++++++++
 test/test.js | 67 ++++++++++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 84 insertions(+)

diff --git a/moo.js b/moo.js
index 796afde..64ccc66 100644
--- a/moo.js
+++ b/moo.js
@@ -273,6 +273,23 @@
     this.reset()
   }
 
+  if (typeof require !== 'undefined') {
+    var Transform = require('stream').Transform
+
+    Lexer.prototype.stream = function(state) {
+      var self = this.reset('', state)
+      return new Transform({
+        readableObjectMode: true,
+        transform(chunk, encoding, cb) {
+          self.feed(chunk.toString())
+          var token
+          while (token = self.next()) this.push(token)
+          cb()
+        }
+      })
+    }
+  }
+
   Lexer.prototype.setState = function(state) {
     if (!state || this.state === state) return
     this.state = state
diff --git a/test/test.js b/test/test.js
index 3283785..a6f1202 100644
--- a/test/test.js
+++ b/test/test.js
@@ -469,6 +469,73 @@ describe('errors', () => {
 
 })
 
+describe('streams', () => {
+  const lexer = compile({
+    word: /[a-z]+/,
+    space: {match: /\s+/, lineBreaks: true},
+  })
+  const {Readable, Writable} = require('stream')
+
+  const inputs = ['this is\n', 'a test']
+  const tokens = [
+    {type: 'word', value: 'this'},
+    {type: 'space', value: ' '},
+    {type: 'word', value: 'is'},
+    {type: 'space', value: '\n'},
+    {type: 'word', value: 'a'},
+    {type: 'space', value: ' '},
+    {type: 'word', value: 'test'},
+  ]
+
+  test('can be written and read', () => new Promise((resolve, reject) => {
+    let index = 0
+    expect.assertions(tokens.length)
+
+    const s = lexer.stream()
+    s.write(inputs[0])
+    s.end(inputs[1])
+
+    s.on('data', tok => {
+      try {
+        expect(tok).toMatchObject(tokens[index++])
+      } catch (e) {reject(e)}
+    })
+    .on('error', reject)
+    .on('end', resolve)
+  }))
+
+  test('can be piped to/from', () => new Promise((resolve, reject) => {
+    let input = 0
+    const rs = new Readable({
+      read() {
+        try {
+          this.push(input < inputs.length ?
+            Buffer.from(inputs[input++], 'ascii') : null)
+        } catch (e) {console.log('read', e) || reject(e)}
+      }
+    })
+
+    let index = 0
+    expect.assertions(tokens.length)
+    const ws = new Writable({
+      objectMode: true,
+      write(tok, _, cb) {
+        try {
+          expect(tok).toMatchObject(tokens[index++])
+          cb()
+        } catch (e) {cb(e)}
+      }
+    })
+
+    rs
+    .on('error', reject).pipe(lexer.stream())
+    .on('error', reject).pipe(ws)
+    .on('error', reject)
+    .on('finish', resolve)
+  }))
+})
+
+
 describe('example: python', () => {
 
   test('kurt tokens', () => {
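
For reference, a minimal sketch of how the stream() method added in patch
1/3 is meant to be used. The token rules and the input path here are
illustrative only, not part of the patch:

    const fs = require('fs')
    const moo = require('moo')

    const lexer = moo.compile({
      word: /[a-z]+/,
      space: {match: /\s+/, lineBreaks: true},
    })

    // stream() returns a Transform whose writable side accepts string or
    // Buffer chunks and whose readable side (object mode) emits token
    // objects with type/value/line/col fields.
    fs.createReadStream('input.txt')      // 'input.txt' is a placeholder
      .pipe(lexer.stream())
      .on('data', tok => console.log(tok.type, JSON.stringify(tok.value)))

Because the returned Transform wraps the shared lexer (stream() calls
this.reset() on it), one lexer should back only one stream at a time.
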
From 47ac663912762baa2f1892a79d82143e1a19cc66 Mon Sep 17 00:00:00 2001
From: Nathan
Date: Fri, 17 Mar 2017 14:42:44 -0400
Subject: [PATCH 2/3] Detect CommonJS via module.exports, not require

---
 moo.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/moo.js b/moo.js
index 64ccc66..cfc7dc0 100644
--- a/moo.js
+++ b/moo.js
@@ -273,7 +273,7 @@
     this.reset()
   }
 
-  if (typeof require !== 'undefined') {
+  if (typeof module !== 'undefined' && module.exports) {
     var Transform = require('stream').Transform
 
     Lexer.prototype.stream = function(state) {

From ca92f26eb3ade81b5c427a3795ec3991a169b980 Mon Sep 17 00:00:00 2001
From: Nathan
Date: Fri, 17 Mar 2017 20:38:30 -0400
Subject: [PATCH 3/3] Make Lexer a transform stream

---
 moo.js       | 18 +++++++-----------
 test/test.js |  4 ++--
 2 files changed, 9 insertions(+), 13 deletions(-)

diff --git a/moo.js b/moo.js
index cfc7dc0..456436b 100644
--- a/moo.js
+++ b/moo.js
@@ -271,22 +271,18 @@
     this.stack = []
     this.setState(state)
     this.reset()
+    if (Transform) Transform.call(this, {readableObjectMode: true})
   }
 
   if (typeof module !== 'undefined' && module.exports) {
     var Transform = require('stream').Transform
+    require('util').inherits(Lexer, Transform)
 
-    Lexer.prototype.stream = function(state) {
-      var self = this.reset('', state)
-      return new Transform({
-        readableObjectMode: true,
-        transform(chunk, encoding, cb) {
-          self.feed(chunk.toString())
-          var token
-          while (token = self.next()) this.push(token)
-          cb()
-        }
-      })
+    Lexer.prototype._transform = function(chunk, encoding, cb) {
+      this.feed(chunk.toString())
+      var token
+      while (token = this.next()) this.push(token)
+      cb()
     }
   }
 
diff --git a/test/test.js b/test/test.js
index a6f1202..0d280e9 100644
--- a/test/test.js
+++ b/test/test.js
@@ -491,7 +491,7 @@ describe('streams', () => {
     let index = 0
     expect.assertions(tokens.length)
 
-    const s = lexer.stream()
+    const s = lexer.clone()
     s.write(inputs[0])
     s.end(inputs[1])
 
@@ -528,7 +528,7 @@ describe('streams', () => {
     })
 
     rs
-    .on('error', reject).pipe(lexer.stream())
+    .on('error', reject).pipe(lexer.clone())
     .on('error', reject).pipe(ws)
     .on('error', reject)
     .on('finish', resolve)
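
And a similar sketch against the final API from patch 3/3, where the Lexer
is itself a Transform stream. The token rules are again illustrative, and
clone() is assumed to return a fresh Lexer with the same rules, as the
updated tests rely on:

    const moo = require('moo')

    const lexer = moo.compile({
      word: /[a-z]+/,
      space: {match: /\s+/, lineBreaks: true},
    })

    // Every Lexer is now a Transform (readableObjectMode): write strings
    // in, read token objects out. Cloning keeps the original reusable.
    process.stdin
      .pipe(lexer.clone())
      .on('data', tok => console.log(tok))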