
initial

commit f59e9fd846a8eb96f59b77fab58c19ff033ff12c (0 parents), committed by @dominictarr on Sep 23, 2011
Showing with 4,285 additions and 0 deletions.
  1. +3 −0 .gitignore
  2. +13 −0 examples/all_docs.js
  3. +80 −0 index.js
  4. +11 −0 package.json
  5. +84 −0 readme.markdown
  6. +4,030 −0 test/fixtures/all_npm.json
  7. +35 −0 test/test.js
  8. +29 −0 test/test2.js
3 .gitignore
@@ -0,0 +1,3 @@
+node_modules
+node_modules/*
+npm_debug.log
13 examples/all_docs.js
@@ -0,0 +1,13 @@
+var request = require('request')
+ , JSONStream = require('JSONStream')
+ , es = require('event-stream')
+
+var parser = JSONStream.parse(['rows', /./]) //emit parts that match this path (any element of the rows array)
+ , req = request({url: 'http://isaacs.couchone.com/registry/_all_docs'})
+ , logger = es.mapSync(function (data) { //create a stream that logs to stderr,
+ console.error(data)
+ return data
+ })
+
+req.pipe(parser)
+parser.pipe(logger)
80 index.js
@@ -0,0 +1,80 @@
+
+var Parser = require('jsonparse')
+ , Stream = require('stream').Stream
+
+/*
+
+ the value of this.stack that creationix's jsonparse has is weird.
+
+ it makes this code ugly, but his problem is way harder than mine,
+ so i'll forgive him.
+
+
+
+*/
+
+exports.parse = function (path) {
+
+ var stream = new Stream()
+ var parser = new Parser()
+ var count = 0
+ if(!path || !path.length) //treat a missing or empty path the same way
+ path = null
+ parser.onValue = function () {
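+ //jsonparse calls onValue for every completed value; remember the root so it can be emitted on end if nothing matches the path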
+ if(!this.root && this.stack.length == 1){
+ stream.root = this.value
+ }
+ if(!path || this.stack.length !== path.length)
+ return
+ var _path = []
+ for( var i = 0; i < (path.length - 1); i++) {
+ var key = path[i]
+ var c = this.stack[1 + (+i)]
+
+ if(!c) {
+ console.log(c, this.stack.length)
+ return
+ }
+ var m =
+ ( 'string' === typeof key
+ ? c.key == key
+ : key.exec(c.key))
+ _path.push(c.key)
+
+ if(!m)
+ return
+
+ }
+ var c = this
+
+ var key = path[path.length - 1]
+ var m =
+ ( 'string' === typeof key
+ ? c.key == key
+ : key.exec(c.key))
+ if(!m)
+ return
+ _path.push(c.key)
+
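+ //every element of the path matched, so emit the value that just completed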
+ count ++
+ stream.emit('data', this.value[this.key])
+ }
+
+
+ parser.onError = function (err) {
+ stream.emit('error', err)
+ }
+ stream.readable = true
+ stream.writable = true
+ stream.write = function (chunk) {
+ parser.write(chunk)
+ }
+ stream.end = function (data) {
+ if(data)
+ stream.write(data)
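+ //if nothing ever matched the path, fall back to emitting the root value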
+ if(!count)
+ stream.emit('data', stream.root)
+ stream.emit('end')
+ }
+ return stream
+}
11 package.json
@@ -0,0 +1,11 @@
+{ "name": "JSONStream"
+, "version": "0.0.0"
+, "description": "rawStream.pipe(JSONStream.parse()).pipe(streamOfObjects)"
+, "homepage": "http://github.com/dominictarr/JSONStream"
+, "repository":
+ { "type": "git"
+ , "url": "https://github.com/dominictarr/JSONStream.git" }
+, "dependencies": {}
+, "devDependencies": {}
+, "author": "Dominic Tarr <dominic.tarr@gmail.com> (http://bit.ly/dominictarr)"
+, "scripts": { "test": "meta-test test/*.js" } }
84 readme.markdown
@@ -0,0 +1,84 @@
+# JSONStream
+
+streaming JSON.parse and stringify
+
+## example
+
+first, create a parser (this part is the same in node v0.4.x and v0.5.x):
+```js
+
+var request = require('request')
+ , JSONStream = require('JSONStream')
+ , es = require('event-stream')
+
+var parser = JSONStream.parse(['rows', /./]) //emit parts that match this path (any element of the rows array)
+ , req = request({url: 'http://isaacs.couchone.com/registry/_all_docs'})
+```
+then, in node v0.4.x, pipe like this:
+
+``` js
+
+req.pipe(parser)
+parser.pipe(es.log(''))
+
+```
+
+or, in node v0.5.x:
+
+```js
+req.pipe(parser).pipe(es.log(''))
+
+```
+
+## JSONStream.parse(path)
+
+usually, a json API will return a list of objects.
+
+`path` should be an array of property names and/or `RegExp`s.
+any object that matches the path will be emitted as 'data' (and `pipe()`d downstream)
+
+if `path` is empty or null, or if no matches are made,
+JSONStream.parse will emit 'data' only once, with the root object.
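+
+so, to read a whole json file as one object, you could do something like this (a rough sketch, along the lines of test/test2.js):
+
+``` js
+var fs = require('fs')
+ , JSONStream = require('JSONStream')
+
+//an empty path matches nothing, so the root object is emitted once, on end
+var whole = JSONStream.parse([])
+
+whole.on('data', function (obj) {
+ console.log(obj) //the entire parsed file, as one object
+})
+
+fs.createReadStream('package.json').pipe(whole)
+```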
+
+for example, couchdb returns views like this:
+
+``` bash
+curl -sS 'localhost:5984/tests/_all_docs?include_docs=true'
+```
+returns this:
+
+``` js
+{"total_rows":129,"offset":0,"rows":[
+ { "id":"change1_0.6995461115147918"
+ , "key":"change1_0.6995461115147918"
+ , "value":{"rev":"1-e240bae28c7bb3667f02760f6398d508"}
+ , "doc":{
+ "_id": "change1_0.6995461115147918"
+ , "_rev": "1-e240bae28c7bb3667f02760f6398d508","hello":1}
+ },
+{"id":"change2_0.6995461115147918","key":"change2_0.6995461115147918","value":{"rev":"1-13677d36b98c0c075145bb8975105153"},"doc":{"_id":"change2_0.6995461115147918","_rev":"1-13677d36b98c0c075145bb8975105153","hello":2}},
+...
+]}
+
+```
+
+we are probably interested in the `rows.*.doc` values
+
+create a `Stream` that parses the documents from the feed like this:
+
+``` js
+JSONStream.parse(['rows', /./, 'doc']) //rows, ANYTHING, doc
+```
+
+awesome!
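+
+putting it all together (a rough sketch; note that `_all_docs` only includes the `doc` field when asked with `include_docs=true`):
+
+``` js
+var request = require('request')
+ , JSONStream = require('JSONStream')
+
+var parser = JSONStream.parse(['rows', /./, 'doc']) //rows, ANYTHING, doc
+ , req = request({url: 'http://localhost:5984/tests/_all_docs?include_docs=true'})
+
+parser.on('data', function (doc) {
+ console.error(doc._id) //each doc is emitted as soon as it has been parsed
+})
+
+req.pipe(parser)
+```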
+
+## todo
+
+ * JSONStream.stringify()
+
+## Acknowledgements
+
+ this module depends on https://github.com/creationix/jsonparse
+ by Tim Caswell.
+ thanks also to Florent Jaby for teaching me about parsing, with
+ https://github.com/Floby/node-json-streams
4,030 test/fixtures/all_npm.json
4,030 additions, 0 deletions not shown because the diff is too large.
35 test/test.js
@@ -0,0 +1,35 @@
+
+
+var fs = require ('fs')
+ , join = require('path').join
+ , file = join(__dirname, 'fixtures','all_npm.json')
+ , JSONStream = require('../')
+ , it = require('it-is')
+
+var expected = JSON.parse(fs.readFileSync(file))
+ , parser = JSONStream.parse(['rows', /\d+/ /*, 'value'*/])
+ , called = 0
+ , ended = false
+ , parsed = []
+
+fs.createReadStream(file).pipe(parser)
+
+parser.on('data', function (data) {
+ called ++
+ it(data).has({
+ id: it.isString(),
+ value: {rev: it.isString()},
+ key:it.isString()
+ })
+ parsed.push(data)
+})
+
+parser.on('end', function () {
+ ended = true
+})
+
+process.on('exit', function () {
+ it(called).equal(expected.rows.length)
+ it(parsed).deepEqual(expected.rows)
+ console.error('PASSED')
+})
29 test/test2.js
@@ -0,0 +1,29 @@
+
+
+var fs = require ('fs')
+ , join = require('path').join
+ , file = join(__dirname, '..','package.json')
+ , JSONStream = require('../')
+ , it = require('it-is')
+
+var expected = JSON.parse(fs.readFileSync(file))
+ , parser = JSONStream.parse([])
+ , called = 0
+ , ended = false
+ , parsed = []
+
+fs.createReadStream(file).pipe(parser)
+
+parser.on('data', function (data) {
+ called ++
+ it(data).deepEqual(expected)
+})
+
+parser.on('end', function () {
+ ended = true
+})
+
+process.on('exit', function () {
+ it(called).equal(1)
+ console.error('PASSED')
+})
