Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP
Browse files

first commit

  • Loading branch information...
commit fd989d049c46e451b58ae26fb787339e7f04adc4 0 parents
@twilson63 authored
2  .gitignore
@@ -0,0 +1,2 @@
+node_modules
+.DS_Store
152 Cakefile
@@ -0,0 +1,152 @@
+# ** Cakefile Template ** is a Template for a common Cakefile that you may use in a coffeescript nodejs project.
+#
+# It comes baked in with 4 tasks:
+#
+# * build - compiles your src directory to the project root (coffee -o .)
+# * watch - watches any changes in your src directory and automatically recompiles to the project root
+# * test - runs mocha test framework, you can edit this task to use your favorite test framework
+# * docs - generates annotated documentation using docco
+fs = require 'fs'
+{print} = require 'util'
+{spawn, exec} = require 'child_process'
+
# Resolve commands through `which` when available, so spawn receives an
# absolute path; fall back to plain PATH lookup when `which` is absent.
try
  which = require('which').sync
catch err
  which = null

# ANSI terminal color codes. \x1b is the ESC character; plain
# CoffeeScript strings support hex escapes, so the original
# backtick-embedded JavaScript with octal escapes (`'\033...'`, which
# is illegal in strict mode) is not needed.
bold  = '\x1b[0;1m'
green = '\x1b[0;32m'
reset = '\x1b[0m'
red   = '\x1b[0;31m'
+
+# Internal Functions
+#
+# ## *walk*
+#
+# **given** string as dir which represents a directory in relation to local directory
+# **and** callback as done in the form of (err, results)
+# **then** recurse through directory returning an array of files
# Asynchronously collect every file path under `dir`, recursing into
# subdirectories; calls done(err, results) exactly once.
walk = (dir, done) ->
 results = []
 fs.readdir dir, (err, list) ->
  # Surface readdir failures immediately with an empty result list.
  return done(err, []) if err
  pending = list.length
  # An empty directory completes right away.
  return done(null, results) unless pending
  for name in list
   file = "#{dir}/#{name}"
   try
    stat = fs.statSync file
   catch err
    # NOTE(review): a failed stat leaves stat null, so the entry falls
    # through to the else branch and is still recorded as a file.
    stat = null
   if stat?.isDirectory()
    walk file, (err, res) ->
     # Fold the child's fully-walked paths into this level's results.
     results.push name for name in res
     # Each completed entry decrements pending; the last one reports.
     done(null, results) unless --pending
   else
    results.push file
    done(null, results) unless --pending
+
# ## *log*
#
# Print `message` wrapped in the given ANSI `color` escape, reset the
# terminal color, then append the optional `explanation` (blank when
# omitted).
log = (message, color, explanation) ->
  console.log "#{color}#{message}#{reset} #{explanation or ''}"
+
# ## *launch*
#
# Spawn `cmd` with the given `options` array, wiring the child's
# stdout/stderr through to this process. The command is resolved to an
# absolute path via `which` when that module is available. `callback`
# fires only on a clean (status 0) exit; failures are reported solely
# through the piped stderr.
launch = (cmd, options = [], callback) ->
  cmd = which(cmd) if which
  child = spawn cmd, options
  child.stdout.pipe process.stdout
  child.stderr.pipe process.stderr
  child.on 'exit', (status) ->
    callback?() if status is 0
+
# ## *build*
#
# Compile the src directory to the project root with the coffee CLI
# (-c -b -o . src). `watch` may be omitted entirely; when truthy, -w is
# prepended so coffee keeps recompiling on change.
build = (watch, callback) ->
  # Support build(callback) by shifting arguments.
  [callback, watch] = [watch, false] if typeof watch is 'function'
  options = ['-c', '-b', '-o', '.', 'src']
  options = ['-w'].concat options if watch
  launch 'coffee', options, callback
+
# ## *mocha*
#
# Run the mocha test runner with optional CLI flags; both arguments are
# optional, and mocha(callback) alone is supported.
mocha = (options, callback) ->
  # Support mocha(callback) by shifting arguments.
  [callback, options] = [options, []] if typeof options is 'function'
  launch 'mocha', options, callback
+
# ## *docco*
#
# Generate annotated documentation for every file under src/ by handing
# the walked file list to the docco CLI.
docco = (callback) ->
  walk 'src', (err, files) ->
    launch 'docco', files, callback
+
# Cakefile Tasks
#
# ## *docs*
#
# Generate annotated documentation for src/ using docco.
#
# <small>Usage</small>
#
# ```
# cake docs
# ```
task 'docs', 'generate documentation', -> docco()

# ## *build*
#
# Compiles the source, logging a smiley on success.
#
# <small>Usage</small>
#
# ```
# cake build
# ```
task 'build', 'compile source', -> build -> log ":)", green

# ## *watch*
#
# Recompiles your source whenever it changes (coffee -w).
#
# <small>Usage</small>
#
# ```
# cake watch
# ```
task 'watch', 'compile and watch', -> build true, -> log ":-)", green

# ## *test*
#
# Compiles the source first, then runs the mocha test suite.
#
# <small>Usage</small>
#
# ```
# cake test
# ```
task 'test', 'run tests', -> build -> mocha -> log ":)", green
42 index.js
@@ -0,0 +1,42 @@
+// Generated by CoffeeScript 1.3.3
+var csv2json, es, merge, parseRow, tidy;
+
+es = require('event-stream');
+
// Strip surrounding whitespace and a single pair of bounding double
// quotes from a CSV field; null/undefined input yields undefined.
function tidy(field) {
  if (field == null) {
    return void 0;
  }
  var unquoted = field.trim().replace(/^\"/, '').replace(/\"$/, '');
  return unquoted.trim();
}

// Zip the column names with the row values into one object, tidying
// both keys and values along the way.
function merge(keys, values) {
  var output = {};
  for (var i = 0; i < keys.length; i++) {
    output[tidy(keys[i])] = tidy(values[i]);
  }
  return output;
}
+
+exports.parseRow = parseRow = function(columns, sep) {
+ if (sep == null) {
+ sep = ',';
+ }
+ return es.map(function(data, cb) {
+ var record;
+ record = merge(columns, data.toString().split(sep));
+ return cb(null, JSON.stringify(record));
+ });
+};
+
+module.exports = csv2json = function(columns, row, col) {
+ if (row == null) {
+ row = ',';
+ }
+ if (col == null) {
+ col = '\n';
+ }
+ if (columns == null) {
+ throw new Error('columns must be defined.');
+ }
+ return es.pipe(es.split(col), parseRow(columns, row), es.join(','));
+};
16 package.json
@@ -0,0 +1,16 @@
+{
+ "author": "Tom Wilson <tom@jackhq.com>",
+ "name": "csv2json",
+ "description": "Stream CSV to JSON",
+ "version": "0.0.1",
+ "dependencies": {
+ "event-stream": "~1.3.0"
+ },
+ "devDependencies": {
+ "coffee-script": "1.3.3"
+ },
+ "optionalDependencies": {},
+ "engines": {
+ "node": "*"
+ }
+}
87 readme.md
@@ -0,0 +1,87 @@
+# csv2json = CSV to JSON Stream Parser
+
+Stream csv data and convert to json data
+
+# Usage
+
+``` javascript
+var csv2json = require('csv2json'),
+ columns = ['foo','bar','baz'];
+
+process.openStdin().pipe(csv2json(columns)).pipe(process.stdout);
+```
+
+``` coffeescript
+csv2json = require 'csv2json'
+columns = ['foo','bar','baz']
+
+process.openStdin().pipe(csv2json(columns)).pipe process.stdout
+```
+
+# API
+
+## var csv2json = require('csv2json')
+
+---
+
+### parameters
+
+- columns
+
+Array of column names for each row that will be set as the json keys
+
+- columnSeparator (default ',')
+
+The column separator for your csv file
+
+- rowSeparator (default '\n')
+
+The row separator for your csv file
+
+## var parseRow = require('csv2json').parseRow;
+
+---
+
+
+- columns
+
+Array of column names for each row that will be set as the json keys
+
+- columnSeparator (default ',')
+
+The column separator for your csv file
+
+example
+
+# example.js
+``` javascript
+var es = require('event-stream'),
+ parseRow = require('csv2json').parseRow;
+
+es.pipe(
+  process.openStdin(),
+ es.split(),
+ parseRow(['foo','bar','baz']),
+ es.join(','),
+ process.stdout
+);
+
+```
+
+``` sh
+echo '"baz","bar","foo"' | node example.js
+```
+
+## License
+
+MIT
+
+## Contribute
+
+pull requests are welcome
+
+## Thanks
+
+* [Event Stream](https://github.com/dominictarr/event-stream)
+* [NodeJs](http://nodejs.org)
+* [CoffeeScript](http://coffeescript.org)
27 src/index.coffee
@@ -0,0 +1,27 @@
+# csv2json stream parser
+es = require 'event-stream'
+
# Trim a csv field and peel off one pair of surrounding double quotes;
# null/undefined input passes through as undefined.
tidy = (field) ->
  field?.trim().replace(/^\"/, '').replace(/\"$/, '').trim()

# Pair each column name with the matching row value, tidying both
# sides, and return the resulting object.
merge = (keys, values) ->
  output = {}
  for key, i in keys
    output[tidy(key)] = tidy(values[i])
  output
+
# Through-stream mapping a single csv record (Buffer or string) onto a
# JSON-encoded object whose keys come from `columns`; `sep` is the
# field separator (default ','). If you already have one record per
# chunk you can pipe straight to csv2json.parseRow.
exports.parseRow = parseRow = (columns, sep = ',') ->
  es.map (data, cb) ->
    fields = data.toString().split sep
    cb null, JSON.stringify(merge(columns, fields))
+
# Main entry point: a pipeline that splits incoming csv text into
# records, converts each record to JSON, and joins the results with
# commas. NOTE(review): despite the names, `row` is the FIELD separator
# passed to parseRow (default ',') and `col` is the RECORD separator
# used for splitting (default '\n'). Throws when `columns` is missing.
module.exports = csv2json = (columns, row = ',', col = '\n') ->
  throw new Error('columns must be defined.') unless columns?
  es.pipe(
    es.split(col)
    parseRow(columns, row)
    es.join(',')
  )
4 test/data/sample.csv
@@ -0,0 +1,4 @@
+"foo", "bar", "baz"
+"foo1", "bar1", "baz1"
+"foo2", "bar2", "baz2"
+"foo3", "bar3", "baz3"
7 test/index.coffee
@@ -0,0 +1,7 @@
# Smoke test: stream the sample csv through the converter to stdout.
path = require 'path'
csv2json = require path.join(__dirname, '..')
fs = require 'fs'
# TODO: wrap in a real mocha spec asserting the expected JSON output.
#describe 'basic convert', ->
# it 'should be successful', (done) ->

# fs.createReadStream replaces the legacy fs.FileReadStream alias, and
# the pipe chain is fixed: stream.pipe(dest) returns dest, so the old
# a.pipe(b.pipe(c)) form piped the raw csv straight to stdout,
# bypassing the converter entirely.
stream = fs.createReadStream path.join(__dirname, 'data', 'sample.csv')
stream.pipe(csv2json(["foo", "bar", "baz"])).pipe(process.stdout)
Please sign in to comment.
Something went wrong with that request. Please try again.