Merge pull request #32 from eladb/new-columns

writeOptions.newColumns
commit 2030bfdff577be7077e6dbd7c9f2fcd560faec7b (2 parents: 188c1f7 + 88b10ce), committed by David Worms on Jun 20, 2012
Showing with 63 additions and 5 deletions.
  1. +11 −3 lib/csv.js
  2. +1 −1 package.json
  3. +5 −0 readme.md
  4. +3 −0 samples/new-columns.in
  5. +22 −0 samples/new-columns.js
  6. +15 −1 test/columns.coffee
  7. +3 −0 test/columns/out_new.in
  8. +3 −0 test/columns/out_new.out
lib/csv.js
@@ -54,6 +54,7 @@ module.exports = function(){
flags: 'w',
encoding: 'utf8',
bufferSize: null,
+ newColumns: false,
end: true // Call `end()` on close
};
// A boolean that is true by default, but turns false after an 'error' occurred,
@@ -334,17 +335,24 @@ module.exports = function(){
state.line = line;
line = null;
}
- if(state.count === 0 && csv.writeOptions.header === true){
- write(csv.writeOptions.columns || csv.readOptions.columns);
- }
var line;
if(csv.transformer){
transforming = true;
line = csv.transformer(state.line, state.count);
+
+ if (csv.writeOptions.newColumns && !csv.writeOptions.columns && typeof line === 'object' && !Array.isArray(line)) {
+ Object.keys(line)
+ .filter(function(column) { return csv.readOptions.columns.indexOf(column) === -1; })
+ .forEach(function(column) { csv.readOptions.columns.push(column); });
+ }
+
transforming = false;
}else{
line = state.line;
}
+ if(state.count === 0 && csv.writeOptions.header === true){
+ write(csv.writeOptions.columns || csv.readOptions.columns);
+ }
write(line);
state.count++;
state.line = [];
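Note that the header write is moved after the call to the transformer, so that columns discovered on the first transformed record make it into the header row. A minimal standalone sketch of the column-collection step above (the `appendNewColumns` helper is hypothetical, not part of the library):

function appendNewColumns(columns, record) {
  // Append every key of the transformed record that is not already a known column
  if (record && typeof record === 'object' && !Array.isArray(record)) {
    Object.keys(record)
      .filter(function(column) { return columns.indexOf(column) === -1; })
      .forEach(function(column) { columns.push(column); });
  }
  return columns;
}

// Example with the columns of samples/new-columns.in and a record whose transform() added a `name` field:
var columns = ['id', 'lastname', 'firstname'];
appendNewColumns(columns, {id: '82', lastname: 'Preisner', firstname: 'Zbigniew', name: 'Zbigniew Preisner'});
console.log(columns); // [ 'id', 'lastname', 'firstname', 'name' ]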
package.json
@@ -1,6 +1,6 @@
{
"name": "csv",
- "version": "0.0.13",
+ "version": "0.0.14",
"description": "CSV parser with simple api, full of options and tested against large datasets.",
"author": "David Worms <david@adaltas.com>",
"contributors": [
readme.md
@@ -156,6 +156,11 @@ Options are:
- *end*
Prevent calling `end` on the destination, so that destination is no longer writable, similar to passing `{end: false}` option in `stream.pipe()`.
+- *newColumns*
+  If the `columns` option is not specified (which means columns will be taken from the reader
+  options), new columns added to a record during `transform()` will automatically be appended to the output.
+
+
Transforming data
-----------------
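As a usage sketch for the *newColumns* option documented above (file names are placeholders; the full sample added in samples/new-columns.js below does the same against real data):

var csv = require('csv');

csv()
.fromPath('people.in', { columns: true })                 // column names read from the first line
.toPath('people.out', { newColumns: true, header: true })
.transform(function(data){
  // `name` is not one of the input columns; with newColumns it is appended to the output
  data.name = data.firstname + ' ' + data.lastname;
  return data;
});
// people.out ends up with the extra `name` column in both the header and every record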
samples/new-columns.in
@@ -0,0 +1,3 @@
+id,lastname,firstname
+82,Preisner,Zbigniew
+94,Gainsbourg,Serge
samples/new-columns.js
@@ -0,0 +1,22 @@
+
+// CSV sample - Copyright David Worms <open@adaltas.com> (BSD Licensed)
+
+// node samples/new-columns.js
+var csv = require('..');
+
+csv()
+.fromPath(__dirname+'/new-columns.in',{
+  columns: true
+})
+.toStream(process.stdout, {
+  newColumns: true,
+  end: false
+})
+.transform(function(data){
+  data.name = data.firstname + ' ' + data.lastname;
+  return data;
+});
+
+// Will print something like:
+// 82,Preisner,Zbigniew,Zbigniew Preisner
+// 94,Gainsbourg,Serge,Serge Gainsbourg
test/columns.coffee
@@ -90,7 +90,21 @@ describe 'columns', ->
result.should.eql expect
fs.unlink "#{__dirname}/columns/out_named.tmp"
next()
-
+ it 'should emit new columns in output', (next) ->
+ csv()
+ .fromPath("#{__dirname}/columns/out_new.in", columns: true)
+ .toPath("#{__dirname}/columns/out_new.tmp", newColumns: true, header: true)
+ .transform (data) ->
+ data.should.be.an.a 'object'
+ data.FIELD_7 = 'new_field'
+ data
+ .on 'end', (count) ->
+ count.should.eql 2
+ expect = fs.readFileSync("#{__dirname}/columns/out_new.out").toString()
+ result = fs.readFileSync("#{__dirname}/columns/out_new.tmp").toString()
+ result.should.eql expect
+ fs.unlink "#{__dirname}/columns/out_new.tmp"
+ next()
test/columns/out_new.in
@@ -0,0 +1,3 @@
+FIELD_1,FIELD_2,FIELD_3,FIELD_4,FIELD_5,FIELD_6
+20322051544,1979,8.8017226E7,ABC,45,2000-01-01
+28392898392,1974,8.8392926E7,DEF,23,2050-11-27
test/columns/out_new.out
@@ -0,0 +1,3 @@
+FIELD_1,FIELD_2,FIELD_3,FIELD_4,FIELD_5,FIELD_6,FIELD_7
+20322051544,1979,8.8017226E7,ABC,45,2000-01-01,new_field
+28392898392,1974,8.8392926E7,DEF,23,2050-11-27,new_field
