Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP

Loading…

writeOptions.newColumns #32

Merged
merged 1 commit into from

2 participants

@eladb

If writeOptions.columns is not defined and writeOptions.newColumns is defined, any new keys
added to the object returned by transform() will be added as an output column.

See wdavidw/node-csv#31

@eladb eladb writeOptions.newColumns
If `writeOptions.columns` is not defined and `writeOptions.newColumns` is defined, any new keys
added to the object returned by `transform()` will be added as an output column.

See wdavidw/node-csv#31
88b10ce
@wdavidw wdavidw merged commit 2030bfd into wdavidw:master
@wdavidw
Owner

Thanks for your addition and for making my life so easy. I wish someone would come along with such a pull request with the streaming API fully implemented :)

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Commits on Jun 14, 2012
  1. @eladb

    writeOptions.newColumns

    eladb authored
    If `writeOptions.columns` is not defined and `writeOptions.newColumns` is defined, any new keys
    added to the object returned by `transform()` will be added as an output column.
    
    See wdavidw/node-csv#31
This page is out of date. Refresh to see the latest.
View
14 lib/csv.js
@@ -54,6 +54,7 @@ module.exports = function(){
flags: 'w',
encoding: 'utf8',
bufferSize: null,
+ newColumns: false,
end: true // Call `end()` on close
};
// A boolean that is true by default, but turns false after an 'error' occurred,
@@ -334,17 +335,24 @@ module.exports = function(){
state.line = line;
line = null;
}
- if(state.count === 0 && csv.writeOptions.header === true){
- write(csv.writeOptions.columns || csv.readOptions.columns);
- }
var line;
if(csv.transformer){
transforming = true;
line = csv.transformer(state.line, state.count);
+
+ if (csv.writeOptions.newColumns && !csv.writeOptions.columns && typeof line === 'object' && !Array.isArray(line)) {
+ Object.keys(line)
+ .filter(function(column) { return csv.readOptions.columns.indexOf(column) === -1; })
+ .forEach(function(column) { csv.readOptions.columns.push(column); });
+ }
+
transforming = false;
}else{
line = state.line;
}
+ if(state.count === 0 && csv.writeOptions.header === true){
+ write(csv.writeOptions.columns || csv.readOptions.columns);
+ }
write(line);
state.count++;
state.line = [];
View
2  package.json
@@ -1,6 +1,6 @@
{
"name": "csv",
- "version": "0.0.13",
+ "version": "0.0.14",
"description": "CSV parser with simple api, full of options and tested against large datasets.",
"author": "David Worms <david@adaltas.com>",
"contributors": [
View
5 readme.md
@@ -156,6 +156,11 @@ Options are:
- *end*
Prevent calling `end` on the destination, so that destination is no longer writable, similar to passing `{end: false}` option in `stream.pipe()`.
+- *newColumns*
+  If the `columns` option is not specified (which means columns will be taken from the reader
+  options), new columns will automatically be appended if they are added during `transform()`.
+
+
Transforming data
-----------------
View
3  samples/new-columns.in
@@ -0,0 +1,3 @@
+id,lastname,firstname
+82,Preisner,Zbigniew
+94,Gainsbourg,Serge
View
22 samples/new-columns.js
@@ -0,0 +1,22 @@
+
+// CSV sample - Copyright David Worms <open@adaltas.com> (BSD Licensed)
+
 // node samples/new-columns.js
+ var csv = require('..');
+
+ csv()
+ .fromPath(__dirname+'/columns.in',{
+ columns: true
+ })
+ .toStream(process.stdout, {
+ newColumns: true,
+ end: false
+ })
+ .transform(function(data){
+ data.name = data.firstname + ' ' + data.lastname
+ return data;
+ });
+
+ // Will print something like:
+ // 82,Preisner,Zbigniew,Zbigniew Preisner
+ // 94,Gainsbourg,Serge,Serge Gainsbourg
View
16 test/columns.coffee
@@ -90,7 +90,21 @@ describe 'columns', ->
result.should.eql expect
fs.unlink "#{__dirname}/columns/out_named.tmp"
next()
-
+ it 'should emit new columns in output', (next) ->
+ csv()
+ .fromPath("#{__dirname}/columns/out_new.in", columns: true)
+ .toPath("#{__dirname}/columns/out_new.tmp", newColumns: true, header: true)
+ .transform (data) ->
+ data.should.be.an.a 'object'
+ data.FIELD_7 = 'new_field'
+ data
+ .on 'end', (count) ->
+ count.should.eql 2
+ expect = fs.readFileSync("#{__dirname}/columns/out_new.out").toString()
+ result = fs.readFileSync("#{__dirname}/columns/out_new.tmp").toString()
+ result.should.eql expect
+ fs.unlink "#{__dirname}/columns/out_new.tmp"
+ next()
View
3  test/columns/out_new.in
@@ -0,0 +1,3 @@
+FIELD_1,FIELD_2,FIELD_3,FIELD_4,FIELD_5,FIELD_6
+20322051544,1979,8.8017226E7,ABC,45,2000-01-01
+28392898392,1974,8.8392926E7,DEF,23,2050-11-27
View
3  test/columns/out_new.out
@@ -0,0 +1,3 @@
+FIELD_1,FIELD_2,FIELD_3,FIELD_4,FIELD_5,FIELD_6,FIELD_7
+20322051544,1979,8.8017226E7,ABC,45,2000-01-01,new_field
+28392898392,1974,8.8392926E7,DEF,23,2050-11-27,new_field
Something went wrong with that request. Please try again.