Skip to content

Commit

Permalink
Merge pull request #52 from doug-martin/master
Browse files Browse the repository at this point in the history
v0.4.2
  • Loading branch information
doug-martin committed Aug 3, 2014
2 parents 9273b9b + ca43ee5 commit e337fd7
Show file tree
Hide file tree
Showing 8 changed files with 124 additions and 50 deletions.
2 changes: 1 addition & 1 deletion .travis.yml
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
language: node_js
node_js:
- 0.1
- "0.10"
before_script:
- npm install -g grunt-cli
7 changes: 6 additions & 1 deletion History.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,11 @@
# v0.4.2

* Added ability to specify a rowDelimiter when creating a csv.
* Added discardUnmappedColumns option to allow the ignoring of extra data [#45](https://github.com/C2FO/fast-csv/pull/45)

# v0.4.1

* Fixed race condition that occured if you called pause during a flush.
* Fixed race condition that occurred if you called pause during a flush.

# v0.4.0

Expand Down
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -216,6 +216,7 @@ csv
Formatting accepts the same options as parsing with an additional `transform` option.
* `transform(row)`: A function that accepts a row and returns a transformed one to be written.
* `rowDelimiter='\n'`: Specify an alternate row delimiter (i.e `\r\n`)
**`createWriteStream(options)`**
Expand Down
7 changes: 6 additions & 1 deletion docs/History.html
Original file line number Diff line number Diff line change
Expand Up @@ -176,9 +176,14 @@



<h1>v0.4.2</h1>
<ul>
<li>Added ability to specify a rowDelimiter when creating a csv.</li>
<li>Added discardUnmappedColumns option to allow the ignoring of extra data <a href="https://github.com/C2FO/fast-csv/pull/45">#45</a></li>
</ul>
<h1>v0.4.1</h1>
<ul>
<li>Fixed race condition that occured if you called pause during a flush.</li>
<li>Fixed race condition that occurred if you called pause during a flush.</li>
</ul>
<h1>v0.4.0</h1>
<ul>
Expand Down
24 changes: 13 additions & 11 deletions docs/index.html
Original file line number Diff line number Diff line change
Expand Up @@ -190,6 +190,7 @@ <h3>Parsing</h3>
<li><code>objectMode=true</code>: Ensure that <code>data</code> events have an object emitted rather than the stringified version set to false to have a stringified buffer.</li>
<li><code>headers=false</code>: Set to true if you expect the first line of your <code>CSV</code> to contain headers, alternately you can specify an array of headers to use.</li>
<li><code>ignoreEmpty=false</code>: If you wish to ignore empty rows.</li>
<li><code>discardUnmappedColumns=false</code>: If you want to discard columns that do not map to a header.</li>
<li><code>delimiter=&#39;,&#39;</code>: If your data uses an alternate delimiter such as <code>;</code> or <code>\t</code>.<ul>
<li><strong>NOTE</strong> When specifying an alternate <code>delimiter</code> you may only pass in a single character delimiter</li>
</ul>
Expand Down Expand Up @@ -218,7 +219,7 @@ <h3>Parsing</h3>

var csvStream = csv()
.on(&quot;record&quot;, function(data){
console.log(data):
console.log(data);
})
.on(&quot;end&quot;, function(){
console.log(&quot;done&quot;);
Expand All @@ -232,7 +233,7 @@ <h3>Parsing</h3>
csv
.fromPath(&quot;my.csv&quot;)
.on(&quot;record&quot;, function(data){
console.log(data):
console.log(data);
})
.on(&quot;end&quot;, function(){
console.log(&quot;done&quot;);
Expand All @@ -246,9 +247,9 @@ <h3>Parsing</h3>
&#39;a2,b2\n&#39;;

csv
.fromPath(CSV_STRING, {headers: true})
.fromString(CSV_STRING, {headers: true})
.on(&quot;record&quot;, function(data){
console.log(data):
console.log(data);
})
.on(&quot;end&quot;, function(){
console.log(&quot;done&quot;);
Expand All @@ -260,7 +261,7 @@ <h3>Parsing</h3>
csv()
.fromStream(stream)
.on(&quot;record&quot;, function(data){
console.log(data):
console.log(data);
})
.on(&quot;end&quot;, function(){
console.log(&quot;done&quot;);
Expand All @@ -272,7 +273,7 @@ <h3>Parsing</h3>
csv()
.fromStream(stream, {headers : true})
.on(&quot;record&quot;, function(data){
console.log(data):
console.log(data);
})
.on(&quot;end&quot;, function(){
console.log(&quot;done&quot;);
Expand All @@ -284,7 +285,7 @@ <h3>Parsing</h3>
csv
.fromStream(stream, {headers : [&quot;firstName&quot;, &quot;lastName&quot;, &quot;address&quot;]})
.on(&quot;record&quot;, function(data){
console.log(data):
console.log(data);
})
.on(&quot;end&quot;, function(){
console.log(&quot;done&quot;);
Expand All @@ -297,7 +298,7 @@ <h3>Parsing</h3>
csv
.fromStream(stream, {ignoreEmpty: true})
.on(&quot;record&quot;, function(data){
console.log(data):
console.log(data);
})
.on(&quot;end&quot;, function(){
console.log(&quot;done&quot;);
Expand All @@ -316,7 +317,7 @@ <h3>Validating</h3>
//do something with invalid row
})
.on(&quot;record&quot;, function(data){
console.log(data):
console.log(data);
})
.on(&quot;end&quot;, function(){
console.log(&quot;done&quot;);
Expand All @@ -332,7 +333,7 @@ <h3>Transforming</h3>
return data.reverse(); //reverse each row.
})
.on(&quot;record&quot;, function(data){
console.log(data):
console.log(data);
})
.on(&quot;end&quot;, function(){
console.log(&quot;done&quot;);
Expand All @@ -342,6 +343,7 @@ <h3>Formatting</h3>
<p>Formatting accepts the same options as parsing with an additional <code>transform</code> option.</p>
<ul>
<li><code>transform(row)</code>: A function that accepts a row and returns a transformed one to be written.</li>
<li><code>rowDelimiter=&#39;\n&#39;</code>: Specify an alternate row delimiter (i.e <code>\r\n</code>)</li>
</ul>
<p><strong><code>createWriteStream(options)</code></strong></p>
<p>This is the lowest level of the write methods, it creates a stream that can be used to create a csv of unknown size and pipe to an output csv.</p>
Expand Down Expand Up @@ -530,7 +532,7 @@ <h2>Piping from Parser to Writer</h2>
emailAddress: obj.Email_Address,
verified: obj.Verified
};
});
});
csv
.fromPath(&quot;in.csv&quot;, {headers: true})
.pipe(formatStream)
Expand Down
6 changes: 4 additions & 2 deletions lib/formatter.js
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@ function __write(writer, arr, options) {
var formatter = createFormatter(options),
transformer = extended.has(options, "transform") ? options.transform : defaultTransform,
hasHeaders = extended.has(options, "headers") ? options.headers : true,
rowDelimiter = options.rowDelimiter || LINE_BREAK,
headersLength = 0,
i = -1,
j = -1,
Expand Down Expand Up @@ -87,7 +88,7 @@ function __write(writer, arr, options) {
}
ret.push(formatter(vals));
}
writer.push(ret.join(LINE_BREAK));
writer.push(ret.join(rowDelimiter));
}
}

Expand All @@ -97,6 +98,7 @@ function CsvTransformStream(options) {
options = options || {};
Transform.call(this, options);
this.formatter = createFormatter(options);
this.rowDelimiter = options.rowDelimiter || "\n";
var hasHeaders = this.hasHeaders = extended.has(options, "headers") ? options.headers : true;
this.parsedHeaders = hasHeaders ? false : true;
this.buffer = [];
Expand Down Expand Up @@ -134,7 +136,7 @@ extended(CsvTransformStream).extend({
}
}
if (this.totalCount++) {
buffer.push("\n");
buffer.push(this.rowDelimiter);
}
if (isHash) {
var i = -1, headersLength = this.headersLength;
Expand Down
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "fast-csv",
"version": "0.4.1",
"version": "0.4.2",
"description": "CSV parser and writer",
"main": "index.js",
"scripts": {
Expand Down
125 changes: 92 additions & 33 deletions test/fast-csv.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -754,6 +754,16 @@ it.describe("fast-csv", function (it) {
}
}), "A,B\na1,b1\na2,b2");
});

it.should("support specifying an alternate row delimiter", function () {
assert.equal(csv.writeToString([
{a: "a1", b: "b1"},
{a: "a2", b: "b2"}
], {
headers: true,
rowDelimiter: '\r\n'
}), "a,b\r\na1,b1\r\na2,b2");
});
});

it.describe(".write", function (it) {
Expand Down Expand Up @@ -824,6 +834,18 @@ it.describe("fast-csv", function (it) {
}
}).on("error", next).pipe(ws);
});

it.should("support specifying an alternate row delimiter", function (next) {
var ws = new stream.Writable();
ws._write = function (data) {
assert.deepEqual(data.toString(), "a,b\r\na1,b1\r\na2,b2");
next();
};
csv.write([
{a: "a1", b: "b1"},
{a: "a2", b: "b2"}
], {headers: true, rowDelimiter: '\r\n'}).on("error", next).pipe(ws);
});
});

it.describe(".writeToPath", function (it) {
Expand Down Expand Up @@ -902,6 +924,20 @@ it.describe("fast-csv", function (it) {
next();
});
});

it.should("support specifying an alternate row delimiter", function (next) {
csv
.writeToPath(path.resolve(__dirname, "assets/test.csv"), [
{a: "a1", b: "b1"},
{a: "a2", b: "b2"}
], {headers: true, rowDelimiter: '\r\n'})
.on("error", next)
.on("finish", function () {
assert.equal(fs.readFileSync(path.resolve(__dirname, "assets/test.csv")).toString(), "a,b\r\na1,b1\r\na2,b2");
fs.unlinkSync(path.resolve(__dirname, "assets/test.csv"));
next();
});
});
});

it.describe(".createWriteStream", function (it) {
Expand Down Expand Up @@ -976,49 +1012,72 @@ it.describe("fast-csv", function (it) {
});
stream.write(null);
});
});

it.describe("piping from parser to formatter", function (it) {

it.should("allow piping from a parser to a formatter", function (next) {
it.should("support specifying an alternate row delimiter", function (next) {
var writable = fs.createWriteStream(path.resolve(__dirname, "assets/test.csv"), {encoding: "utf8"});
csv
.fromPath(path.resolve(__dirname, "./assets/test22.csv"), {headers: true, objectMode: true})
.on("error", next)
.pipe(csv.createWriteStream({headers: true}))
.on("error", next)
.pipe(writable)
var stream = csv
.createWriteStream({headers: true, rowDelimiter: '\r\n'})
.on("error", next);

writable
.on("finish", function () {
assert.equal(fs.readFileSync(path.resolve(__dirname, "assets/test.csv")).toString(), "a,b\na1,b1\na2,b2");
assert.equal(fs.readFileSync(path.resolve(__dirname, "assets/test.csv")).toString(), "a,b\r\na1,b1\r\na2,b2");
fs.unlinkSync(path.resolve(__dirname, "assets/test.csv"));
next();
});
stream.pipe(writable);
var vals = [
{a: "a1", b: "b1"},
{a: "a2", b: "b2"}
];
vals.forEach(function (item) {
stream.write(item);
});
stream.write(null);
});

it.should("preserve transforms", function (next) {
var writable = fs.createWriteStream(path.resolve(__dirname, "assets/test.csv"), {encoding: "utf8"});
csv
.fromPath(path.resolve(__dirname, "./assets/test22.csv"), {headers: true})
.transform(function (obj) {
obj.a = obj.a + "-parsed";
obj.b = obj.b + "-parsed";
return obj;
})
.on("error", next)
.pipe(csv.createWriteStream({headers: true}))
.on("error", next)
.pipe(writable)
.on("error", next);

writable
.on("finish", function () {
assert.equal(fs.readFileSync(path.resolve(__dirname, "assets/test.csv")).toString(), "a,b\na1-parsed,b1-parsed\na2-parsed,b2-parsed");
fs.unlinkSync(path.resolve(__dirname, "assets/test.csv"));
next();
});
it.describe("piping from parser to formatter", function (it) {

it.should("allow piping from a parser to a formatter", function (next) {
var writable = fs.createWriteStream(path.resolve(__dirname, "assets/test.csv"), {encoding: "utf8"});
csv
.fromPath(path.resolve(__dirname, "./assets/test22.csv"), {headers: true, objectMode: true})
.on("error", next)
.pipe(csv.createWriteStream({headers: true}))
.on("error", next)
.pipe(writable)
.on("error", next);

writable
.on("finish", function () {
assert.equal(fs.readFileSync(path.resolve(__dirname, "assets/test.csv")).toString(), "a,b\na1,b1\na2,b2");
fs.unlinkSync(path.resolve(__dirname, "assets/test.csv"));
next();
});
});

it.should("preserve transforms", function (next) {
var writable = fs.createWriteStream(path.resolve(__dirname, "assets/test.csv"), {encoding: "utf8"});
csv
.fromPath(path.resolve(__dirname, "./assets/test22.csv"), {headers: true})
.transform(function (obj) {
obj.a = obj.a + "-parsed";
obj.b = obj.b + "-parsed";
return obj;
})
.on("error", next)
.pipe(csv.createWriteStream({headers: true}))
.on("error", next)
.pipe(writable)
.on("error", next);

writable
.on("finish", function () {
assert.equal(fs.readFileSync(path.resolve(__dirname, "assets/test.csv")).toString(), "a,b\na1-parsed,b1-parsed\na2-parsed,b2-parsed");
fs.unlinkSync(path.resolve(__dirname, "assets/test.csv"));
next();
});
});
});
});
});
});

0 comments on commit e337fd7

Please sign in to comment.