Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ var converter = require('json-2-csv');
* `FIELD` - String - Field Delimiter. Default: `','`
* `ARRAY` - String - Array Value Delimiter. Default: `';'`
* `WRAP` - String - Wrap values in the delimiter of choice (e.g. wrap values in quotes). Default: `''`
* `CHECK_SCHEMA_DIFFERENCES` - Boolean - Should we require all documents to have the same schema? Default: `true`
* `PREPEND_HEADER` - Boolean - Should the auto-generated header be prepended as the first line in the CSV? Default: `true`
* `EOL` - String - End of Line Delimiter. Default: `'\n'`
* `KEYS` - Array - Specify the keys (as strings) that should be converted. Default: `null`
Expand Down Expand Up @@ -164,6 +165,7 @@ Lines : 97.99% ( 146/149 )
- Allows for custom field delimiters, end of line delimiters, etc.
- Promisifiable via bluebird's .promisify(<function>) and .promisifyAll(<object>) (as of 1.1.1)
- Wrapped value support for json2csv and csv2json (as of 1.3.0)
- Support for multiple different schemas (as of 1.4.0)

## F.A.Q.

Expand Down
2 changes: 1 addition & 1 deletion bower.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "json-2-csv",
"version": "1.4.0",
"homepage": "https://github.com/mrodrig/json-2-csv",
"moduleType": [
"node"
Expand Down
3 changes: 2 additions & 1 deletion lib/constants.json
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
"EOL" : "\n",
"PREPEND_HEADER" : true,
"PARSE_CSV_NUMBERS" : false,
"KEYS" : null,
"CHECK_SCHEMA_DIFFERENCES": true
}
}
51 changes: 34 additions & 17 deletions lib/json-2-csv.js
Original file line number Diff line number Diff line change
Expand Up @@ -23,27 +23,41 @@ var generateHeading = function(data) {
}
});

// Check for a consistent schema that does not require the same order:
// if we only have one document - then there is no possibility of multiple schemas
if (keys && keys.length <= 1) {
return Promise.resolve(_.flatten(keys) || []);
// If the user wants to check for the same schema:
if (options.CHECK_SCHEMA_DIFFERENCES) {
// Check for a consistent schema that does not require the same order:
// if we only have one document - then there is no possibility of multiple schemas
if (keys && keys.length <= 1) {
return Promise.resolve(_.flatten(keys) || []);
}
// else - multiple documents - ensure only one schema (regardless of field ordering)
var firstDocSchema = _.flatten(keys[0]),
schemaDifferences = 0;

_.each(keys, function (keyList) {
// If there is a difference between the schemas, increment the counter of schema inconsistencies
var diff = _.difference(firstDocSchema, _.flatten(keyList));
if (!_.isEqual(diff, [])) {
schemaDifferences++;
}
});

// If there are schema inconsistencies, throw a schema not the same error
if (schemaDifferences) {
return Promise.reject(new Error(constants.Errors.json2csv.notSameSchema));
}

return Promise.resolve(_.flatten(keys[0]));
}
// else - multiple documents - ensure only one schema (regardless of field ordering)
var firstDocSchema = _.flatten(keys[0]),
schemaDifferences = 0;

var uniqueKeys = [];

// Otherwise, we do not care if the schemas are different, so we should merge them via union:
_.each(keys, function (keyList) {
// If there is a difference between the schemas, increment the counter of schema inconsistencies
var diff = _.difference(firstDocSchema, _.flatten(keyList));
if (!_.isEqual(diff, [])) {
schemaDifferences++;
}
uniqueKeys = _.union(uniqueKeys, _.flatten(keyList));
});

// If there are schema inconsistencies, throw a schema not the same error
if (schemaDifferences) { return Promise.reject(new Error(constants.Errors.json2csv.notSameSchema)); }

return Promise.resolve(_.flatten(keys[0]));
return Promise.resolve(uniqueKeys);
};

/**
Expand Down Expand Up @@ -79,8 +93,11 @@ var generateDocumentHeading = function(heading, data) {
/**
 * Converts a single JSON document into an array of CSV field values.
 * @param data Object - the document being converted
 * @param keys Array - the (possibly nested, dot-delimited) keys to extract
 * @returns Array - one converted CSV value per key, in key order
 */
var convertData = function (data, keys) {
    // Reduce each key in the data to its CSV value
    return _.reduce(keys, function (output, key) {
        // Retrieve the appropriate field data via its key path
        var fieldData = path.evaluatePath(data, key);
        // Keys missing from this document (possible when schema checking is
        // disabled and documents have different schemas) are emitted as 'null'
        if (_.isUndefined(fieldData)) { fieldData = 'null'; }
        // Add the CSV representation of the data at the key in the document to the output array
        return output.concat(convertField(fieldData));
    }, []);
};

Expand Down
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
"author": "mrodrig",
"name": "json-2-csv",
"description": "A JSON to CSV and CSV to JSON converter that natively supports sub-documents and auto-generates the CSV heading.",
"version": "1.4.0",
"repository": {
"type": "git",
"url": "http://github.com/mrodrig/json-2-csv.git"
Expand Down
5 changes: 5 additions & 0 deletions test/CSV/quoted/differentSchemas.csv
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
"carModel","price","color","mileage"
"Audi","10000","blue","7200"
"BMW","15000","red","null"
"Mercedes","20000","yellow","null"
"Porsche","30000","green","null"
5 changes: 5 additions & 0 deletions test/CSV/unQuoted/differentSchemas.csv
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
carModel,price,color,mileage
Audi,10000,blue,7200
BMW,15000,red,null
Mercedes,20000,yellow,null
Porsche,30000,green,null
6 changes: 4 additions & 2 deletions test/testCsvFilesList.json
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,8 @@
{"key": "nestedQuotes", "file": "test/CSV/unQuoted/nestedQuotes.csv"},
{"key": "noData", "file": "test/CSV/unQuoted/noData.csv"},
{"key": "regularJson", "file": "test/CSV/unQuoted/regularJson.csv"},
{"key": "singleDoc", "file": "test/CSV/unQuoted/singleDoc.csv"},
{"key": "differentSchemas", "file": "test/CSV/unQuoted/differentSchemas.csv"}
]
},
{
Expand All @@ -24,7 +25,8 @@
{"key": "nestedQuotes", "file": "test/CSV/quoted/nestedQuotes.csv"},
{"key": "noData", "file": "test/CSV/quoted/noData.csv"},
{"key": "regularJson", "file": "test/CSV/quoted/regularJson.csv"},
{"key": "singleDoc", "file": "test/CSV/quoted/singleDoc.csv"},
{"key": "differentSchemas", "file": "test/CSV/quoted/differentSchemas.csv"}
]
}
]
115 changes: 114 additions & 1 deletion test/testJson2Csv.js
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,19 @@ var json2csvTests = function () {
});
});

it('should convert two documents with different schemas properly', function (done) {
    // Declare opts locally (original assigned an undeclared variable — an
    // implicit global that could leak state between tests); deep-copy the
    // shared options so this test's mutation does not affect siblings.
    var opts = JSON.parse(JSON.stringify(options));
    opts.CHECK_SCHEMA_DIFFERENCES = false;

    converter.json2csv(jsonTestData.differentSchemas, function (err, csv) {
        if (err) { throw err; }
        true.should.equal(_.isEqual(err, null));
        csv.should.equal(csvTestData.unQuoted.differentSchemas);
        // Header + 4 data rows + trailing EOL => 6 segments
        csv.split(options.EOL).length.should.equal(6);
        done();
    }, opts);
});

it('should throw an error if the documents do not have the same schema', function (done) {
converter.json2csv(jsonTestData.differentSchemas, function (err, csv) {
err.message.should.equal(constants.Errors.json2csv.notSameSchema);
Expand Down Expand Up @@ -280,6 +293,19 @@ var json2csvTests = function () {
}, opts);
});

it('should convert two documents with different schemas properly', function (done) {
    // Declare opts locally (original assigned an undeclared variable — an
    // implicit global that could leak state between tests); deep-copy the
    // shared options so this test's mutation does not affect siblings.
    var opts = JSON.parse(JSON.stringify(options));
    opts.CHECK_SCHEMA_DIFFERENCES = false;

    converter.json2csv(jsonTestData.differentSchemas, function (err, csv) {
        if (err) { throw err; }
        true.should.equal(_.isEqual(err, null));
        csv.should.equal(csvTestData.unQuoted.differentSchemas);
        // Header + 4 data rows + trailing EOL => 6 segments
        csv.split(options.EOL).length.should.equal(6);
        done();
    }, opts);
});

it('should throw an error if the documents do not have the same schema', function (done) {
converter.json2csv(jsonTestData.differentSchemas, function (err, csv) {
err.message.should.equal(constants.Errors.json2csv.notSameSchema);
Expand Down Expand Up @@ -444,7 +470,7 @@ var json2csvTests = function () {
});

it('should repress the heading', function (done) {
opts = JSON.parse(JSON.stringify(options));
var opts = JSON.parse(JSON.stringify(options));
opts.PREPEND_HEADER = false;

converter.json2csv(jsonTestData.sameSchemaDifferentOrdering, function (err, csv) {
Expand All @@ -456,6 +482,20 @@ var json2csvTests = function () {
}, opts);
});

it('should convert two documents with different schemas properly', function (done) {
var opts = JSON.parse(JSON.stringify(options));
opts.PREPEND_HEADER = true;
opts.CHECK_SCHEMA_DIFFERENCES = false;

converter.json2csv(jsonTestData.differentSchemas, function (err, csv) {
if (err) { throw err; }
true.should.equal(_.isEqual(err, null));
csv.should.equal(csvTestData.unQuoted.differentSchemas.replace(/,/g, options.DELIMITER.FIELD).split(options.EOL).join(options.EOL));
csv.split(options.EOL).length.should.equal(6);
done();
}, opts);
});

it('should throw an error if the documents do not have the same schema', function (done) {
converter.json2csv(jsonTestData.differentSchemas, function (err, csv) {
err.message.should.equal(constants.Errors.json2csv.notSameSchema);
Expand Down Expand Up @@ -642,6 +682,19 @@ var json2csvTests = function () {
}, opts);
});

it('should convert two documents with different schemas properly', function (done) {
var opts = JSON.parse(JSON.stringify(options));
opts.CHECK_SCHEMA_DIFFERENCES = false;

converter.json2csv(jsonTestData.differentSchemas, function (err, csv) {
if (err) { throw err; }
true.should.equal(_.isEqual(err, null));
csv.should.equal(csvTestData.quoted.differentSchemas);
csv.split(options.EOL).length.should.equal(6);
done();
}, opts);
});

it('should throw an error if the documents do not have the same schema', function (done) {
converter.json2csv(jsonTestData.differentSchemas, function (err, csv) {
err.message.should.equal(constants.Errors.json2csv.notSameSchema);
Expand Down Expand Up @@ -846,6 +899,21 @@ var json2csvTests = function () {
});
});

it('should convert two documents with different schemas properly', function (done) {
var opts = JSON.parse(JSON.stringify(options));
opts.CHECK_SCHEMA_DIFFERENCES = false;

converter.json2csvAsync(jsonTestData.differentSchemas, opts)
.then(function (csv) {
csv.should.equal(csvTestData.unQuoted.differentSchemas);
csv.split(options.EOL).length.should.equal(6);
done();
})
.catch(function (err) {
throw err;
});
});

it('should throw an error if the documents do not have the same schema', function (done) {
converter.json2csvAsync(jsonTestData.differentSchemas)
.then(function (csv) {
Expand Down Expand Up @@ -1017,6 +1085,21 @@ var json2csvTests = function () {
});
});

it('should convert two documents with different schemas properly', function (done) {
var opts = JSON.parse(JSON.stringify(options));
opts.CHECK_SCHEMA_DIFFERENCES = false;

converter.json2csvAsync(jsonTestData.differentSchemas, opts)
.then(function (csv) {
csv.should.equal(csvTestData.unQuoted.differentSchemas);
csv.split(options.EOL).length.should.equal(6);
done();
})
.catch(function (err) {
throw err;
});
});

it('should throw an error if the documents do not have the same schema', function (done) {
converter.json2csvAsync(jsonTestData.differentSchemas, options)
.then(function (csv) {
Expand Down Expand Up @@ -1189,6 +1272,21 @@ var json2csvTests = function () {
});
});

it('should convert two documents with different schemas properly', function (done) {
var opts = JSON.parse(JSON.stringify(options));
opts.CHECK_SCHEMA_DIFFERENCES = false;

converter.json2csvAsync(jsonTestData.differentSchemas, opts)
.then(function (csv) {
csv.should.equal(csvTestData.unQuoted.differentSchemas.replace(/,/g, options.DELIMITER.FIELD).split(options.EOL).join(options.EOL));
csv.split(options.EOL).length.should.equal(6);
done();
})
.catch(function (err) {
throw err;
});
});

it('should throw an error if the documents do not have the same schema', function (done) {
converter.json2csvAsync(jsonTestData.differentSchemas, options)
.then(function (csv) {
Expand Down Expand Up @@ -1373,6 +1471,21 @@ var json2csvTests = function () {
});
});

it('should convert two documents with different schemas properly', function (done) {
var opts = JSON.parse(JSON.stringify(options));
opts.CHECK_SCHEMA_DIFFERENCES = false;

converter.json2csvAsync(jsonTestData.differentSchemas, opts)
.then(function (csv) {
csv.should.equal(csvTestData.quoted.differentSchemas);
csv.split(options.EOL).length.should.equal(6);
done();
})
.catch(function (err) {
throw err;
});
});

it('should throw an error if the documents do not have the same schema', function (done) {
converter.json2csvAsync(jsonTestData.differentSchemas, options)
.then(function (csv) {
Expand Down