diff --git a/README.md b/README.md
index e4b0e74..d7ffc86 100755
--- a/README.md
+++ b/README.md
@@ -34,6 +34,7 @@ var converter = require('json-2-csv');
   * `FIELD` - String - Field Delimiter. Default: `','`
   * `ARRAY` - String - Array Value Delimiter. Default: `';'`
   * `WRAP` - String - Wrap values in the delimiter of choice (e.g. wrap values in quotes). Default: `''`
+* `CHECK_SCHEMA_DIFFERENCES` - Boolean - Should we require all documents to have the same schema? Default: `true`
 * `PREPEND_HEADER` - Boolean - Should the auto-generated header be prepended as the first line in the CSV? Default: `true`
 * `EOL` - String - End of Line Delimiter. Default: `'\n'`
 * `KEYS` - Array - Specify the keys (as strings) that should be converted. Default: `null`
@@ -164,6 +165,7 @@ Lines : 97.99% ( 146/149 )
 - Allows for custom field delimiters, end of line delimiters, etc.
 - Promisifiable via bluebird's .promisify() and .promisifyAll() (as of 1.1.1)
 - Wrapped value support for json2csv and csv2json (as of 1.3.0)
+- Support for multiple different schemas (as of 1.4.0)
 
 ## F.A.Q.
 
diff --git a/bower.json b/bower.json
index af83cf3..3a2fcfa 100644
--- a/bower.json
+++ b/bower.json
@@ -1,6 +1,6 @@
 {
   "name": "json-2-csv",
-  "version": "1.3.1",
+  "version": "1.4.0",
   "homepage": "https://github.com/mrodrig/json-2-csv",
   "moduleType": [
     "node"
diff --git a/lib/constants.json b/lib/constants.json
index 30bfa52..c419e8a 100644
--- a/lib/constants.json
+++ b/lib/constants.json
@@ -28,6 +28,7 @@
         "EOL" : "\n",
         "PREPEND_HEADER" : true,
         "PARSE_CSV_NUMBERS" : false,
-        "KEYS" : null
+        "KEYS" : null,
+        "CHECK_SCHEMA_DIFFERENCES": true
     }
 }
\ No newline at end of file
diff --git a/lib/json-2-csv.js b/lib/json-2-csv.js
index 5643bb4..7b8bc18 100755
--- a/lib/json-2-csv.js
+++ b/lib/json-2-csv.js
@@ -23,27 +23,41 @@ var generateHeading = function(data) {
         }
     });
 
-    // Check for a consistent schema that does not require the same order:
-    // if we only have one document - then there is no possibility of multiple schemas
-    if (keys && keys.length <= 1) {
-        return Promise.resolve(_.flatten(keys) || []);
+    // If the user wants to check for the same schema:
+    if (options.CHECK_SCHEMA_DIFFERENCES) {
+        // Check for a consistent schema that does not require the same order:
+        // if we only have one document - then there is no possibility of multiple schemas
+        if (keys && keys.length <= 1) {
+            return Promise.resolve(_.flatten(keys) || []);
+        }
+        // else - multiple documents - ensure only one schema (regardless of field ordering)
+        var firstDocSchema = _.flatten(keys[0]),
+            schemaDifferences = 0;
+
+        _.each(keys, function (keyList) {
+            // If there is a difference between the schemas, increment the counter of schema inconsistencies
+            var diff = _.difference(firstDocSchema, _.flatten(keyList));
+            if (!_.isEqual(diff, [])) {
+                schemaDifferences++;
+            }
+        });
+
+        // If there are schema inconsistencies, throw a schema not the same error
+        if (schemaDifferences) {
+            return Promise.reject(new Error(constants.Errors.json2csv.notSameSchema));
+        }
+
+        return Promise.resolve(_.flatten(keys[0]));
     }
-    // else - multiple documents - ensure only one schema (regardless of field ordering)
-    var firstDocSchema = _.flatten(keys[0]),
-        schemaDifferences = 0;
+    var uniqueKeys = [];
+
+    // Otherwise, we do not care if the schemas are different, so we should merge them via union:
     _.each(keys, function (keyList) {
-        // If there is a difference between the schemas, increment the counter of schema inconsistencies
-        var diff = _.difference(firstDocSchema, _.flatten(keyList));
-        if (!_.isEqual(diff, [])) {
-            schemaDifferences++;
-        }
+        uniqueKeys = _.union(uniqueKeys, _.flatten(keyList));
     });
-    // If there are schema inconsistencies, throw a schema not the same error
-    if (schemaDifferences) { return Promise.reject(new Error(constants.Errors.json2csv.notSameSchema)); }
-
-    return Promise.resolve(_.flatten(keys[0]));
+    return Promise.resolve(uniqueKeys);
 };
 
 /**
@@ -79,8 +93,11 @@ var generateDocumentHeading = function(heading, data) {
 var convertData = function (data, keys) {
     // Reduce each key in the data to its CSV value
     return _.reduce(keys, function (output, key) {
+        // Retrieve the appropriate field data
+        var fieldData = path.evaluatePath(data, key);
+        if (_.isUndefined(fieldData)) { fieldData = 'null'; }
         // Add the CSV representation of the data at the key in the document to the output array
-        return output.concat(convertField(path.evaluatePath(data, key)));
+        return output.concat(convertField(fieldData));
     }, []);
 };
diff --git a/package.json b/package.json
index 47bd346..7e44837 100755
--- a/package.json
+++ b/package.json
@@ -2,7 +2,7 @@
   "author": "mrodrig",
   "name": "json-2-csv",
   "description": "A JSON to CSV and CSV to JSON converter that natively supports sub-documents and auto-generates the CSV heading.",
-  "version": "1.3.1",
+  "version": "1.4.0",
   "repository": {
     "type": "git",
     "url": "http://github.com/mrodrig/json-2-csv.git"
diff --git a/test/CSV/quoted/differentSchemas.csv b/test/CSV/quoted/differentSchemas.csv
new file mode 100644
index 0000000..f6e0606
--- /dev/null
+++ b/test/CSV/quoted/differentSchemas.csv
@@ -0,0 +1,5 @@
+"carModel","price","color","mileage"
+"Audi","10000","blue","7200"
+"BMW","15000","red","null"
+"Mercedes","20000","yellow","null"
+"Porsche","30000","green","null"
diff --git a/test/CSV/unQuoted/differentSchemas.csv b/test/CSV/unQuoted/differentSchemas.csv
new file mode 100644
index 0000000..96000bd
--- /dev/null
+++ b/test/CSV/unQuoted/differentSchemas.csv
@@ -0,0 +1,5 @@
+carModel,price,color,mileage
+Audi,10000,blue,7200
+BMW,15000,red,null
+Mercedes,20000,yellow,null
+Porsche,30000,green,null
diff --git a/test/testCsvFilesList.json b/test/testCsvFilesList.json
index 60f4a00..fc7dcc0 100644
--- a/test/testCsvFilesList.json
+++ b/test/testCsvFilesList.json
@@ -9,7 +9,8 @@
       {"key": "nestedQuotes", "file": "test/CSV/unQuoted/nestedQuotes.csv"},
       {"key": "noData", "file": "test/CSV/unQuoted/noData.csv"},
       {"key": "regularJson", "file": "test/CSV/unQuoted/regularJson.csv"},
-      {"key": "singleDoc", "file": "test/CSV/unQuoted/singleDoc.csv"}
+      {"key": "singleDoc", "file": "test/CSV/unQuoted/singleDoc.csv"},
+      {"key": "differentSchemas", "file": "test/CSV/unQuoted/differentSchemas.csv"}
     ]
   },
   {
@@ -24,7 +25,8 @@
      {"key": "nestedQuotes", "file": "test/CSV/quoted/nestedQuotes.csv"},
      {"key": "noData", "file": "test/CSV/quoted/noData.csv"},
      {"key": "regularJson", "file": "test/CSV/quoted/regularJson.csv"},
-      {"key": "singleDoc", "file": "test/CSV/quoted/singleDoc.csv"}
+      {"key": "singleDoc", "file": "test/CSV/quoted/singleDoc.csv"},
+      {"key": "differentSchemas", "file": "test/CSV/quoted/differentSchemas.csv"}
     ]
   }
 ]
\ No newline at end of file
diff --git a/test/testJson2Csv.js b/test/testJson2Csv.js
index eda0d4e..4f0fcb4 100755
--- a/test/testJson2Csv.js
+++ b/test/testJson2Csv.js
@@ -96,6 +96,19 @@ var json2csvTests = function () {
             });
         });
 
+        it('should convert two documents with different schemas properly', function (done) {
+            opts = JSON.parse(JSON.stringify(options));
+            opts.CHECK_SCHEMA_DIFFERENCES = false;
+
+            converter.json2csv(jsonTestData.differentSchemas, function (err, csv) {
+                if (err) { throw err; }
+                true.should.equal(_.isEqual(err, null));
+                csv.should.equal(csvTestData.unQuoted.differentSchemas);
+                csv.split(options.EOL).length.should.equal(6);
+                done();
+            }, opts);
+        });
+
         it('should throw an error if the documents do not have the same schema', function (done) {
             converter.json2csv(jsonTestData.differentSchemas, function (err, csv) {
                 err.message.should.equal(constants.Errors.json2csv.notSameSchema);
@@ -280,6 +293,19 @@ var json2csvTests = function () {
             }, opts);
         });
 
+        it('should convert two documents with different schemas properly', function (done) {
+            opts = JSON.parse(JSON.stringify(options));
+            opts.CHECK_SCHEMA_DIFFERENCES = false;
+
+            converter.json2csv(jsonTestData.differentSchemas, function (err, csv) {
+                if (err) { throw err; }
+                true.should.equal(_.isEqual(err, null));
+                csv.should.equal(csvTestData.unQuoted.differentSchemas);
+                csv.split(options.EOL).length.should.equal(6);
+                done();
+            }, opts);
+        });
+
         it('should throw an error if the documents do not have the same schema', function (done) {
             converter.json2csv(jsonTestData.differentSchemas, function (err, csv) {
                 err.message.should.equal(constants.Errors.json2csv.notSameSchema);
@@ -444,7 +470,7 @@ var json2csvTests = function () {
         });
 
         it('should repress the heading', function (done) {
-            opts = JSON.parse(JSON.stringify(options));
+            var opts = JSON.parse(JSON.stringify(options));
             opts.PREPEND_HEADER = false;
 
             converter.json2csv(jsonTestData.sameSchemaDifferentOrdering, function (err, csv) {
@@ -456,6 +482,20 @@ var json2csvTests = function () {
             }, opts);
         });
 
+        it('should convert two documents with different schemas properly', function (done) {
+            var opts = JSON.parse(JSON.stringify(options));
+            opts.PREPEND_HEADER = true;
+            opts.CHECK_SCHEMA_DIFFERENCES = false;
+
+            converter.json2csv(jsonTestData.differentSchemas, function (err, csv) {
+                if (err) { throw err; }
+                true.should.equal(_.isEqual(err, null));
+                csv.should.equal(csvTestData.unQuoted.differentSchemas.replace(/,/g, options.DELIMITER.FIELD).split(options.EOL).join(options.EOL));
+                csv.split(options.EOL).length.should.equal(6);
+                done();
+            }, opts);
+        });
+
         it('should throw an error if the documents do not have the same schema', function (done) {
             converter.json2csv(jsonTestData.differentSchemas, function (err, csv) {
                 err.message.should.equal(constants.Errors.json2csv.notSameSchema);
@@ -642,6 +682,19 @@ var json2csvTests = function () {
             }, opts);
         });
 
+        it('should convert two documents with different schemas properly', function (done) {
+            var opts = JSON.parse(JSON.stringify(options));
+            opts.CHECK_SCHEMA_DIFFERENCES = false;
+
+            converter.json2csv(jsonTestData.differentSchemas, function (err, csv) {
+                if (err) { throw err; }
+                true.should.equal(_.isEqual(err, null));
+                csv.should.equal(csvTestData.quoted.differentSchemas);
+                csv.split(options.EOL).length.should.equal(6);
+                done();
+            }, opts);
+        });
+
         it('should throw an error if the documents do not have the same schema', function (done) {
             converter.json2csv(jsonTestData.differentSchemas, function (err, csv) {
                 err.message.should.equal(constants.Errors.json2csv.notSameSchema);
@@ -846,6 +899,21 @@ var json2csvTests = function () {
             });
         });
 
+        it('should convert two documents with different schemas properly', function (done) {
+            var opts = JSON.parse(JSON.stringify(options));
+            opts.CHECK_SCHEMA_DIFFERENCES = false;
+
+            converter.json2csvAsync(jsonTestData.differentSchemas, opts)
+                .then(function (csv) {
+                    csv.should.equal(csvTestData.unQuoted.differentSchemas);
+                    csv.split(options.EOL).length.should.equal(6);
+                    done();
+                })
+                .catch(function (err) {
+                    throw err;
+                });
+        });
+
         it('should throw an error if the documents do not have the same schema', function (done) {
             converter.json2csvAsync(jsonTestData.differentSchemas)
                 .then(function (csv) {
@@ -1017,6 +1085,21 @@ var json2csvTests = function () {
             });
         });
 
+        it('should convert two documents with different schemas properly', function (done) {
+            var opts = JSON.parse(JSON.stringify(options));
+            opts.CHECK_SCHEMA_DIFFERENCES = false;
+
+            converter.json2csvAsync(jsonTestData.differentSchemas, opts)
+                .then(function (csv) {
+                    csv.should.equal(csvTestData.unQuoted.differentSchemas);
+                    csv.split(options.EOL).length.should.equal(6);
+                    done();
+                })
+                .catch(function (err) {
+                    throw err;
+                });
+        });
+
         it('should throw an error if the documents do not have the same schema', function (done) {
             converter.json2csvAsync(jsonTestData.differentSchemas, options)
                 .then(function (csv) {
@@ -1189,6 +1272,21 @@ var json2csvTests = function () {
             });
         });
 
+        it('should convert two documents with different schemas properly', function (done) {
+            var opts = JSON.parse(JSON.stringify(options));
+            opts.CHECK_SCHEMA_DIFFERENCES = false;
+
+            converter.json2csvAsync(jsonTestData.differentSchemas, opts)
+                .then(function (csv) {
+                    csv.should.equal(csvTestData.unQuoted.differentSchemas.replace(/,/g, options.DELIMITER.FIELD).split(options.EOL).join(options.EOL));
+                    csv.split(options.EOL).length.should.equal(6);
+                    done();
+                })
+                .catch(function (err) {
+                    throw err;
+                });
+        });
+
         it('should throw an error if the documents do not have the same schema', function (done) {
             converter.json2csvAsync(jsonTestData.differentSchemas, options)
                 .then(function (csv) {
@@ -1373,6 +1471,21 @@ var json2csvTests = function () {
             });
         });
 
+        it('should convert two documents with different schemas properly', function (done) {
+            var opts = JSON.parse(JSON.stringify(options));
+            opts.CHECK_SCHEMA_DIFFERENCES = false;
+
+            converter.json2csvAsync(jsonTestData.differentSchemas, opts)
+                .then(function (csv) {
+                    csv.should.equal(csvTestData.quoted.differentSchemas);
+                    csv.split(options.EOL).length.should.equal(6);
+                    done();
+                })
+                .catch(function (err) {
+                    throw err;
+                });
+        });
+
         it('should throw an error if the documents do not have the same schema', function (done) {
             converter.json2csvAsync(jsonTestData.differentSchemas, options)
                 .then(function (csv) {
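
For reference, a minimal usage sketch of the new `CHECK_SCHEMA_DIFFERENCES` option. The call signature mirrors the tests above; the sample documents are illustrative and not part of this change, and the sketch assumes unspecified options fall back to the documented defaults:

```javascript
// Illustrative sketch only - the documents below are made up; usage mirrors test/testJson2Csv.js
var converter = require('json-2-csv');

var documents = [
    { carModel: 'Audi', price: 10000, color: 'blue', mileage: 7200 },
    { carModel: 'BMW', price: 15000, color: 'red' } // no "mileage" key
];

// Opt out of the schema check so differing keys are merged via union
var opts = { CHECK_SCHEMA_DIFFERENCES: false };

converter.json2csv(documents, function (err, csv) {
    if (err) { throw err; }
    console.log(csv); // fields missing from a document are emitted as 'null'
}, opts);
```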