From 5a42fcb458fb7150f685b8a4ed7fbe0b78e7e62a Mon Sep 17 00:00:00 2001 From: Ilya Radchenko Date: Sat, 3 Oct 2020 12:00:37 -0400 Subject: [PATCH] docs: clean up wording and typos, also format js using prettier --- README.md | 223 ++++++++++++++++++----------- docs/cli-examples.md | 8 +- docs/parser-examples.md | 305 +++++++++++++++++++++++----------------- 3 files changed, 317 insertions(+), 219 deletions(-) diff --git a/README.md b/README.md index 47ab3b09..78224f15 100644 --- a/README.md +++ b/README.md @@ -96,7 +96,7 @@ For more details, you can check some of our CLI usage [examples](docs/cli-exampl The programatic APIs take a configuration object very similar to the CLI options. All APIs take the exact same options. - `fields` - Array of Objects/Strings. Defaults to toplevel JSON attributes. See example below. -- `ndjson` - Boolean, indicates that the data is in NDJSON format. Only effective when using the streaming API and not in object mode. +- `ndjson` - Boolean, indicates that the data is in NDJSON format. Only effective when using the streaming API and not in object mode. - `transforms` - Array of transforms. A transform is a function that receives a data recod and returns a transformed record. Transforms are executed in order before converting the data record into a CSV row. See bellow for more details. - `formatters` - Object where the each key is a Javascript data type and its associated value is a formatters for the given type. A formatter is a function that receives the raw js value of a given type and formats it as a valid CSV cell. Supported types are the types returned by `typeof` i.e. `undefined`, `boolean`, `number`, `bigint`, `string`, `symbol`, `function` and `object`. - `defaultValue` - Default value to use when missing data. Defaults to `` if not specified. (Overridden by `fields[].default`) @@ -108,7 +108,7 @@ The programatic APIs take a configuration object very similar to the CLI options ### Transforms -json2csv supports transforms. 
A transform is a function that receives a data recod and returns a transformed record. +json2csv supports transforms. A transform is a function that receives a data record and returns a transformed record. #### Custom transforms @@ -118,38 +118,50 @@ function doNothing(item) { return transformedItem; } ``` -or using ES6 + +or using ES6 + ```js const doNothing = (item) => { // apply tranformations or create new object return transformedItem; -} +}; ``` For example, let's add a line counter to our CSV, capitalize the car field and change the price to be in Ks (1000s). + ```js function addCounter() { let counter = 1; - return (item) => ({ counter: counter++, ...item, car: item.car.toUpperCase(), price: item.price / 1000 }); + return (item) => ({ + counter: counter++, + ...item, + car: item.car.toUpperCase(), + price: item.price / 1000, + }); } ``` + Then you can add `addCounter()` to the `transforms` array. -The reason to wrap the actual transform in a factory function is so the counter always starts with one and you can reuse it. But it's nor strictly necessary. +The reason to wrap the actual transform in a factory function is so the counter always starts with one and you can reuse it. But it's not strictly necessary. -#### Built-in transforms +#### Built-in Transforms There is a number of built-in transform provider by the library. ```js -const { transforms: { unwind, flatten } } = require('json2csv'); +const { + transforms: { unwind, flatten }, +} = require('json2csv'); ``` ##### Unwind -The unwind transform deconstructs an array field from the input item to output a row for each element. Is's similar to MongoDB's $unwind aggregation. +The `unwind` transform deconstructs an array field from the input item to output a row for each element. It's similar to MongoDB's \$unwind aggregation. The transform needs to be instantiated and takes an options object as arguments containing: -- `paths` - Array of String, list the paths to the fields to be unwound. 
It's mandatory and should not be empty. + +- `paths` - Array of Strings, list the paths to the fields to be unwound. It's mandatory and should not be empty. - `blankOut` - Boolean, unwind using blank values instead of repeating data. Defaults to `false`. ```js @@ -161,9 +173,11 @@ unwind({ paths: ['fieldToUnwind'], blankOut: true }); ``` ##### Flatten -Flatten nested javascript objects into a single level object. + +Flatten nested JavaScript objects into a single level object. The transform needs to be instantiated and takes an options object as arguments containing: + - `objects` - Boolean, whether to flatten JSON objects or not. Defaults to `true`. - `arrays`- Boolean, whether to flatten Arrays or not. Defaults to `false`. - `separator` - String, separator to use between nested JSON keys when flattening a field. Defaults to `.`. @@ -179,7 +193,6 @@ flatten({ separator: '_' }); flatten({ objects: false, arrays: true }); ``` - ### Formatters json2csv supports formatters. A formatter is a function that receives the raw js value of a given type and formats it as a valid CSV cell. Supported types are the types returned by `typeof` i.e. `undefined`, `boolean`, `number`, `bigint`, `string`, `symbol`, `function` and `object`. @@ -188,7 +201,7 @@ There is a special type of formatter that only applies to the CSV headers if the Pay special attention to the `string` formatter since other formatters like the `headers` or `object` formatters, rely on the `string` formatter for the stringification. -#### Custom formatters +#### Custom Formatters ```js function formatType(itemOfType) { @@ -196,57 +209,66 @@ function formatType(itemOfType) { return formattedItem; } ``` -or using ES6 + +or using ES6 + ```js const formatType = (itemOfType) => { // apply tranformations or create new object return itemOfType; -} +}; ``` -For example, let's format functions as their name or 'unkwown'. +For example, let's format functions as their name or 'unknown'. 
```js
-const functionNameFormatter = (item) => item.name || 'unkown';
+const functionNameFormatter = (item) => item.name || 'unknown';
```
Then you can add `{ function: functionNameFormatter }` to the `formatters` object.
A less trivial example would be to ensure that string cells never take more than 20 characters.
+
```js
-const stringFixedFormatter = (stringLength, elipsis = '...') => (item) => item.length <= stringLength ? item : `${item.slice(0, stringLength - elipsis.length)}${elipsis}`;
+const stringFixedFormatter = (stringLength, ellipsis = '...') => (item) =>
+  item.length <= stringLength
+    ? item
+    : `${item.slice(0, stringLength - ellipsis.length)}${ellipsis}`;
```
Then you can add `{ string: stringFixedFormatter(20) }` to the `formatters` object.
-Or `stringFixedFormatter(20, '')` to don't use ellipsis and just clip the text.
+Or `stringFixedFormatter(20, '')` to not use the ellipsis and just clip the text.
As with the sample transform in the previous section, the reason to wrap the actual formatter in a factory function is so it can be parameterized easily.
-Keep in mind that the above example doesn't quote or escape the string which is problematic. A more realistic example could use our built-in string formated to do the quoting and escaping like:
+Keep in mind that the above example doesn't quote or escape the string which is problematic. A more realistic example could use our built-in string formatter to do the quoting and escaping like:
```js
-const { formatters: { string: defaulStringFormatter } } = require('json2csv');
+const { formatters: { string: defaultStringFormatter } } = require('json2csv');
-const stringFixedFormatter = (stringLength, elipsis = '...', stringFormatter = defaulStringFormatter()) => (item) => item.length <= stringLength ? 
item : stringFormatter(`${item.slice(0, stringLength - elipsis.length)}${elipsis})`;
+const stringFixedFormatter = (stringLength, ellipsis = '...', stringFormatter = defaultStringFormatter()) => (item) => item.length <= stringLength ? item : stringFormatter(`${item.slice(0, stringLength - ellipsis.length)}${ellipsis}`);
```
-#### Built-in formatters
+#### Built-in Formatters
-There is a number of built-in transform provider by the library.
+There is a number of built-in formatters provided by the library.
```js
-const { formatters: {
-  default: defaultFormatter,
-  number: numberFormatter,
-  string: stringFormatter,
-  stringQuoteOnlyIfNecessary: stringQuoteOnlyIfNecessaryFormatter,
-  stringExcel: stringExcelFormatter,
-  symbol: symbolFormatter,
-  object: objectFormatter,
-} } = require('json2csv');
+const {
+  formatters: {
+    default: defaultFormatter,
+    number: numberFormatter,
+    string: stringFormatter,
+    stringQuoteOnlyIfNecessary: stringQuoteOnlyIfNecessaryFormatter,
+    stringExcel: stringExcelFormatter,
+    symbol: symbolFormatter,
+    object: objectFormatter,
+  },
+} = require('json2csv');
```
##### Default
-Just rely on standard Javascript strignification.
+
+This formatter just relies on standard JavaScript stringification.
This is the default formatter for `undefined`, `boolean`, `number` and `bigint` elements.
It's not a factory but the formatter itself.
@@ -261,9 +283,11 @@ It's not a factory but the formatter itself.
```
##### Number
+
Format numbers with a fixed amount of decimals
The formatter needs to be instantiated and takes an options object as arguments containing:
+
- `separator` - String, separator to use between integer and decimal digits. Defaults to `.`. It's crucial that the decimal separator is not the same character as the CSV delimiter or the result CSV will be incorrect.
- `decimals` - Number, amount of decimals to keep. Defaults to all the available decimals.
@@ -282,8 +306,9 @@ The formatter needs to be instantiated and takes an options object as arguments Format strings quoting them and escaping illegal characters if needed. The formatter needs to be instantiated and takes an options object as arguments containing: + - `quote` - String, quote around cell values and column names. Defaults to `"`. -- `escapedQuote` - String, the value to replace escaped quotes in strings. Defaults to 2x`quotes` (for example `""`). +- `escapedQuote` - String, the value to replace escaped quotes in strings. Defaults to double-quotes (for example `""`). This is the default for `string` elements. @@ -308,6 +333,7 @@ This is the default for `string` elements. The default string formatter quote all strings. This is consistent but it is not mandatory according to the CSV standard. This formatter only quote strings if they don't contain quotes (by default `"`), the CSV separator character (by default `,`) or the end-of-line (by default `\n` or `\r\n` depending on you operating system). The formatter needs to be instantiated and takes an options object as arguments containing: + - `quote` - String, quote around cell values and column names. Defaults to `"`. - `escapedQuote` - String, the value to replace escaped quotes in strings. Defaults to 2x`quotes` (for example `""`). - `eol` - String, overrides the default OS line ending (i.e. `\n` on Unix and `\r\n` on Windows). Ensure that you use the same `eol` here as in the json2csv options. @@ -336,6 +362,7 @@ The formatter needs to be instantiated and takes an options object as arguments Converts string data into normalized Excel style data after formatting it using the given string formatter. The formatter needs to be instantiated and takes an options object as arguments containing: + - `stringFormatter` - Boolean, whether to flatten JSON objects or not. Defaults to our built-in `stringFormatter`. 
```js @@ -353,8 +380,8 @@ The formatter needs to be instantiated and takes an options object as arguments Format the symbol as its string value and then use the given string formatter i.e. `Symbol('My Symbol')` is formatted as `"My Symbol"`. The formatter needs to be instantiated and takes an options object as arguments containing: -- `stringFormatter` - Boolean, whether to flatten JSON objects or not. Defaults to our built-in `stringFormatter`. +- `stringFormatter` - Boolean, whether to flatten JSON objects or not. Defaults to our built-in `stringFormatter`. This is the default for `symbol` elements. @@ -375,9 +402,10 @@ Format the object using `JSON.stringify` and then the given string formatter. Some object types likes `Date` or Mongo's `ObjectId` are automatically quoted by `JSON.stringify`. This formatter, remove those quotes and uses the given string formatter for correct quoting and escaping. The formatter needs to be instantiated and takes an options object as arguments containing: -- `stringFormatter` - Boolean, whether to flatten JSON objects or not. Defaults to our built-in `stringFormatter`. -This is the default for `function` and `object` elements. `functions` are formatted as empty ``. +- `stringFormatter` - Boolean, whether to flatten JSON objects or not. Defaults to our built-in `stringFormatter`. + +This is the default for `function` and `object` elements. `function`'s are formatted as empty ``. ```js { @@ -390,9 +418,10 @@ This is the default for `function` and `object` elements. `functions` are format } ``` -### json2csv parser (Synchronous API) +### json2csv Parser (Synchronous API) + +`json2csv` can also be used programmatically as a synchronous converter using its `parse` method. -`json2csv` can also be used programatically as a synchronous converter using its `parse` method. 
```js
const { Parser } = require('json2csv');
@@ -426,20 +455,21 @@ try {
Both of the methods above load the entire JSON in memory and do the whole processing in-memory while blocking Javascript event loop. For that reason is rarely a good reason to use it until your data is very small or your application doesn't do anything else.
-### json2csv async parser (Streaming API)
+### json2csv Async Parser (Streaming API)
-The synchronous API has the downside of loading the entire JSON array in memory and blocking javascript's event loop while processing the data. This means that your server won't be able to process more request or your UI will become irresponsive while data is being processed. For those reasons, it is rarely a good reason to use it unless your data is very small or your application doesn't do anything else.
+The synchronous API has the downside of loading the entire JSON array in memory and blocking JavaScript's event loop while processing the data. This means that your server won't be able to process more requests or your UI will become unresponsive while data is being processed. For those reasons, it is rarely a good idea to use it unless your data is very small or your application doesn't do anything else.
-The async parser process the data as a non-blocking stream. This approach ensures a consistent memory footprint and avoid blocking javascript's event loop. Thus, it's better suited for large datasets or system with high concurrency.
+The async parser processes the data as a non-blocking stream. This approach ensures a consistent memory footprint and avoids blocking JavaScript's event loop. Thus, it's better suited for large datasets or systems with high concurrency.
One very important difference between the asynchronous and the synchronous APIs is that using the asynchronous API json objects are processed one by one. 
In practice, this means that only the fields in the first object of the array are automatically detected and other fields are just ignored. To avoid this, it's advisable to ensure that all the objects contain exactly the same fields or provide the list of fields using the `fields` option.
The async API takes a second options arguments that is directly passed to the underlying streams and accepts the same options as the standard [Node.js streams](https://nodejs.org/api/stream.html#stream_new_stream_duplex_options).
Instances of `AsyncParser` expose three objects:
-* *input:* Which allows to push more data
-* *processor:* A readable string representing the whole data processing. You can listen to all the standard events of Node.js streams.
-* *transform:* The json2csv transform. See bellow for more details.
+
+- _input:_ Allows you to push more data
+- _processor:_ A readable stream representing the whole data processing. You can listen to all the standard events of Node.js streams.
+- _transform:_ The json2csv transform. See below for more details.
```js
const { AsyncParser } = require('json2csv');
@@ -452,25 +482,26 @@ const asyncParser = new AsyncParser(opts, transformOpts);
let csv = '';
asyncParser.processor
-  .on('data', chunk => (csv += chunk.toString()))
+  .on('data', (chunk) => (csv += chunk.toString()))
  .on('end', () => console.log(csv))
-  .on('error', err => console.error(err));
-
+  .on('error', (err) => console.error(err));
+
// You can also listen for events on the conversion and see how the header or the lines are coming out.
asyncParser.transform
-  .on('header', header => console.log(header))
-  .on('line', line => console.log(line))
-  .on('error', err => console.log(err));
+  .on('header', (header) => console.log(header))
+  .on('line', (line) => console.log(line))
+  .on('error', (err) => console.log(err));
asyncParser.input.push(data); // This data might come from an HTTP request, etc. 
asyncParser.input.push(null); // Sending `null` to a stream signal that no more data is expected and ends it. ``` `AsyncParser` also exposes some convenience methods: -* `fromInput` allows you to set the input stream. -* `throughTransform` allows you to add transforms to the stream. -* `toOutput` allows you to set the output stream. -* `promise` returns a promise that resolves when the stream ends or errors. Takes a boolean parameter to indicate if the resulting CSV should be kept in-memory and be resolved by the promise. + +- `fromInput` allows you to set the input stream. +- `throughTransform` allows you to add transforms to the stream. +- `toOutput` allows you to set the output stream. +- `promise` returns a promise that resolves when the stream ends or errors. Takes a boolean parameter to indicate if the resulting CSV should be kept in-memory and be resolved by the promise. ```js const { createReadStream, createWriteStream } = require('fs'); @@ -485,9 +516,10 @@ const input = createReadStream(inputPath, { encoding: 'utf8' }); const asyncParser = new JSON2CSVAsyncParser(opts, transformOpts); const parsingProcessor = asyncParser.fromInput(input); -parsingProcessor.promise() - .then(csv => console.log(csv)) - .catch(err => console.error(err)); +parsingProcessor + .promise() + .then((csv) => console.log(csv)) + .catch((err) => console.error(err)); // Using the promise API just to know when the process finnish // but not actually load the CSV in memory @@ -496,7 +528,7 @@ const output = createWriteStream(outputPath, { encoding: 'utf8' }); const asyncParser = new JSON2CSVAsyncParser(opts, transformOpts); const parsingProcessor = asyncParser.fromInput(input).toOutput(output); -parsingProcessor.promise(false).catch(err => console.error(err)); +parsingProcessor.promise(false).catch((err) => console.error(err)); ``` you can also use the convenience method `parseAsync` which accept both JSON arrays/objects and readable streams and returns a promise. 
@@ -508,11 +540,11 @@ const fields = ['field1', 'field2', 'field3']; const opts = { fields }; parseAsync(myData, opts) - .then(csv => console.log(csv)) - .catch(err => console.error(err)); + .then((csv) => console.log(csv)) + .catch((err) => console.error(err)); ``` -### json2csv transform (Streaming API) +### json2csv Transform (Streaming API) json2csv also exposes the raw stream transform so you can pipe your json content into it. This is the same Transform that `AsyncParser` uses under the hood. @@ -532,21 +564,21 @@ const processor = input.pipe(json2csv).pipe(output); // You can also listen for events on the conversion and see how the header or the lines are coming out. json2csv - .on('header', header => console.log(header)) - .on('line', line => console.log(line)) - .on('error', err => console.log(err)); + .on('header', (header) => console.log(header)) + .on('line', (line) => console.log(line)) + .on('error', (err) => console.log(err)); ``` -The stream API can also work on object mode. This is useful when you have an input stream in object mode or if you are getting JSON objects one by one and want to convert them to CSV as they come. +The stream API can also work in object mode. This is useful when you have an input stream in object mode or if you are getting JSON objects one by one and want to convert them to CSV as they come. ```js -const { Transform } = require("json2csv"); +const { Transform } = require('json2csv'); const { Readable } = require('stream'); const input = new Readable({ objectMode: true }); input._read = () => {}; // myObjectEmitter is just a fake example representing anything that emit objects. 
-myObjectEmitter.on('object', obj => input.push(obj)); +myObjectEmitter.on('object', (obj) => input.push(obj)); // Pushing a null close the stream myObjectEmitter.end(() => input.push(null)); @@ -559,24 +591,24 @@ const json2csv = new Transform(opts, transformOpts); const processor = input.pipe(json2csv).pipe(output); ``` - ## Upgrading ### Upgrading from 5.X to 6.X The CLI hasn't changed at all. -In the javascript Javascript modules, `formatters` are introduced and the `quote`, `escapedQuote` and `excelStrings` options are removed. +In the JavaScript modules, `formatters` are introduced and the `quote`, `escapedQuote` and `excelStrings` options are removed. Custom `quote` and `escapedQuote` are applied by setting the properties in the `string` formatter. ```js const { Parser } = require('json2csv'); -const json2csvParser = new Parser({ quote: '\'', escapedQuote: '\\\'' }); +const json2csvParser = new Parser({ quote: "'", escapedQuote: "\\'" }); const csv = json2csvParser.parse(myData); ``` should be replaced by + ```js const { Parser, formatter: { string: stringFormatter } } = require('json2csv'); const json2csvParser = new Parser({ @@ -591,11 +623,16 @@ const csv = json2csvParser.parse(myData); ```js const { Parser } = require('json2csv'); -const json2csvParser = new Parser({ quote: '\'', escapedQuote: '\\\'', excelStrings: true }); +const json2csvParser = new Parser({ + quote: "'", + escapedQuote: "\\'", + excelStrings: true, +}); const csv = json2csvParser.parse(myData); ``` should be replaced by + ```js const { Parser, formatter: { stringExcel: stringExcelFormatter } } = require('json2csv'); const json2csvParser = new Parser({ @@ -616,17 +653,29 @@ The `stringify` option hass been removed. In the javascript Javascript modules, `transforms` are introduced and all the `unwind` and `flatten` -related options has been moved to their own transforms. 
-What used to be +What used to be + ```js const { Parser } = require('json2csv'); -const json2csvParser = new Parser({ unwind: paths, unwindBlank: true, flatten: true, flattenSeparator: '__' }); +const json2csvParser = new Parser({ + unwind: paths, + unwindBlank: true, + flatten: true, + flattenSeparator: '__', +}); const csv = json2csvParser.parse(myData); ``` should be replaced by + ```js -const { Parser, transform: { unwind, flatten } } = require('json2csv'); -const json2csvParser = new Parser({ transforms: [unwind({ paths, blankOut: true }), flatten('__')] }); +const { + Parser, + transform: { unwind, flatten }, +} = require('json2csv'); +const json2csvParser = new Parser({ + transforms: [unwind({ paths, blankOut: true }), flatten('__')], +}); const csv = json2csvParser.parse(myData); ``` @@ -635,12 +684,14 @@ You can se the documentation for json2csv v4.X.X [here](https://github.com/zemir ### Upgrading from 3.X to 4.X What in 3.X used to be + ```js const json2csv = require('json2csv'); const csv = json2csv({ data: myData, fields: myFields, unwindPath: paths, ... }); ``` should be replaced by + ```js const { Parser } = require('json2csv'); const json2csvParser = new Parser({ fields: myFields, unwind: paths, ... }); @@ -648,6 +699,7 @@ const csv = json2csvParser.parse(myData); ``` or the convenience method + ```js const json2csv = require('json2csv'); const csv = json2csv.parse(myData, { fields: myFields, unwind: paths, ... }); @@ -660,11 +712,12 @@ You can se the documentation for json2csv 3.11.5 [here](https://github.com/zemir ### Excel support -#### Avoiding excel autoformatting +#### Avoiding excel auto-formatting Excel tries to automatically detect the format of every field (number, date, string, etc.) regardless of whether the field is quoted or not. This might produce few undesired effects with, for example, serial numbers: + - Large numbers are displayed using scientific notation - Leading zeros are stripped. 
@@ -686,7 +739,6 @@ Excel only recognizes `\r\n` as valid new line inside a cell. Excel can display Unicode correctly (just setting the `withBOM` option to true). However, Excel can't save unicode so, if you perform any changes to the CSV and save it from Excel, the Unicode characters will not be displayed correctly. - ### PowerShell escaping PowerShell do some estrange double quote escaping escaping which results on each line of the CSV missing the first and last quote if outputting the result directly to stdout. Instead of that, it's advisable that you write the result directly to a file. @@ -708,17 +760,18 @@ json2csv is packaged using `rollup`. You can generate the packages running: ```sh npm run build ``` + which generates 3 files under the `dist folder`: -* `json2csv.umd.js` UMD module transpiled to ES5 -* `json2csv.esm.js` ES5 module (import/export) -* `json2csv.cjs.js` CommonJS module +- `json2csv.umd.js` UMD module transpiled to ES5 +- `json2csv.esm.js` ES5 module (import/export) +- `json2csv.cjs.js` CommonJS module When you use packaging tools like webpack and such, they know which version to use depending on your configuration. ### Linting & Testing -Run the folowing command to check the code style. +Run the following command to check the code style. ```sh $ npm run lint @@ -744,5 +797,5 @@ See [LICENSE.md]. 
[travis-badge-url]: https://travis-ci.org/zemirco/json2csv [coveralls-badge]: https://coveralls.io/repos/zemirco/json2csv/badge.svg?branch=master [coveralls-badge-url]: https://coveralls.io/r/zemirco/json2csv?branch=master -[CHANGELOG]: https://github.com/zemirco/json2csv/blob/master/CHANGELOG.md -[LICENSE.md]: https://github.com/zemirco/json2csv/blob/master/LICENSE.md +[changelog]: https://github.com/zemirco/json2csv/blob/master/CHANGELOG.md +[license.md]: https://github.com/zemirco/json2csv/blob/master/LICENSE.md diff --git a/docs/cli-examples.md b/docs/cli-examples.md index 5379931a..24022717 100644 --- a/docs/cli-examples.md +++ b/docs/cli-examples.md @@ -1,4 +1,4 @@ -# CLI examples +# CLI Examples All examples use this example [input file](https://github.com/zemirco/json2csv/blob/master/test/fixtures/json/default.json). @@ -42,11 +42,7 @@ $ json2csv -i input.json -c fieldsConfig.json -o out.csv where the file `fieldsConfig.json` contains ```json -[ - "carModel", - "price", - "color" -] +["carModel", "price", "color"] ``` ## Read input from stdin diff --git a/docs/parser-examples.md b/docs/parser-examples.md index b3a6cc5f..a826d724 100644 --- a/docs/parser-examples.md +++ b/docs/parser-examples.md @@ -1,26 +1,29 @@ -# Javascript module examples +# JavaScript Module Examples Most of the examples in this section use the same input data: ```js const myCars = [ { - "car": "Audi", - "price": 40000, - "color": "blue" - }, { - "car": "BMW", - "price": 35000, - "color": "black" - }, { - "car": "Porsche", - "price": 60000, - "color": "green" - } + car: 'Audi', + price: 40000, + color: 'blue', + }, + { + car: 'BMW', + price: 35000, + color: 'black', + }, + { + car: 'Porsche', + price: 60000, + color: 'green', + }, ]; ``` ## Example `fields` option + ```js { fields: [ @@ -101,13 +104,16 @@ will output to console ```js const { Parser } = require('json2csv'); -const fields = [{ - label: 'Car Name', - value: 'car' -},{ - label: 'Price USD', - value: 'price' -}]; +const 
fields = [ + { + label: 'Car Name', + value: 'car', + }, + { + label: 'Price USD', + value: 'price', + }, +]; const json2csvParser = new Parser({ fields }); const csv = json2csvParser.parse(myCars); @@ -133,18 +139,20 @@ const { Parser } = require('json2csv'); const myCars = [ { - "car": { "make": "Audi", "model": "A3" }, - "price": 40000, - "color": "blue" - }, { - "car": { "make": "BMW", "model": "F20" }, - "price": 35000, - "color": "black" - }, { - "car": { "make": "Porsche", "model": "9PA AF1" }, - "price": 60000, - "color": "green" - } + car: { make: 'Audi', model: 'A3' }, + price: 40000, + color: 'blue', + }, + { + car: { make: 'BMW', model: 'F20' }, + price: 35000, + color: 'black', + }, + { + car: { make: 'Porsche', model: '9PA AF1' }, + price: 60000, + color: 'green', + }, ]; const fields = ['car.make', 'car.model', 'price', 'color']; @@ -195,7 +203,10 @@ For example, you could use `*` as quotes and format numbers to always have 2 dec To avoid conflict between the number separator and the CSV delimiter, we can use a custom delimiter again. ```js -const { Parser, formatters: {string: stringFormatter, number: numberFormatter } } = require('json2csv'); +const { + Parser, + formatters: { string: stringFormatter, number: numberFormatter }, +} = require('json2csv'); const json2csvParser = new Parser({ delimiter: ';', @@ -246,29 +257,35 @@ car, price, color ## Unwind arrays -You can unwind arrays similar to MongoDB's $unwind operation using the `unwind` transform. +You can unwind arrays similar to MongoDB's \$unwind operation using the `unwind` transform. 
```js -const { Parser, transforms: { unwind } } = require('json2csv'); +const { + Parser, + transforms: { unwind }, +} = require('json2csv'); const myCars = [ { - "carModel": "Audi", - "price": 0, - "colors": ["blue","green","yellow"] - }, { - "carModel": "BMW", - "price": 15000, - "colors": ["red","blue"] - }, { - "carModel": "Mercedes", - "price": 20000, - "colors": "yellow" - }, { - "carModel": "Porsche", - "price": 30000, - "colors": ["green","teal","aqua"] - } + carModel: 'Audi', + price: 0, + colors: ['blue', 'green', 'yellow'], + }, + { + carModel: 'BMW', + price: 15000, + colors: ['red', 'blue'], + }, + { + carModel: 'Mercedes', + price: 20000, + colors: 'yellow', + }, + { + carModel: 'Porsche', + price: 30000, + colors: ['green', 'teal', 'aqua'], + }, ]; const fields = ['carModel', 'price', 'colors']; @@ -300,53 +317,68 @@ will output to console You can also unwind arrays multiple times or with nested objects. ```js -const { Parser, transforms: { unwind } } = require('json2csv'); +const { + Parser, + transforms: { unwind }, +} = require('json2csv'); const myCars = [ { - "carModel": "BMW", - "price": 15000, - "items": [ + carModel: 'BMW', + price: 15000, + items: [ + { + name: 'airbag', + color: 'white', + }, + { + name: 'dashboard', + color: 'black', + }, + ], + }, + { + carModel: 'Porsche', + price: 30000, + items: [ { - "name": "airbag", - "color": "white" - }, { - "name": "dashboard", - "color": "black" - } - ] - }, { - "carModel": "Porsche", - "price": 30000, - "items": [ + name: 'airbag', + items: [ + { + position: 'left', + color: 'white', + }, + { + position: 'right', + color: 'gray', + }, + ], + }, { - "name": "airbag", - "items": [ + name: 'dashboard', + items: [ { - "position": "left", - "color": "white" - }, { - "position": "right", - "color": "gray" - } - ] - }, { - "name": "dashboard", - "items": [ + position: 'left', + color: 'gray', + }, { - "position": "left", - "color": "gray" - }, { - "position": "right", - "color": "black" - } - ] - } - 
] - } + position: 'right', + color: 'black', + }, + ], + }, + ], + }, ]; -const fields = ['carModel', 'price', 'items.name', 'items.color', 'items.items.position', 'items.items.color']; +const fields = [ + 'carModel', + 'price', + 'items.name', + 'items.color', + 'items.items.position', + 'items.items.color', +]; const transforms = [unwind({ paths: ['items', 'items.items'] })]; const json2csvParser = new Parser({ fields, transforms }); const csv = json2csvParser.parse(myCars); @@ -371,54 +403,71 @@ will output to console You can also unwind arrays blanking the repeated fields. ```js -const { Parser, transforms: { unwind } } = require('json2csv'); +const { + Parser, + transforms: { unwind }, +} = require('json2csv'); const myCars = [ { - "carModel": "BMW", - "price": 15000, - "items": [ + carModel: 'BMW', + price: 15000, + items: [ + { + name: 'airbag', + color: 'white', + }, + { + name: 'dashboard', + color: 'black', + }, + ], + }, + { + carModel: 'Porsche', + price: 30000, + items: [ { - "name": "airbag", - "color": "white" - }, { - "name": "dashboard", - "color": "black" - } - ] - }, { - "carModel": "Porsche", - "price": 30000, - "items": [ + name: 'airbag', + items: [ + { + position: 'left', + color: 'white', + }, + { + position: 'right', + color: 'gray', + }, + ], + }, { - "name": "airbag", - "items": [ + name: 'dashboard', + items: [ { - "position": "left", - "color": "white" - }, { - "position": "right", - "color": "gray" - } - ] - }, { - "name": "dashboard", - "items": [ + position: 'left', + color: 'gray', + }, { - "position": "left", - "color": "gray" - }, { - "position": "right", - "color": "black" - } - ] - } - ] - } + position: 'right', + color: 'black', + }, + ], + }, + ], + }, ]; -const fields = ['carModel', 'price', 'items.name', 'items.color', 'items.items.position', 'items.items.color']; -const transforms = [unwind({ paths: ['items', 'items.items'], blankOut: true })]; +const fields = [ + 'carModel', + 'price', + 'items.name', + 'items.color', + 
'items.items.position', + 'items.items.color', +]; +const transforms = [ + unwind({ paths: ['items', 'items.items'], blankOut: true }), +]; const json2csvParser = new Parser({ fields, transforms }); const csv = json2csvParser.parse(myCars);