Skip to content

Commit

Permalink
make tests for writers and appenders
Browse files Browse the repository at this point in the history
  • Loading branch information
mhkeller committed Oct 17, 2016
1 parent cf7cb58 commit 1f8779e
Show file tree
Hide file tree
Showing 2 changed files with 664 additions and 44 deletions.
54 changes: 35 additions & 19 deletions lib/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,12 @@ var yamlParser = require('js-yaml')
var mkdirp = require('mkdirp')
var archieml = require('archieml')

// Equivalent formats
var equivalentFormats = {
json: ['json', 'topojson', 'geojson'],
yml: ['yaml']
}

var formatters = {
json: function (file, writeOptions) {
writeOptions = writeOptions || {}
Expand Down Expand Up @@ -71,8 +77,8 @@ formatters.topojson = formatters.json
var parsers = {
json: function (str, parserOptions) {
parserOptions = parserOptions || {}
// Do a naive test whether this is a string or an object
var mapFn = str.trim().charAt(0) === '[' ? _.map : _.mapObject
// Do a naive test whether this is a string or an object
var mapFn = parserOptions.map ? str.trim().charAt(0) === '[' ? _.map : _.mapObject : _.identity
return mapFn(jsonParser(str, parserOptions.reviver, parserOptions.filename), parserOptions.map)
},
csv: function (str, parserOptions) {
Expand All @@ -92,7 +98,7 @@ var parsers = {
},
yaml: function (str, parserOptions) {
parserOptions = parserOptions || {}
var map = parserOptions.map || function (d) { return d }
var map = parserOptions.map || _.identity
delete parserOptions.map
var loadMethod = parserOptions.loadMethod || 'safeLoad'
delete parserOptions.loadMethod
Expand All @@ -101,7 +107,7 @@ var parsers = {
},
aml: function (str, parserOptions) {
parserOptions = parserOptions || {}
var map = parserOptions.map || function (d) { return d }
var map = parserOptions.map || _.identity
delete parserOptions.map
var data = archieml.load(str, parserOptions)
return map(data, map)
Expand Down Expand Up @@ -623,7 +629,7 @@ readers.readData = function (path, opts_, cb_) {
parser = helpers.discernParser(path)
}
fs.readFile(path, 'utf8', function (err, data) {
if (helpers.discernFormat(path) === 'json' && data === '') {
if (equivalentFormats.json.indexOf(helpers.discernFormat(path)) > -1 && data === '') {
data = '[]'
}
if (err) {
Expand Down Expand Up @@ -731,7 +737,7 @@ readers.readDataSync = function (path, opts_) {
parser = helpers.discernParser(path)
}
var data = fs.readFileSync(path, 'utf8')
if (helpers.discernFormat(path) === 'json' && data === '') {
if (equivalentFormats.json.indexOf(helpers.discernFormat(path)) > -1 && data === '') {
data = '[]'
}

Expand Down Expand Up @@ -1518,12 +1524,11 @@ var writers = {}
*
* Supported formats:
*
* * `.json` Array of objects
* * `.json` Array of objects, also supports `.geojson` and `.topojson`
* * `.csv` Comma-separated
* * `.tsv` Tab-separated
* * `.psv` Pipe-separated
* * `.yaml` Yaml file
* * `.yml` Yaml file
* * `.yaml` Yaml file, also supports `.yml`
* * `.dbf` Database file, commonly used in ESRI-shapefile format.
*
 * *Note: `.yaml` and `.yml` files are written with `.dump`, which has no security checking. See `js-yaml` for more secure options.*
Expand Down Expand Up @@ -1571,6 +1576,8 @@ writers.writeData = function (outPath, data, opts_, cb) {
/**
 * Synchronous version of {@link writers#writeData}
*
* Supports the same formats with the exception of `.dbf` files
*
* @param {String} fileName the name of the file
* @param {Object} [options] Optional config object, see below
* @param {Boolean} [options.makeDirectories=false] If true, create intermediate directories to your data file.
Expand Down Expand Up @@ -1602,7 +1609,7 @@ writers.writeDataSync = function (outPath, data, opts_) {
}

/**
* Append to an existing data object, creating a new file if one does not exist. For tabular formats, data must be an array of flat objects (cannot contain nested objects or arrays).
* Append to an existing data object, creating a new file if one does not exist. If appending to an object, data is extended with `_.extend`. For tabular formats, existing data and new data must be an array of flat objects (cannot contain nested objects or arrays).
*
* Supported formats:
*
Expand All @@ -1621,11 +1628,11 @@ writers.writeDataSync = function (outPath, data, opts_) {
* @param {Function} callback callback of `(err, data)` where `err` is any error and `data` is the data that was written out
*
* @example
* io.writeAppendData('path/to/data.json', jsonData, function(err){
* io.appendData('path/to/data.json', jsonData, function(err){
* console.log(err)
* })
*
* io.writeAppendData('path/to/create/to/data.csv', flatJsonData, {makeDirectories: true}, function(err){
* io.appendData('path/to/create/to/data.csv', flatJsonData, {makeDirectories: true}, function(err){
* console.log(err)
* })
*/
Expand All @@ -1647,7 +1654,13 @@ writers.appendData = function (outPath, data, opts_, cb) {
if (!err) {
readers.readData(outPath, function (err, existingData) {
if (!err) {
data = existingData.concat(data)
if (!_.isEmpty(existingData)) {
if (_.isArray(existingData)) {
data = existingData.concat(data)
} else if (_.isObject(existingData)) {
data = _.extend({}, existingData, data)
}
}
writers.writeData(outPath, data, opts_, cb)
} else {
cb(err)
Expand All @@ -1670,9 +1683,9 @@ writers.appendData = function (outPath, data, opts_, cb) {
* @returns {Object} the data that was written
*
* @example
* io.writeAppendDataSync('path/to/data.json', jsonData)
* io.appendDataSync('path/to/data.json', jsonData)
*
* io.writeAppendDataSync('path/to/create/to/data.csv', flatJsonData, {makeDirectories: true})
* io.appendDataSync('path/to/create/to/data.csv', flatJsonData, {makeDirectories: true})
*/
writers.appendDataSync = function (outPath, data, opts_) {
// Run append file to delegate creating a new file if none exists
Expand All @@ -1681,7 +1694,13 @@ writers.appendDataSync = function (outPath, data, opts_) {
}
fs.appendFileSync(outPath, '')
var existingData = readers.readDataSync(outPath)
data = existingData.concat(data)
if (!_.isEmpty(existingData)) {
if (_.isArray(existingData)) {
data = existingData.concat(data)
} else if (_.isObject(existingData)) {
data = _.extend({}, existingData, data)
}
}
writers.writeDataSync(outPath, data, opts_)
return data
}
Expand Down Expand Up @@ -1723,7 +1742,4 @@ converters.convertDbfToData = function (inPath, outPath, opts_, cb) {
})
}

// Alias this function to `writers.writeDbfToData` for legacy support
writers.writeDbfToData = converters.convertDbfToData

module.exports = _.extend({}, {parsers: parsers}, {formatters: formatters}, readers, shorthandReaders, writers, converters, helpers, { fs: fs })

0 comments on commit 1f8779e

Please sign in to comment.