Commit a52a6ca
WIP convertData function and fix dbf options err
mhkeller committed Sep 20, 2017
1 parent f7a3d3e commit a52a6ca
Showing 6 changed files with 183 additions and 47 deletions.
115 changes: 75 additions & 40 deletions dist/indian-ocean.node.js
@@ -7,10 +7,6 @@ function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'defau
var fs = _interopDefault(require('fs'));
var path = _interopDefault(require('path'));

var identity = (function (d) {
return d;
});

var commonjsGlobal = typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};

function commonjsRequire () {
@@ -1788,6 +1784,10 @@ var parserCsv = function (str, parserOptions) {
return csvParse(str, parserOptions.map);
};

var identity = (function (d) {
return d;
});

var parserJson = function (str, parserOptions) {
parserOptions = parserOptions || {};
// Do a naive test whether this is a string or an object
@@ -5925,13 +5925,16 @@ function file$1(filePath, parser, parserOptions, cb) {
}

var shapefile = require('shapefile');

function dbf(filePath, parser, parserOptions, cb) {
var values = [];
parserOptions = parserOptions || {};
var map = parserOptions.map || identity;
var i = 0;
shapefile.openDbf(filePath).then(function (source) {
return source.read().then(function log(result) {
console.log(i++);
if (result.done) return cb(null, values);
values.push(parserOptions.map(result.value)); // TODO, figure out i
values.push(map(result.value)); // TODO, figure out i
return source.read().then(log);
});
}).catch(function (error) {
@@ -6089,40 +6092,6 @@ function readData(filePath, opts_, cb_) {
loader(filePath, parser, parserOptions, cb);
}

/**
* Asynchronously read a dbf file. Returns an empty array if file is empty.
*
* @function readDbf
* @param {String} filePath Input file path
* @param {Function|Object} [map] Optional map function or an object with `map` key that is a function. Called once for each row with the signature `(row, i)` and must return the transformed row. See example below.
* @param {Function} callback Has signature `(err, data)`
*
* @example
* io.readDbf('path/to/data.dbf', function (err, data) {
* console.log(data) // Json data
* })
*
* // Transform values on load
* io.readDbf('path/to/data.csv', function (row, i) {
* console.log(columns) // [ 'name', 'occupation', 'height' ]
* row.height = +row.height // Convert this value to a number
* return row
* }, function (err, data) {
* console.log(data) // Converted json data
* })
*/
function readDbf(filePath, opts_, cb) {
var parserOptions = {
map: identity
};
if (typeof cb === 'undefined') {
cb = opts_;
} else {
parserOptions = typeof opts_ === 'function' ? { map: opts_ } : opts_;
}
readData(filePath, parserOptions, cb);
}

var matchOperatorsRe = /[|\\{}()[\]^$+*?.]/g;

var index$2 = function (str) {
@@ -6949,6 +6918,71 @@ function writeData(outPath, data, opts_, cb) {
}
}

/**
* Reads in a dbf file with `.readDbf` and write to file using `.writeData`. A convenience function for converting DBFs to more useable formats. Formerly known as `writeDbfToData` and is aliased for legacy support.
*
* @function convertData
* @param {String} inFilePath Input file path
* @param {String} outFilePath Output file path
* @param {Object} [options] Optional config object that's passed to {@link writeData}. See that documentation for full options, which vary depending on the output format you choose.
* @param {Function} callback Has signature `(err)`
*
* @example
* io.convertData('path/to/data.dbf', 'path/to/data.csv', function (err) {
* console.log(err)
* })
*
* io.convertData('path/to/data.dbf', 'path/to/create/to/data.csv', {makeDirectories: true}, function (err) {
* console.log(err)
* })
*/
function convertData(inPath, outPath, opts_, cb) {
if (typeof cb === 'undefined') {
cb = opts_;
}
readData(inPath, function (error, jsonData) {
if (error) {
cb(error);
} else {
writeData(outPath, jsonData, opts_, cb);
}
});
}

/**
* Asynchronously read a dbf file. Returns an empty array if file is empty.
*
* @function readDbf
* @param {String} filePath Input file path
* @param {Function|Object} [map] Optional map function or an object with `map` key that is a function. Called once for each row with the signature `(row, i)` and must return the transformed row. See example below.
* @param {Function} callback Has signature `(err, data)`
*
* @example
* io.readDbf('path/to/data.dbf', function (err, data) {
* console.log(data) // Json data
* })
*
* // Transform values on load
* io.readDbf('path/to/data.csv', function (row, i) {
* console.log(columns) // [ 'name', 'occupation', 'height' ]
* row.height = +row.height // Convert this value to a number
* return row
* }, function (err, data) {
* console.log(data) // Converted json data
* })
*/
function readDbf(filePath, opts_, cb) {
var parserOptions = {
map: identity
};
if (typeof cb === 'undefined') {
cb = opts_;
} else {
parserOptions = typeof opts_ === 'function' ? { map: opts_ } : opts_;
}
readData(filePath, parserOptions, cb);
}
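
As the argument handling above shows, the map can be given either as a bare function or wrapped in an options object under a `map` key. A minimal usage sketch of the two equivalent call forms, not part of the diff (the file path and the `callback` handler with signature `(err, data)` are hypothetical placeholders):

// map supplied directly as a function
io.readDbf('path/to/data.dbf', function (row, i) { return row }, callback)

// equivalent: map supplied inside an options object
io.readDbf('path/to/data.dbf', { map: function (row, i) { return row } }, callback)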

/**
* Reads in a dbf file with `.readDbf` and write to file using `.writeData`. A convenience function for converting DBFs to more useable formats. Formerly known as `writeDbfToData` and is aliased for legacy support.
*
@@ -8345,6 +8379,7 @@ function appendDataSync(outPath, data, opts_) {

// converters

exports.convertData = convertData;
exports.convertDbfToData = convertDbfToData;
exports.writeDbfToData = convertDbfToData;
exports.formatters = formatters;
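
For orientation, a minimal sketch of consuming the newly exported converter from the built module (assuming the package's main entry resolves to this node build; the paths are hypothetical and the pattern follows the convertData doc block above):

var io = require('indian-ocean')

io.convertData('path/to/data.dbf', 'path/to/data.csv', function (err) {
  if (err) throw err
  // data.csv now contains the rows read from the dbf
})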
1 change: 1 addition & 0 deletions index.node.js
@@ -1,4 +1,5 @@
// converters
export {default as convertData} from './src/converters/convertData'
export {default as convertDbfToData} from './src/converters/convertDbfToData'
export {default as writeDbfToData} from './src/converters/convertDbfToData' // Legacy support
// formatters
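
This entry point uses ES module syntax, so the same converter can also be pulled in with `import` (a sketch assuming a bundler or transpiler resolves `indian-ocean` to this file; the paths are hypothetical):

import { convertData } from 'indian-ocean'

convertData('path/to/data.dbf', 'path/to/data.json', function (err) {
  if (err) throw err
})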
10 changes: 5 additions & 5 deletions src/converters/convertData.js
@@ -2,24 +2,24 @@ import readData from '../readers/readData'
import writeData from '../writers/writeData'

/**
* Reads in a dbf file with `.readDbf` and write to file using `.writeData`. A convenience function for converting DBFs to more useable formats. Formerly known as `writeDbfToData` and is aliased for legacy support.
 * Reads in data given a path ending in the file format with {@link readData} and writes to file using {@link writeData}. A convenience function for converting files to other formats.
*
* @function convertDbfToData
* @function convertData
* @param {String} inFilePath Input file path
* @param {String} outFilePath Output file path
* @param {Object} [options] Optional config object that's passed to {@link writeData}. See that documentation for full options, which vary depending on the output format you choose.
* @param {Function} callback Has signature `(err)`
*
* @example
* io.convertDbfToData('path/to/data.dbf', 'path/to/data.csv', function (err) {
* io.convertData('path/to/data.dbf', 'path/to/data.csv', function (err) {
* console.log(err)
* })
*
* io.convertDbfToData('path/to/data.dbf', 'path/to/create/to/data.csv', {makeDirectories: true}, function (err) {
* io.convertData('path/to/data.csv', 'path/to/create/to/data.dbf', {makeDirectories: true}, function (err) {
* console.log(err)
* })
*/
export default function convertDbfToData (inPath, outPath, opts_, cb) {
export default function convertData (inPath, outPath, opts_, cb) {
if (typeof cb === 'undefined') {
cb = opts_
}
2 changes: 1 addition & 1 deletion src/converters/convertDbfToData.js
@@ -2,7 +2,7 @@ import readDbf from '../directReaders/readDbf'
import writeData from '../writers/writeData'

/**
* Reads in a dbf file with `.readDbf` and write to file using `.writeData`. A convenience function for converting DBFs to more useable formats. Formerly known as `writeDbfToData` and is aliased for legacy support.
 * Reads in a dbf file with {@link readData} and writes to file using {@link writeData}. A convenience function for converting DBFs to more useable formats. Formerly known as `writeDbfToData` and is aliased for legacy support.
*
* @function convertDbfToData
* @param {String} inFilePath Input file path
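
Because the doc block keeps the note about the old name, here is a quick sketch of the alias in use; both names point at the same converter per the exports shown earlier, and `done` stands in for a hypothetical `(err)` callback:

// current name
io.convertDbfToData('path/to/data.dbf', 'path/to/data.csv', done)

// legacy alias kept for backwards compatibility
io.writeDbfToData('path/to/data.dbf', 'path/to/data.csv', done)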
7 changes: 6 additions & 1 deletion src/loaders/dbf.js
@@ -1,12 +1,17 @@
var shapefile = require('shapefile')
import identity from '../utils/identity'

export default function dbf (filePath, parser, parserOptions, cb) {
var values = []
parserOptions = parserOptions || {}
var map = parserOptions.map || identity
var i = 0
shapefile.openDbf(filePath)
.then(source => source.read()
.then(function log (result) {
i++
if (result.done) return cb(null, values)
values.push(parserOptions.map(result.value)) // TODO, figure out i
values.push(map(result.value, i))
return source.read().then(log)
}))
.catch(error => cb(error.stack))
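
With this change the loader forwards a running index to the map, matching the documented `(row, i)` signature of readDbf's transform. A minimal sketch of using it (the file path and the added `rowNumber` field are hypothetical):

io.readDbf('path/to/data.dbf', function (row, i) {
  row.rowNumber = i // a running index now arrives alongside each row
  return row
}, function (err, data) {
  if (err) return console.error(err)
  console.log(data)
})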
95 changes: 95 additions & 0 deletions test/test.js
@@ -3476,6 +3476,101 @@ describe('writers', function () {
})
})

describe('convertData()', function () {
describe('csv', function () {
it('should convert dbf to format', function (done) {
var filePath = ['test', 'tmp-convert-to-data-csv', 'data.csv']
io.convertData(testDataPath('dbf/basic.dbf'), filePath.join(path.sep), {makeDirectories: true}, function (err) {
if (err) {
console.log(err)
}
assert.equal(err, null)
var json = io.readDataSync(filePath.join(path.sep))
assert(_.isEqual(JSON.stringify(json), '[{"foo":"orange","bar":"0"},{"foo":"blue","bar":"1"},{"foo":"green","bar":"2"}]'))
rimraf(filePath.slice(0, 2).join(path.sep), {glob: false}, function (err) {
assert.equal(err, null)
done()
})
})
})
})

describe('psv', function () {
it('should convert dbf to format', function (done) {
var filePath = ['test', 'tmp-convert-to-data-psv', 'data.psv']
io.convertData(testDataPath('dbf/basic.dbf'), filePath.join(path.sep), {makeDirectories: true}, function (err) {
assert.equal(err, null)
var json = io.readDataSync(filePath.join(path.sep))
assert(_.isEqual(JSON.stringify(json), '[{"foo":"orange","bar":"0"},{"foo":"blue","bar":"1"},{"foo":"green","bar":"2"}]'))
rimraf(filePath.slice(0, 2).join(path.sep), {glob: false}, function (err) {
assert.equal(err, null)
done()
})
})
})
})

describe('tsv', function () {
it('should convert dbf to format', function (done) {
var filePath = ['test', 'tmp-convert-to-data-tsv', 'data.tsv']
io.convertData(testDataPath('dbf/basic.dbf'), filePath.join(path.sep), {makeDirectories: true}, function (err) {
assert.equal(err, null)
var json = io.readDataSync(filePath.join(path.sep))
assert(_.isEqual(JSON.stringify(json), '[{"foo":"orange","bar":"0"},{"foo":"blue","bar":"1"},{"foo":"green","bar":"2"}]'))
rimraf(filePath.slice(0, 2).join(path.sep), {glob: false}, function (err) {
assert.equal(err, null)
done()
})
})
})
})

describe('yaml', function () {
it('should convert dbf to format', function (done) {
var filePath = ['test', 'tmp-convert-to-data-yaml', 'data.yaml']
io.convertData(testDataPath('dbf/basic.dbf'), filePath.join(path.sep), {makeDirectories: true}, function (err) {
assert.equal(err, null)
var json = io.readDataSync(filePath.join(path.sep))
assert(_.isEqual(JSON.stringify(json), '[{"foo":"orange","bar":0},{"foo":"blue","bar":1},{"foo":"green","bar":2}]'))
rimraf(filePath.slice(0, 2).join(path.sep), {glob: false}, function (err) {
assert.equal(err, null)
done()
})
})
})
})

describe('yml', function () {
it('should convert dbf to format', function (done) {
var filePath = ['test', 'tmp-convert-to-data-yml', 'data.yml']
io.convertData(testDataPath('dbf/basic.dbf'), filePath.join(path.sep), {makeDirectories: true}, function (err) {
assert.equal(err, null)
var json = io.readDataSync(filePath.join(path.sep))
assert(_.isEqual(JSON.stringify(json), '[{"foo":"orange","bar":0},{"foo":"blue","bar":1},{"foo":"green","bar":2}]'))
rimraf(filePath.slice(0, 2).join(path.sep), {glob: false}, function (err) {
assert.equal(err, null)
done()
})
})
})
})

describe('json', function () {
it('should convert dbf to format', function (done) {
var filePath = ['test', 'tmp-convert-to-data-json', 'data.json']
io.convertData(testDataPath('dbf/basic.dbf'), filePath.join(path.sep), {makeDirectories: true}, function (err) {
assert.equal(err, null)
var json = io.readDataSync(filePath.join(path.sep))
assert(_.isEqual(JSON.stringify(json), '[{"foo":"orange","bar":0},{"foo":"blue","bar":1},{"foo":"green","bar":2}]'))
rimraf(filePath.slice(0, 2).join(path.sep), {glob: false}, function (err) {
assert.equal(err, null)
done()
})
})
})
})
})

describe('convertDbfToData()', function () {
describe('csv', function () {
it('should convert to format', function (done) {
