Permalink
Browse files

Add compound keys to dat import

  • Loading branch information...
karissa committed Aug 5, 2015
1 parent f4ab36e commit 51f44864ebaae9b0e2a021a67881c39dc157ab68
Showing with 44 additions and 6 deletions.
  1. +7 −4 bin/import.js
  2. +2 −1 docs/cli-docs.md
  3. +13 −1 lib/import.js
  4. +19 −0 tests/import.js
  5. +1 −0 usage/export.txt
  6. +2 −0 usage/import.txt
View
@@ -33,6 +33,11 @@ module.exports = {
boolean: false,
abbr: 'k'
},
{
name: 'keys',
boolean: false,
abbr: 'ks'
},
{
name: 'batch',
boolean: false,
@@ -49,11 +54,9 @@ module.exports = {
function handleImport (args) {
debug('handleImport', args)
if (args.help || args._.length === 0) {
return usage()
}
if (args.help || args._.length === 0) return usage()
if (!args.dataset) abort(new Error('Error: Must specify dataset (-d)'), args)
if (args.keys) args.keys = args.keys.split(',')
openDat(args, function (err, db) {
if (err) abort(err, args)
View
@@ -480,7 +480,8 @@ dat import <filename> --dataset=<name>
### Options
- `key`/`k` - specify which column to use as the primary key (defaults to auto-generated keys)
- `keys`/`ks` - comma-separated list of column names used to build a compound key; columns are sorted ascending by default
- `message`/`m` - a short description of this import
Examples:
View
@@ -4,14 +4,26 @@ var through = require('through2')
var debug = require('debug')('lib/import')
var parseInputStream = require('../lib/util/parse-input-stream.js')
var COMPOUND_KEY_SEPARATOR = '+'
module.exports = function (db, opts) {
if (!opts) opts = {}
if (!opts.dataset) throw new Error('Error: Must specify dataset (-d)')
if (opts.dataset === 'files') throw new Error('Cannot import into the \'files\' dataset')
var transform = through.obj(function (obj, enc, next) {
debug('heres my obj!', obj)
var key = obj[opts.key] || obj.key || uuid()
var key
if (opts.keys) {
key = opts.keys.sort().map(function (key) {
return obj[key] || ''
}).join(COMPOUND_KEY_SEPARATOR)
} else {
key = obj[opts.key] || obj.key
}
if (!key || key === COMPOUND_KEY_SEPARATOR) key = uuid()
var doc = {type: 'put', key: key, value: obj}
next(null, doc)
})
View
@@ -41,6 +41,25 @@ test('import: dat import json', function (t) {
st.end()
})
// Verify `dat import` accepts a comma-separated --keys list and imports
// the fixture into the 'compound' dataset without errors.
test('import: dat import json with compound key', function (t) {
  var fixture = path.resolve(__dirname + '/fixtures/all_hour.json')
  var cmd = dat + ' import ' + fixture + ' --keys=latitude,longitude -d compound'
  var st = spawn(t, cmd, {cwd: dat2})
  st.stdout.empty()
  st.stderr.match(/Done importing data/)
  st.end()
})
// Reads back the 'compound' dataset written by the compound-key import
// test and checks that the first key is the --keys columns (sorted
// ascending: latitude before longitude) joined by '+'.
// Renamed from 'dat keys get integer id' — that name was a copy-paste
// error: this test asserts compound keys, not integer ids (the integer-id
// test follows separately).
test('import: dat keys get compound key', function (t) {
  var st = spawn(t, dat + ' keys -d compound', {cwd: dat2})
  st.stdout.match(function (output) {
    var keys = output.split('\n')
    // first row of fixtures/all_hour.json: latitude + '+' + longitude
    t.same(keys[0], '33.9233322+-117.9376678')
    return true
  })
  st.stderr.empty()
  st.end()
})
test('import: dat import json with integer id', function (t) {
var json = path.resolve(__dirname + '/fixtures/all_hour.json')
var st = spawn(t, dat + ' import ' + json + ' --key=int --dataset=int-id', {cwd: dat2})
View
@@ -2,4 +2,5 @@ dat export
-d <dataset-name> (required) # the name of the dataset
--limit=<number> # the number of rows to output. default: infinity
--format=[ndjson, csv, json] # how to parse the output. default: ndjson
--full # output full metadata with each row, including autogenerated keys
View
@@ -1,3 +1,5 @@
dat import <filename or - for stdin> (required)
-d <dataset-name> (required) # the name of the dataset to import
-f [ndjson, csv, json] # how to parse the file. guessed if not supplied.
--key <column name> # the primary key to use. defaults to auto-generated keys
--keys <column,names,separated> # a comma-separated list of column names for a compound key

0 comments on commit 51f4486

Please sign in to comment.