Skip to content

Commit 51f4486

Browse files
committed
Add compound keys to dat import
1 parent f4ab36e commit 51f4486

File tree

6 files changed

+44
-6
lines changed

6 files changed

+44
-6
lines changed

bin/import.js

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,11 @@ module.exports = {
3333
boolean: false,
3434
abbr: 'k'
3535
},
36+
{
37+
name: 'keys',
38+
boolean: false,
39+
abbr: 'ks'
40+
},
3641
{
3742
name: 'batch',
3843
boolean: false,
@@ -49,11 +54,9 @@ module.exports = {
4954
function handleImport (args) {
5055
debug('handleImport', args)
5156

52-
if (args.help || args._.length === 0) {
53-
return usage()
54-
}
55-
57+
if (args.help || args._.length === 0) return usage()
5658
if (!args.dataset) abort(new Error('Error: Must specify dataset (-d)'), args)
59+
if (args.keys) args.keys = args.keys.split(',')
5760

5861
openDat(args, function (err, db) {
5962
if (err) abort(err, args)

docs/cli-docs.md

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -480,7 +480,8 @@ dat import <filename> --dataset=<name>
480480

481481
### Options
482482

483-
- `key`/`k` - specify which field to use as the primary key (false for no key)
483+
- `key`/`k` - specify which column to use as the primary key (defaults to auto-generated keys)
484+
- `keys`/`ks` - a comma-separated list of column names used to build a compound key. Columns are sorted ascending by default.
484485
- `message`/`m` - a short description of this import
485486

486487
Examples:

lib/import.js

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,14 +4,26 @@ var through = require('through2')
44
var debug = require('debug')('lib/import')
55
var parseInputStream = require('../lib/util/parse-input-stream.js')
66

7+
var COMPOUND_KEY_SEPARATOR = '+'
8+
79
module.exports = function (db, opts) {
810
if (!opts) opts = {}
911
if (!opts.dataset) throw new Error('Error: Must specify dataset (-d)')
1012
if (opts.dataset === 'files') throw new Error('Cannot import into the \'files\' dataset')
1113

1214
var transform = through.obj(function (obj, enc, next) {
1315
debug('heres my obj!', obj)
14-
var key = obj[opts.key] || obj.key || uuid()
16+
17+
var key
18+
if (opts.keys) {
19+
key = opts.keys.sort().map(function (key) {
20+
return obj[key] || ''
21+
}).join(COMPOUND_KEY_SEPARATOR)
22+
} else {
23+
key = obj[opts.key] || obj.key
24+
}
25+
26+
if (!key || key === COMPOUND_KEY_SEPARATOR) key = uuid()
1527
var doc = {type: 'put', key: key, value: obj}
1628
next(null, doc)
1729
})

tests/import.js

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,25 @@ test('import: dat import json', function (t) {
4141
st.end()
4242
})
4343

44+
test('import: dat import json with compound key', function (t) {
45+
var json = path.resolve(__dirname + '/fixtures/all_hour.json')
46+
var st = spawn(t, dat + ' import ' + json + ' --keys=latitude,longitude -d compound', {cwd: dat2})
47+
st.stdout.empty()
48+
st.stderr.match(/Done importing data/)
49+
st.end()
50+
})
51+
52+
test('import: dat keys get integer id', function (t) {
53+
var st = spawn(t, dat + ' keys -d compound', {cwd: dat2})
54+
st.stdout.match(function (output) {
55+
var keys = output.split('\n')
56+
t.same(keys[0], '33.9233322+-117.9376678')
57+
return true
58+
})
59+
st.stderr.empty()
60+
st.end()
61+
})
62+
4463
test('import: dat import json with integer id', function (t) {
4564
var json = path.resolve(__dirname + '/fixtures/all_hour.json')
4665
var st = spawn(t, dat + ' import ' + json + ' --key=int --dataset=int-id', {cwd: dat2})

usage/export.txt

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,4 +2,5 @@ dat export
22
-d <dataset-name> (required) # the name of the dataset
33
--limit=<number> # the number of rows to output. default: infinity
44
--format=[ndjson, csv, json] # how to parse the output. default: ndjson
5+
--full # output full metadata with each row, including autogenerated keys
56

usage/import.txt

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
11
dat import <filename or - for stdin> (required)
22
-d <dataset-name> (required) # the name of the dataset to import
33
-f [ndjson, csv, json] # how to parse the file. guessed if not supplied.
4+
--key <column name> # the primary key to use. defaults to auto-generated keys
5+
--keys <column,names,separated> # a comma-separated list of column names for a compound key

0 commit comments

Comments
 (0)