From e25317d439b093833c25bf1b167cda62351976eb Mon Sep 17 00:00:00 2001
From: Ryan Clark
Date: Tue, 25 Aug 2015 16:54:00 -0700
Subject: [PATCH 1/2] Revert "switch to hat + hat.rack for batch"

---
 index.js           |  1 +
 lib/batch.js       |  4 +---
 lib/utils.js       |  7 +++----
 package.json       |  1 -
 test/cli.test.js   |  2 --
 test/indexing.js   |  5 +----
 test/utils.test.js | 24 ------------------------
 7 files changed, 6 insertions(+), 38 deletions(-)

diff --git a/index.js b/index.js
index 6d2cf66..2572fb1 100644
--- a/index.js
+++ b/index.js
@@ -5,6 +5,7 @@ var queue = require('queue-async');
 var Dyno = require('dyno');
 var AWS = require('aws-sdk');
 var extent = require('geojson-extent');
+var cuid = require('cuid');
 var tilebelt = require('tilebelt');
 var geobuf = require('geobuf');
 var stream = require('stream');
diff --git a/lib/batch.js b/lib/batch.js
index 9d73df7..e78a928 100644
--- a/lib/batch.js
+++ b/lib/batch.js
@@ -3,7 +3,6 @@ var queue = require('queue-async');
 var geobuf = require('geobuf');
 var _ = require('lodash');
 var Dyno = require('dyno');
-var hat = require('hat');
 
 module.exports = function(config) {
     if (!config.bucket) throw new Error('No bucket set');
@@ -35,9 +34,8 @@ module.exports = function(config) {
         var encoded;
 
         var q = queue(150);
-        var rack = hat.rack();
         for (var i = 0; i < collection.features.length; i++) {
-            try { encoded = utils.toDatabaseRecord(collection.features[i], dataset, rack); }
+            try { encoded = utils.toDatabaseRecord(collection.features[i], dataset); }
             catch (err) { return callback(err); }
 
             records.push(encoded[0]);
diff --git a/lib/utils.js b/lib/utils.js
index c6e0d9b..f7430e9 100644
--- a/lib/utils.js
+++ b/lib/utils.js
@@ -3,7 +3,7 @@ var geobuf = require('geobuf');
 var url = require('url');
 var geojsonNormalize = require('geojson-normalize');
 var _ = require('lodash');
-var hat = require('hat');
+var cuid = require('cuid');
 var Metadata = require('./metadata');
 var tilebelt = require('tilebelt');
 
@@ -58,12 +58,11 @@ var Utils = module.exports = function(config) {
      * Converts a single GeoJSON feature into backend format
      * @param {object} feature - a GeoJSON feature
      * @param {string} dataset - the name of the dataset the feature belongs to
-     * @param {string} [rack] - a hat rack to use for random id generation
      * @returns {object[]} the first element is a DynamoDB record suitable for inserting via `dyno.putItem`, the second are parameters suitable for uploading via `s3.putObject`.
      */
-    utils.toDatabaseRecord = function(feature, dataset, rack) {
+    utils.toDatabaseRecord = function(feature, dataset) {
         if (feature.id === 0) feature.id = '0';
-        var f = feature.id ? _.clone(feature) : _.extend({}, feature, { id: (rack || hat)() });
+        var f = feature.id ? _.clone(feature) : _.extend({}, feature, { id: cuid() });
         var primary = f.id;
 
         if (!f.geometry || !f.geometry.coordinates)
diff --git a/package.json b/package.json
index c298e9b..4798113 100644
--- a/package.json
+++ b/package.json
@@ -34,7 +34,6 @@
     "geobuf": "0.2.4",
     "geojson-extent": "^0.1.0",
     "geojson-normalize": "0.0.0",
-    "hat": "0.0.3",
     "lodash": "~2.4.1",
     "minimist": "0.0.9",
     "queue-async": "~1.0.7",
diff --git a/test/cli.test.js b/test/cli.test.js
index e973ca9..c2325ba 100644
--- a/test/cli.test.js
+++ b/test/cli.test.js
@@ -119,8 +119,6 @@ test('[cli] list', function(assert) {
     exec(params.join(' '), function(err, stdout, stderr) {
         assert.ifError(err, 'success');
         var found = JSON.parse(stdout.trim());
-        found.features = _(found.features).sortBy('id').value();
-        putResults.features = _(putResults.features).sortBy('id').value();
         assert.deepEqual(found, putResults, 'got expected FeatureCollection');
         assert.end();
     });
diff --git a/test/indexing.js b/test/indexing.js
index 0ade920..3d2c085 100644
--- a/test/indexing.js
+++ b/test/indexing.js
@@ -454,13 +454,10 @@ test('list first page with maxFeatures', function(t) {
     cardboard.batch.put(features, 'default', function page(err, putResult) {
         t.equal(err, null);
         t.pass('collection inserted');
-
-        var indexed = _.indexBy(putResult.features, 'id');
-
         cardboard.list('default', {maxFeatures: 1}, function(err, data) {
             t.equal(err, null, 'no error');
             t.deepEqual(data.features.length, 1, 'first page has one feature');
-            t.deepEqual(data.features[0], indexed[data.features[0].id], 'obj as expected');
+            t.deepEqual(data.features[0].id, putResult.features[0].id, 'id as expected');
             t.end();
         });
     });
diff --git a/test/utils.test.js b/test/utils.test.js
index 4a62992..20237ec 100644
--- a/test/utils.test.js
+++ b/test/utils.test.js
@@ -82,30 +82,6 @@ test('[utils] toDatabaseRecord - no ID', function(assert) {
     assert.end();
 });
 
-test('[utils] toDatabaseRecord - no ID + rack', function(assert) {
-    var noId = {
-        type: 'Feature',
-        properties: {
-            hasNo: 'id'
-        },
-        geometry: {
-            type: 'Point',
-            coordinates: [0, 0]
-        }
-    };
-
-    var rack = function() {
-        return 'big-mac';
-    };
-
-    var encoded = utils.toDatabaseRecord(noId, 'dataset', rack);
-    var item = encoded[0];
-
-    assert.equal(item.id, 'id!big-mac', 'an id was assigned');
-
-    assert.end();
-});
-
 test('[utils] toDatabaseRecord - with ID', function(assert) {
     var hasId = {
         id: 'bacon-lettuce-tomato',

From 9b310d986247701e4008e0f18f73003011e5fe55 Mon Sep 17 00:00:00 2001
From: Ryan Clark
Date: Tue, 25 Aug 2015 17:00:31 -0700
Subject: [PATCH 2/2] pin cuid

---
 package.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/package.json b/package.json
index 4798113..67895ed 100644
--- a/package.json
+++ b/package.json
@@ -29,7 +29,7 @@
   "homepage": "https://github.com/mapbox/cardboard",
   "dependencies": {
     "aws-sdk": "~2.1.5",
-    "cuid": "^1.2.4",
+    "cuid": "1.2.4",
     "dyno": "^0.15.1",
     "geobuf": "0.2.4",
     "geojson-extent": "^0.1.0",
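
A rough usage sketch of the reverted signature (not part of the patches above): after these commits, utils.toDatabaseRecord takes only (feature, dataset), and a feature without an id gets a cuid()-generated one instead of a hat-generated one. The require path and config values below are assumptions for illustration; the diff does not show which config fields lib/utils.js actually needs.

// Sketch only: the config fields here are placeholders, not cardboard's documented settings.
var Utils = require('./lib/utils');
var utils = Utils({ bucket: 'my-bucket', prefix: 'my-prefix' });

var feature = {
    type: 'Feature',
    properties: { hasNo: 'id' },
    geometry: { type: 'Point', coordinates: [0, 0] }
};

// No third `rack` argument any more: a missing feature.id is filled in with cuid().
var encoded = utils.toDatabaseRecord(feature, 'my-dataset');
var item = encoded[0];     // DynamoDB record, suitable for dyno.putItem
var s3Params = encoded[1]; // parameters suitable for s3.putObject

// The record id keeps its 'id!' prefix (as asserted in test/utils.test.js),
// now followed by a cuid instead of a hat-generated string.
console.log(item.id);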