diff --git a/.travis.yml b/.travis.yml
index 43ee57b..b5a3300 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,28 +1,21 @@
-sudo: required
-dist: trusty
+dist: bionic
 language: node_js
 node_js:
-  - 10.2.1
+  - 12.4.0
 env:
   matrix:
     - MONGODB_VERSION=stable MONGODB_TOPOLOGY=standalone
 addons:
   apt:
     sources:
-      - ubuntu-toolchain-r-test
+      - sourceline: "ppa:ubuntu-toolchain-r/test"
     packages:
       - libkrb5-dev
-      - xvfb
       - libsecret-1-dev
-      - gnome-keyring
-      - python-gnomekeyring
 before_install:
   - npm i -g npm@latest
 install:
   - npm ci
-before_script:
-  - export DISPLAY=:99.0
-  - sh -e /etc/init.d/xvfb start
 script:
   - npm run cover
 cache: npm
diff --git a/package-lock.json b/package-lock.json
index 435e027..2509b4d 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -34854,6 +34854,27 @@
       "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz",
       "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="
     },
+    "xvfb-maybe": {
+      "version": "0.2.1",
+      "resolved": "https://registry.npmjs.org/xvfb-maybe/-/xvfb-maybe-0.2.1.tgz",
+      "integrity": "sha1-7YyxMpV7eEi0OZhMZvAQ6n8kNhs=",
+      "dev": true,
+      "requires": {
+        "debug": "^2.2.0",
+        "which": "^1.2.4"
+      },
+      "dependencies": {
+        "debug": {
+          "version": "2.6.9",
+          "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
+          "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
+          "dev": true,
+          "requires": {
+            "ms": "2.0.0"
+          }
+        }
+      }
+    },
     "y18n": {
       "version": "4.0.0",
       "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz",
diff --git a/package.json b/package.json
index c7254b9..fd1ec81 100644
--- a/package.json
+++ b/package.json
@@ -20,7 +20,8 @@
     "prestart": "electron-rebuild --force --only keytar",
     "start": "webpack-dev-server --config ./config/webpack.dev.config.js",
     "test": "cross-env NODE_ENV=test mocha-webpack \"./src/**/*.spec.js\"",
-    "cover": "nyc npm run test",
+    "test:dev": "cross-env NODE_ENV=test mocha-webpack",
+    "cover": "xvfb-maybe nyc npm run test",
     "check": "mongodb-js-precommit './src/**/*{.js,.jsx}' './test/**/*.js'",
     "prepublishOnly": "npm run compile",
     "storybook": "cross-env NODE_ENV=development start-storybook -p 9001 -c .storybook",
@@ -143,7 +144,8 @@
     "webpack-dev-server": "^3.8.2",
     "webpack-merge": "^4.2.2",
     "webpack-node-externals": "^1.7.2",
-    "webpack-sources": "^1.4.3"
+    "webpack-sources": "^1.4.3",
+    "xvfb-maybe": "^0.2.1"
   },
   "dependencies": {
     "JSONStream": "^1.3.5",
diff --git a/src/utils/formatters.js b/src/utils/formatters.js
index ec79610..c734985 100644
--- a/src/utils/formatters.js
+++ b/src/utils/formatters.js
@@ -10,6 +10,7 @@ import csv from 'fast-csv';
 import { EJSON } from 'bson';
 import { serialize as flatten } from './bson-csv';
 import { Transform } from 'stream';
+import { EOL } from 'os';
 
 /**
  * @returns {Stream.Transform}
@@ -19,7 +20,14 @@ export const createJSONFormatter = function({ brackets = true } = {}) {
     readableObjectMode: false,
     writableObjectMode: true,
     transform: function(doc, encoding, callback) {
-      const s = EJSON.stringify(doc);
+      if (this._counter === 1) {
+        if (brackets) {
+          this.push(',');
+        } else {
+          this.push(EOL);
+        }
+      }
+      const s = EJSON.stringify(doc, null, brackets ? 2 : null);
       if (this._counter === undefined) {
         this._counter = 0;
         if (brackets) {
diff --git a/src/utils/formatters.spec.js b/src/utils/formatters.spec.js
new file mode 100644
index 0000000..8f24f8b
--- /dev/null
+++ b/src/utils/formatters.spec.js
@@ -0,0 +1,80 @@
+import { createJSONFormatter } from './formatters';
+import stream from 'stream';
+import bson, { EJSON } from 'bson';
+import fs from 'fs';
+import path from 'path';
+import { promisify } from 'util';
+import { EOL } from 'os';
+
+const pipeline = promisify(stream.pipeline);
+const readFile = promisify(fs.readFile);
+
+const rm = function(src) {
+  return new Promise((resolve) => {
+    fs.unlink(src, function() {
+      resolve(true);
+    });
+  });
+};
+
+const BASE_FIXTURE_PATH = path.join(__dirname, '..', '..', '..', 'test');
+const FIXTURES = {
+  JSON_SINGLE_DOC: path.join(BASE_FIXTURE_PATH, 'export-single-doc.json'),
+  JSON_TWO_DOCS: path.join(BASE_FIXTURE_PATH, 'export-two-docs.json'),
+  JSONL: path.join(BASE_FIXTURE_PATH, 'export-two-docs.jsonl'),
+};
+
+describe('formatters', () => {
+  describe('json', () => {
+    it('should format a single document in an array', () => {
+      const source = stream.Readable.from([{_id: new bson.ObjectId('5e5ea7558d35931a05eafec0')}]);
+      const formatter = createJSONFormatter({brackets: true});
+      const dest = fs.createWriteStream(FIXTURES.JSON_SINGLE_DOC);
+
+      return pipeline(source, formatter, dest)
+        .then(() => readFile(FIXTURES.JSON_SINGLE_DOC))
+        .then((contents) => {
+          const parsed = EJSON.parse(contents);
+          expect(parsed).to.deep.equal([{_id: new bson.ObjectId('5e5ea7558d35931a05eafec0')}]);
+        })
+        .then(() => rm(FIXTURES.JSON_SINGLE_DOC));
+    });
+    it('should format two documents in an array', () => {
+      const docs = [
+        {_id: new bson.ObjectId('5e5ea7558d35931a05eafec0')},
+        {_id: new bson.ObjectId('5e6bafc438e060f695591713')}
+      ];
+      const source = stream.Readable.from(docs);
+      const formatter = createJSONFormatter({brackets: true});
+      const dest = fs.createWriteStream(FIXTURES.JSON_TWO_DOCS);
+
+      return pipeline(source, formatter, dest)
+        .then(() => readFile(FIXTURES.JSON_TWO_DOCS))
+        .then((contents) => {
+          const parsed = EJSON.parse(contents);
+          expect(parsed).to.deep.equal(docs);
+        })
+        .then(() => rm(FIXTURES.JSON_TWO_DOCS));
+    });
+  });
+  describe('jsonl', () => {
+    it('should support newline delimited ejson', () => {
+      const docs = [
+        {_id: new bson.ObjectId('5e5ea7558d35931a05eafec0')},
+        {_id: new bson.ObjectId('5e6bafc438e060f695591713')}
+      ];
+      const source = stream.Readable.from(docs);
+      const formatter = createJSONFormatter({brackets: false});
+      const dest = fs.createWriteStream(FIXTURES.JSONL);
+
+      return pipeline(source, formatter, dest)
+        .then(() => readFile(FIXTURES.JSONL))
+        .then((buf) => {
+          const sources = buf.toString('utf-8').split(EOL);
+          expect(EJSON.parse(sources[0])).to.deep.equal(docs[0]);
+          expect(EJSON.parse(sources[1])).to.deep.equal(docs[1]);
+        })
+        .then(() => rm(FIXTURES.JSONL));
+    });
+  });
+});
diff --git a/src/utils/import-preview.spec.js b/src/utils/import-preview.spec.js
index cef9f44..3e44be4 100644
--- a/src/utils/import-preview.spec.js
+++ b/src/utils/import-preview.spec.js
@@ -17,7 +17,7 @@ const FIXTURES = {
   )
 };
 
-describe.skip('import-preview', () => {
+describe('import-preview', () => {
   describe('createPreviewWritable', () => {
     it('should work with docs < MAX_SIZE', (done) => {
       const dest = createPreviewWritable();
diff --git a/src/utils/import-size-guesstimator.spec.js b/src/utils/import-size-guesstimator.spec.js
index e7ea36e..d1efd51 100644
--- a/src/utils/import-size-guesstimator.spec.js
+++ b/src/utils/import-size-guesstimator.spec.js
@@ -14,10 +14,7 @@ import { createCSVParser } from './import-parser';
 import createImportSizeGuesstimator from './import-size-guesstimator';
 import { pipeline } from 'stream';
 
-/**
- * TODO: lucas: This works functionally in electron but can't
- * figure out how/why mocha-webpack until we get electron@6
- */
+// TODO (lucas) Find proper fixture for this to check in.
 describe.skip('guesstimator', () => {
   it('should guess', function(done) {
     this.timeout(5000);
diff --git a/src/utils/remove-blanks.spec.js b/src/utils/remove-blanks.spec.js
index 146d986..4c8bbf0 100644
--- a/src/utils/remove-blanks.spec.js
+++ b/src/utils/remove-blanks.spec.js
@@ -1,4 +1,6 @@
-import removeBlanks from './remove-blanks';
+import removeBlanks, { removeBlanksStream } from './remove-blanks';
+import stream from 'stream';
+import { Stream } from 'mongodb-stitch-browser-sdk';
 
 describe('remove-blanks', () => {
   it('should remove empty strings', () => {
@@ -26,4 +28,47 @@ describe('remove-blanks', () => {
       undef: undefined
     });
   });
+  it('should tolerate empty docs if a bad projection was specified', () => {
+    expect(removeBlanks({})).to.deep.equal({});
+  });
+  it('should tolerate arrays', () => {
+    expect(removeBlanks([{}])).to.deep.equal([{}]);
+  });
+  describe('stream', () => {
+    it('should return a passthrough if not ignoring blanks', () => {
+      const transform = removeBlanksStream(false);
+      expect(transform).to.be.instanceOf(stream.PassThrough);
+    });
+    it('should remove blanks via a transform', (done) => {
+      const src = stream.Readable.from([{
+        _id: 1,
+        empty: '',
+        nulled: null,
+        falsed: false,
+        undef: undefined
+      }]);
+      const transform = removeBlanksStream(true);
+      let result;
+      const dest = new stream.Writable({
+        objectMode: true,
+        write: function(doc, encoding, next) {
+          result = doc;
+          return next(null);
+        }
+      });
+      stream.pipeline(src, transform, dest, function(err) {
+        if (err) {
+          return done(err);
+        }
+
+        expect(result).to.deep.equal({
+          _id: 1,
+          nulled: null,
+          falsed: false,
+          undef: undefined
+        });
+        done();
+      });
+    });
+  });
 });
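
For reference, a minimal usage sketch of the reworked createJSONFormatter (not part of the patch; the import path and output filename are illustrative, and ES module syntax assumes the project's babel/webpack setup): brackets: true writes a pretty-printed EJSON array, brackets: false writes newline-delimited EJSON with one document per OS EOL, which is what the new formatters.spec.js exercises.

// Usage sketch only: the relative import path and output file are hypothetical
// and depend on where this script lives inside the package.
import fs from 'fs';
import stream from 'stream';
import { promisify } from 'util';
import { createJSONFormatter } from './src/utils/formatters';

const pipeline = promisify(stream.pipeline);

const docs = [{ _id: 1, name: 'a' }, { _id: 2, name: 'b' }];

// brackets: false -> newline-delimited EJSON (one document per line).
// brackets: true  -> a single pretty-printed EJSON array.
pipeline(
  stream.Readable.from(docs),
  createJSONFormatter({ brackets: false }),
  fs.createWriteStream('./export.jsonl')
).then(() => console.log('export finished'));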