This repository was archived by the owner on May 17, 2021. It is now read-only.
Merged
13 changes: 3 additions & 10 deletions .travis.yml
@@ -1,28 +1,21 @@
sudo: required
dist: trusty
dist: bionic
language: node_js
node_js:
- 10.2.1
- 12.4.0
env:
matrix:
- MONGODB_VERSION=stable MONGODB_TOPOLOGY=standalone
addons:
apt:
sources:
- ubuntu-toolchain-r-test
- sourceline: "ppa:ubuntu-toolchain-r/test"
packages:
- libkrb5-dev
- xvfb
- libsecret-1-dev
- gnome-keyring
- python-gnomekeyring
before_install:
- npm i -g npm@latest
install:
- npm ci
before_script:
- export DISPLAY=:99.0
- sh -e /etc/init.d/xvfb start
script:
- npm run cover
cache: npm
21 changes: 21 additions & 0 deletions package-lock.json

Some generated files are not rendered by default.

6 changes: 4 additions & 2 deletions package.json
@@ -20,7 +20,8 @@
"prestart": "electron-rebuild --force --only keytar",
"start": "webpack-dev-server --config ./config/webpack.dev.config.js",
"test": "cross-env NODE_ENV=test mocha-webpack \"./src/**/*.spec.js\"",
"cover": "nyc npm run test",
"test:dev": "cross-env NODE_ENV=test mocha-webpack",
"cover": "xvfb-maybe nyc npm run test",
"check": "mongodb-js-precommit './src/**/*{.js,.jsx}' './test/**/*.js'",
"prepublishOnly": "npm run compile",
"storybook": "cross-env NODE_ENV=development start-storybook -p 9001 -c .storybook",
@@ -143,7 +144,8 @@
"webpack-dev-server": "^3.8.2",
"webpack-merge": "^4.2.2",
"webpack-node-externals": "^1.7.2",
"webpack-sources": "^1.4.3"
"webpack-sources": "^1.4.3",
"xvfb-maybe": "^0.2.1"
},
"dependencies": {
"JSONStream": "^1.3.5",
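The xvfb-maybe wrapper added to the "cover" script is what lets the Travis config above drop its manual Xvfb bootstrap (the exported DISPLAY and the /etc/init.d/xvfb start line): when a display server is already available the wrapped command runs as-is, and on a headless Linux box it is started under Xvfb instead. A minimal sketch of that idea follows; it is not xvfb-maybe's actual source, and runMaybeUnderXvfb is a hypothetical helper used only for illustration.

import { spawn } from 'child_process';

// Run a command directly when a display is available; otherwise start it
// under xvfb-run so Electron can open windows on a headless CI machine.
function runMaybeUnderXvfb(cmd, args) {
  const headlessLinux = process.platform === 'linux' && !process.env.DISPLAY;
  if (headlessLinux) {
    return spawn('xvfb-run', ['--auto-servernum', cmd, ...args], { stdio: 'inherit' });
  }
  return spawn(cmd, args, { stdio: 'inherit' });
}

// Roughly what `xvfb-maybe nyc npm run test` amounts to on CI.
runMaybeUnderXvfb('nyc', ['npm', 'run', 'test']);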
10 changes: 9 additions & 1 deletion src/utils/formatters.js
@@ -10,6 +10,7 @@ import csv from 'fast-csv';
import { EJSON } from 'bson';
import { serialize as flatten } from './bson-csv';
import { Transform } from 'stream';
import { EOL } from 'os';

/**
* @returns {Stream.Transform}
@@ -19,7 +20,14 @@ export const createJSONFormatter = function({ brackets = true } = {}) {
readableObjectMode: false,
writableObjectMode: true,
transform: function(doc, encoding, callback) {
const s = EJSON.stringify(doc);
if (this._counter === 1) {
if (brackets) {
this.push(',');
} else {
this.push(EOL);
}
}
const s = EJSON.stringify(doc, null, brackets ? 2 : null);
if (this._counter === undefined) {
this._counter = 0;
if (brackets) {
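Putting the hunk above back into context: the formatter now pretty-prints each document when writing a bracketed array and emits a separator before every document after the first, a comma inside the array or an OS newline for newline-delimited EJSON. Only the hunk is part of this diff, so in the sketch below the opening bracket, the counter bookkeeping after the push, and the flush that closes the array are assumptions.

import { Transform } from 'stream';
import { EJSON } from 'bson';
import { EOL } from 'os';

export const createJSONFormatter = function({ brackets = true } = {}) {
  return new Transform({
    readableObjectMode: false,
    writableObjectMode: true,
    transform: function(doc, encoding, callback) {
      // From the diff: separate every document after the first one.
      if (this._counter === 1) {
        if (brackets) {
          this.push(',');
        } else {
          this.push(EOL);
        }
      }
      // From the diff: indent by 2 when producing a bracketed array.
      const s = EJSON.stringify(doc, null, brackets ? 2 : null);
      if (this._counter === undefined) {
        this._counter = 0;
        if (brackets) {
          this.push('[' + EOL); // assumed: open the array before the first doc
        }
      }
      this.push(s);
      this._counter = 1; // assumed: flag that at least one doc has been written
      callback();
    },
    flush: function(callback) {
      if (brackets) {
        this.push(EOL + ']'); // assumed: close the array when the source ends
      }
      callback();
    }
  });
};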
80 changes: 80 additions & 0 deletions src/utils/formatters.spec.js
@@ -0,0 +1,80 @@
import { createJSONFormatter } from './formatters';
import stream from 'stream';
import bson, { EJSON } from 'bson';
import fs from 'fs';
import path from 'path';
import { promisify } from 'util';
import { EOL } from 'os';

const pipeline = promisify(stream.pipeline);
const readFile = promisify(fs.readFile);

const rm = function(src) {
return new Promise((resolve) => {
fs.unlink(src, function() {
resolve(true);
});
});
};

const BASE_FIXTURE_PATH = path.join(__dirname, '..', '..', '..', 'test');
const FIXTURES = {
JSON_SINGLE_DOC: path.join(BASE_FIXTURE_PATH, 'export-single-doc.json'),
JSON_TWO_DOCS: path.join(BASE_FIXTURE_PATH, 'export-two-docs.json'),
JSONL: path.join(BASE_FIXTURE_PATH, 'export-two-docs.jsonl'),
};

describe('formatters', () => {
describe('json', () => {
it('should format a single document in an array', () => {
const source = stream.Readable.from([{_id: new bson.ObjectId('5e5ea7558d35931a05eafec0')}]);
const formatter = createJSONFormatter({brackets: true});
const dest = fs.createWriteStream(FIXTURES.JSON_SINGLE_DOC);

return pipeline(source, formatter, dest)
.then(() => readFile(FIXTURES.JSON_SINGLE_DOC))
.then((contents) => {
const parsed = EJSON.parse(contents);
expect(parsed).to.deep.equal([{_id: new bson.ObjectId('5e5ea7558d35931a05eafec0')}]);
})
.then(() => rm(FIXTURES.JSON_SINGLE_DOC));
});
it('should format two documents in an array', () => {
const docs = [
{_id: new bson.ObjectId('5e5ea7558d35931a05eafec0')},
{_id: new bson.ObjectId('5e6bafc438e060f695591713')}
];
const source = stream.Readable.from(docs);
const formatter = createJSONFormatter({brackets: true});
const dest = fs.createWriteStream(FIXTURES.JSON_TWO_DOCS);

return pipeline(source, formatter, dest)
.then(() => readFile(FIXTURES.JSON_TWO_DOCS))
.then((contents) => {
const parsed = EJSON.parse(contents);
expect(parsed).to.deep.equal(docs);
})
.then(() => rm(FIXTURES.JSON_TWO_DOCS));
});
});
describe('jsonl', () => {
it('should support newline delimited ejson', () => {
const docs = [
{_id: new bson.ObjectId('5e5ea7558d35931a05eafec0')},
{_id: new bson.ObjectId('5e6bafc438e060f695591713')}
];
const source = stream.Readable.from(docs);
const formatter = createJSONFormatter({brackets: false});
const dest = fs.createWriteStream(FIXTURES.JSONL);

return pipeline(source, formatter, dest)
.then(() => readFile(FIXTURES.JSONL))
.then((buf) => {
const sources = buf.toString('utf-8').split(EOL);
expect(EJSON.parse(sources[0])).to.deep.equal(docs[0]);
expect(EJSON.parse(sources[1])).to.deep.equal(docs[1]);
})
.then(() => rm(FIXTURES.JSONL));
});
});
});
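For reference, the newline-delimited fixture written by the last test should come out as one compact EJSON document per line, roughly as shown below; the exact bytes are inferred from the formatter options rather than read from a checked-in fixture. The export-two-docs.json fixture wraps the same two documents in a single pretty-printed array instead.

{"_id":{"$oid":"5e5ea7558d35931a05eafec0"}}
{"_id":{"$oid":"5e6bafc438e060f695591713"}}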
2 changes: 1 addition & 1 deletion src/utils/import-preview.spec.js
@@ -17,7 +17,7 @@ const FIXTURES = {
)
};

describe.skip('import-preview', () => {
describe('import-preview', () => {
describe('createPreviewWritable', () => {
it('should work with docs < MAX_SIZE', (done) => {
const dest = createPreviewWritable();
5 changes: 1 addition & 4 deletions src/utils/import-size-guesstimator.spec.js
@@ -14,10 +14,7 @@ import { createCSVParser } from './import-parser';
import createImportSizeGuesstimator from './import-size-guesstimator';
import { pipeline } from 'stream';

/**
* TODO: lucas: This works functionally in electron but can't
* figure out how/why mocha-webpack until we get electron@6
*/
// TODO (lucas) Find proper fixture for this to check in.
describe.skip('guesstimator', () => {
it('should guess', function(done) {
this.timeout(5000);
47 changes: 46 additions & 1 deletion src/utils/remove-blanks.spec.js
@@ -1,4 +1,6 @@
import removeBlanks from './remove-blanks';
import removeBlanks, { removeBlanksStream } from './remove-blanks';
import stream from 'stream';
import { Stream } from 'mongodb-stitch-browser-sdk';

describe('remove-blanks', () => {
it('should remove empty strings', () => {
@@ -26,4 +28,47 @@ describe('remove-blanks', () => {
undef: undefined
});
});
it('should tolerate empty docs if a bad projection was specified', () => {
expect(removeBlanks({})).to.deep.equal({});
});
it('should tolerate arrays', () => {
expect(removeBlanks([{}])).to.deep.equal([{}]);
});
describe('stream', () => {
it('should return a passthrough if not ignoring blanks', () => {
const transform = removeBlanksStream(false);
expect(transform).to.be.instanceOf(stream.PassThrough);
});
it('should remove blanks via a transform', (done) => {
const src = stream.Readable.from([{
_id: 1,
empty: '',
nulled: null,
falsed: false,
undef: undefined
}]);
const transform = removeBlanksStream(true);
let result;
const dest = new stream.Writable({
objectMode: true,
write: function(doc, encoding, next) {
result = doc;
return next(null);
}
});
stream.pipeline(src, transform, dest, function(err) {
if (err) {
return done(err);
}

expect(result).to.deep.equal({
_id: 1,
nulled: null,
falsed: false,
undef: undefined
});
done();
});
});
});
});
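The new assertions pin down what the helpers must do: strip keys whose value is an empty string, leave null, false, and undefined untouched, tolerate empty documents and arrays, and fall back to a plain pass-through when blanks are not being ignored. src/utils/remove-blanks.js itself is not part of this diff, so the following is only a sketch of an implementation that would satisfy the suite.

import { PassThrough, Transform } from 'stream';

// Drop keys whose value is the empty string; null, false and undefined stay.
export function removeBlanks(doc) {
  if (Array.isArray(doc)) {
    return doc.map(removeBlanks);
  }
  Object.keys(doc).forEach((key) => {
    if (doc[key] === '') {
      delete doc[key];
    }
  });
  return doc;
}

// A pass-through when blanks are kept, otherwise a transform that cleans
// every document with removeBlanks before pushing it downstream.
export function removeBlanksStream(ignoreBlanks) {
  if (!ignoreBlanks) {
    return new PassThrough({ objectMode: true });
  }
  return new Transform({
    objectMode: true,
    transform(doc, encoding, callback) {
      callback(null, removeBlanks(doc));
    }
  });
}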