fix(compass-import-export): Picks only specified columns in JSON export
leorossi committed Feb 22, 2022
1 parent 0f3cf17 commit 170622c
Showing 5 changed files with 137 additions and 46 deletions.
4 changes: 3 additions & 1 deletion packages/compass-import-export/package.json
@@ -24,6 +24,8 @@
"compile": "cross-env NODE_ENV=production webpack --config ./config/webpack.prod.config.js",
"start": "webpack-dev-server --config ./config/webpack.dev.config.js",
"test": "cross-env NODE_ENV=test mocha-webpack \"./src/**/*.spec.js\"",
"pretest": "mongodb-runner start --port=27018",
"posttest": "mongodb-runner stop --port=27018",
"test:watch": "cross-env NODE_ENV=test mocha-webpack \"./src/**/*.spec.js\" --watch",
"test:dev": "cross-env NODE_ENV=test mocha-webpack",
"cover": "nyc npm run test",
@@ -146,13 +148,13 @@
},
"dependencies": {
"@mongodb-js/compass-logging": "^0.7.0",
"JSONStream": "^1.3.5",
"ansi-to-html": "^0.6.11",
"bson": "*",
"csv-parser": "^2.3.1",
"fast-csv": "^3.4.0",
"flat": "cipacda/flat",
"javascript-stringify": "^2.0.1",
"JSONStream": "^1.3.5",
"lodash.isobjectlike": "^4.0.0",
"lodash.isplainobject": "^4.0.6",
"lodash.throttle": "^4.1.1",
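The new pretest/posttest hooks start and stop a local mongod with mongodb-runner so the export spec further down runs against a real server instead of a hand-rolled mock. A minimal sketch of the connection that spec makes to that instance (the connection string comes from the diff below; the wrapper function is assumed for illustration):

import { DataService } from 'mongodb-data-service';

// The pretest hook runs `mongodb-runner start --port=27018`,
// so the spec points its connection string at that port.
const dataService = new DataService({
  connectionString: 'mongodb://localhost:27018/local'
});

async function connectToTestServer() {
  await dataService.connect();
  return dataService;
}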
4 changes: 3 additions & 1 deletion packages/compass-import-export/src/modules/export.js
@@ -440,14 +440,16 @@ export const startExport = () => {
const spec = exportData.isFullCollection
? { filter: {} }
: exportData.query;

const numDocsToExport = exportData.isFullCollection
? await fetchDocumentCount(dataService, ns, spec)
: exportData.count;
// filter out only the fields we want to include in our export data
const projection = Object.fromEntries(
Object.entries(exportData.fields)
.filter((keyAndValue) => keyAndValue[1] === 1));
if (Object.keys(projection).length > 0 && (undefined === exportData.fields._id || exportData.fields._id === 0)) {
projection._id = 0;
}
log.info(mongoLogId(1001000083), 'Export', 'Start reading from collection', {
ns,
numDocsToExport,
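In plain terms, the added block keeps only the fields the user marked with 1 and, whenever _id was not explicitly requested, appends `_id: 0` so MongoDB's default inclusion of _id does not leak into the export. A standalone sketch of that derivation (an illustrative helper, not part of the module's API):

// fields as selected in the export dialog, e.g. { first_name: 1, foobar: 1, last_name: 0 }
function buildProjection(fields) {
  // Keep only the fields explicitly marked for inclusion.
  const projection = Object.fromEntries(
    Object.entries(fields).filter(([, include]) => include === 1)
  );
  // MongoDB returns _id by default, so exclude it unless it was asked for.
  if (Object.keys(projection).length > 0 && (fields._id === undefined || fields._id === 0)) {
    projection._id = 0;
  }
  return projection;
}

// buildProjection({ first_name: 1, foobar: 1, last_name: 0 }) => { first_name: 1, foobar: 1, _id: 0 }
// buildProjection({ first_name: 1, last_name: 0, _id: 1 })    => { first_name: 1, _id: 1 }
// buildProjection({})                                         => {} (export every field)

An empty projection (every field deselected or set to 0) leaves the documents untouched, which is what the "include all fields" tests in the spec below assert.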
170 changes: 129 additions & 41 deletions packages/compass-import-export/src/modules/export.spec.js
@@ -9,66 +9,154 @@ import AppRegistry from 'hadron-app-registry';
import FILE_TYPES from '../constants/file-types';
import reducer, * as actions from './export';
import configureExportStore from '../stores/export-store';

import { ConnectionOptions, DataService } from 'mongodb-data-service';
import { promisify } from 'util';
import { once } from 'events';
describe('export [module]', () => {
describe('#reducer', () => {
context('#startExport', () => {
let tempFile;
let store;
const localAppRegistry = new AppRegistry();
const globalAppRegistry = new AppRegistry();
const dataService = new DataService({ connectionString: 'mongodb://localhost:27018/local'});
const createCollection = promisify(dataService.createCollection).bind(dataService);
const dropCollection = promisify(dataService.dropCollection).bind(dataService);
const insertMany = promisify(dataService.insertMany).bind(dataService);
const TEST_COLLECTION_NAME = 'local.foobar';

afterEach(async function() {
await dropCollection(TEST_COLLECTION_NAME);
});

beforeEach(async function() {
tempFile = path.join(
os.tmpdir(),
`test-${Date.now()}.csv`
);
const mockDocuments = [
{
_id: 'foo',
first_name: 'John',
last_name: 'Appleseed'
}
];
await dataService.connect();
try {
await createCollection(TEST_COLLECTION_NAME);
await insertMany(TEST_COLLECTION_NAME, [
{
_id: 'foo',
first_name: 'John',
last_name: 'Appleseed'
}
]);
} catch (err) {
console.log(err);
}

store = configureExportStore({
localAppRegistry: localAppRegistry,
globalAppRegistry: globalAppRegistry,
namespace: TEST_COLLECTION_NAME,
dataProvider: {
error: function(err) { throw err; },
dataProvider: {
estimatedCount: function(ns, options, callback) { return callback(null, mockDocuments.length); },
count: function(ns, filter, options, callback ) { return callback(null, mockDocuments.length); },
fetch: function() {
return {
stream: function() {
return Readable.from(mockDocuments);
}
};
}
}
dataProvider: dataService
}
});
});

afterEach(function(done) {
rimraf(tempFile, done);
});
it('should set the correct fields to export', (done) => {
const fields = { 'first_name': 1, '_id': 1, 'foobar': 1, 'last_name': 0};
store.dispatch(actions.updateSelectedFields(fields));
async function configureAndStartExport(selectedFields, fileType, tempFile) {
store.dispatch(actions.updateSelectedFields(selectedFields));
store.dispatch(actions.selectExportFileName(tempFile));
store.dispatch(actions.selectExportFileType('csv'));
store.dispatch(actions.selectExportFileType(fileType));
store.dispatch(actions.toggleFullCollection());

store.dispatch(actions.startExport());
localAppRegistry.addListener('export-finished', function() {
fs.readFile(tempFile, 'utf-8', function(err, data) {
if (err) { return done(err); }
const writtenData = data.split('\n');
expect(writtenData[0]).to.equal('first_name,_id,foobar');
expect(writtenData[1]).to.equal('John,foo,');
done();
});
await once(localAppRegistry, 'export-finished');
const writtenData = fs.readFileSync(tempFile, 'utf-8');
return writtenData;
}
describe('CSV Export', () => {
let tempFile;
beforeEach(() => {
tempFile = path.join(
os.tmpdir(),
`test-${Date.now()}.csv`
);
});
afterEach(() => {
fs.unlinkSync(tempFile);
});
it('should set the correct fields to CSV export', async() => {
const fields = { 'first_name': 1, 'foobar': 1, 'last_name': 0};
const data = await configureAndStartExport(fields, 'csv', tempFile);
const writtenData = data.split('\n');
expect(writtenData[0]).to.equal('first_name,foobar');
expect(writtenData[1]).to.equal('John,');
});
it('should not include _id if not specified', async() => {
const fields = { 'first_name': 1, 'foobar': 1 };
const data = await configureAndStartExport(fields, 'csv', tempFile);
const writtenData = data.split('\n');
expect(writtenData[0]).to.equal('first_name,foobar');
expect(writtenData[1]).to.equal('John,');
});
});
describe('JSON Export', () => {
let tempFile;
beforeEach(() => {
tempFile = path.join(
os.tmpdir(),
`test-${Date.now()}.json`
);
});
afterEach(() => {
fs.unlinkSync(tempFile);
});
it('should not include _id if omitted', async() => {
const fields = { 'first_name': 1, 'last_name': 0 };
const data = await configureAndStartExport(fields, 'json', tempFile);
const writtenData = JSON.parse(data);
expect(writtenData).to.deep.equal([
{
first_name: 'John',
}
]);
});

it('should not include _id if it is set to 0', async() => {
const fields = { 'first_name': 1, 'last_name': 0, _id: 0 };
const data = await configureAndStartExport(fields, 'json', tempFile);
const writtenData = JSON.parse(data);
expect(writtenData).to.deep.equal([
{
first_name: 'John',
}
]);
});

it('should include _id if it is set to 1', async() => {
const fields = { 'first_name': 1, 'last_name': 0, _id: 1 };
const data = await configureAndStartExport(fields, 'json', tempFile);
const writtenData = JSON.parse(data);
expect(writtenData).to.deep.equal([
{
_id: 'foo',
first_name: 'John',
}
]);
});

it('should include all fields if projection is empty', async() => {
const fields = {};
const data = await configureAndStartExport(fields, 'json', tempFile);
const writtenData = JSON.parse(data);
expect(writtenData).to.deep.equal([
{
_id: 'foo',
first_name: 'John',
last_name: 'Appleseed'
}
]);
});
it('should include all fields if all fields are set to 0', async() => {
const fields = { first_name: 0, last_name: 0, _id: 0};
const data = await configureAndStartExport(fields, 'json', tempFile);
const writtenData = JSON.parse(data);
expect(writtenData).to.deep.equal([
{
_id: 'foo',
first_name: 'John',
last_name: 'Appleseed'
}
]);
});
});
});
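Taken together, the rewritten spec follows one integration-test flow: promisify the callback-style DataService methods for setup and teardown, dispatch the export actions, and await the app registry's 'export-finished' event instead of nesting callbacks. A condensed sketch of that flow, using only the call shapes that appear in the diff (anything beyond them is assumed):

import fs from 'fs';
import { promisify } from 'util';
import { once } from 'events';
import { DataService } from 'mongodb-data-service';

const dataService = new DataService({ connectionString: 'mongodb://localhost:27018/local' });
// Bind so the promisified wrappers keep `this` pointing at the DataService instance.
const createCollection = promisify(dataService.createCollection).bind(dataService);
const insertMany = promisify(dataService.insertMany).bind(dataService);
const dropCollection = promisify(dataService.dropCollection).bind(dataService);

async function runExport(store, actions, localAppRegistry, fields, fileType, tempFile) {
  await dataService.connect();
  await createCollection('local.foobar');
  await insertMany('local.foobar', [{ _id: 'foo', first_name: 'John', last_name: 'Appleseed' }]);

  store.dispatch(actions.updateSelectedFields(fields));
  store.dispatch(actions.selectExportFileName(tempFile));
  store.dispatch(actions.selectExportFileType(fileType));
  store.dispatch(actions.toggleFullCollection());
  store.dispatch(actions.startExport());

  // Resolves when the export pipeline emits 'export-finished' on the local app registry.
  await once(localAppRegistry, 'export-finished');

  await dropCollection('local.foobar');
  return fs.readFileSync(tempFile, 'utf-8');
}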
1 change: 0 additions & 1 deletion packages/compass-import-export/src/utils/formatters.js
@@ -7,7 +7,6 @@ import { EJSON } from 'bson';
import { serialize as flatten } from './bson-csv';
import { Transform } from 'stream';
import { EOL } from 'os';

/**
* @returns {Stream.Transform}
*/
4 changes: 2 additions & 2 deletions packages/compass-import-export/src/utils/formatters.spec.js
@@ -28,7 +28,7 @@ const FIXTURES = {

describe('formatters', () => {
describe('json', () => {
it('should format a single docment in an array', () => {
it('should format a single document in an array', () => {
const source = stream.Readable.from([{_id: new ObjectID('5e5ea7558d35931a05eafec0')}]);
const formatter = createJSONFormatter({brackets: true});
const dest = fs.createWriteStream(FIXTURES.JSON_SINGLE_DOC);
@@ -41,7 +41,7 @@ describe('formatters', () => {
})
.then(() => rm(FIXTURES.JSON_SINGLE_DOC));
});
it('should format more than 2 docments in an array', () => {
it('should format more than 2 documents in an array', () => {
const docs = [
{_id: new ObjectID('5e5ea7558d35931a05eafec0')},
{_id: new ObjectID('5e6bafc438e060f695591713')},
