diff --git a/packages/cli-repl/src/cli-repl.ts b/packages/cli-repl/src/cli-repl.ts index bd25bbed5a..76e7fceef3 100644 --- a/packages/cli-repl/src/cli-repl.ts +++ b/packages/cli-repl/src/cli-repl.ts @@ -298,11 +298,15 @@ class CliRepl { // This checks for error instances. // The writer gets called immediately by the internal `this.repl.eval` // in case of errors. - if (result && result.message && typeof result.stack === 'string') { - this.bus.emit('mongosh:error', result); + if (result && ( + (result.message !== undefined && typeof result.stack === 'string') || + (result.code !== undefined && result.errmsg !== undefined) + )) { this.shellEvaluator.revertState(); - return formatOutput({ type: 'Error', value: result }); + const output = { ...result, message: result.message || result.errmsg, name: result.name || 'MongoshInternalError' }; + this.bus.emit('mongosh:error', output); + return formatOutput({ type: 'Error', value: output }); } return formatOutput(result); diff --git a/packages/cli-repl/test/e2e.spec.ts b/packages/cli-repl/test/e2e.spec.ts index 4a7eb9da31..b95301103d 100644 --- a/packages/cli-repl/test/e2e.spec.ts +++ b/packages/cli-repl/test/e2e.spec.ts @@ -124,6 +124,20 @@ describe('e2e', function() { client.close(); }); + describe('error formatting', () => { + it('throws when a syntax error is encountered', async() => { + await shell.executeLine(' { + await shell.executeLine('throw new Error(\'a errmsg\')'); + shell.assertContainsError('Error: a errmsg'); + }); + it('recognizes a driver error as error', async() => { + await shell.executeLine('db.coll.initializeOrderedBulkOp().find({}).update({}, {}).execute()'); + shell.assertContainsOutput('MongoshInternalError: multi update is not supported for replacement-style update'); + }); + }); it('throws multiline input with a single line string', async() => { // this is an unterminated string constant and should throw, since it does // not pass: https://www.ecma-international.org/ecma-262/#sec-line-terminators 
@@ -169,11 +183,6 @@ describe('e2e', function() { }); }); }); - it('throws when a syntax error is encountered', async() => { - await shell.executeLine(' { await shell.writeInputLine('function x () {\nconsole.log(\'y\')\n }'); shell.assertNoErrors(); diff --git a/packages/i18n/src/locales/en_US.js b/packages/i18n/src/locales/en_US.js index 4094347a7b..f8b2f3d716 100644 --- a/packages/i18n/src/locales/en_US.js +++ b/packages/i18n/src/locales/en_US.js @@ -528,6 +528,16 @@ const translations = { description: 'returns the $latencyStats aggregation for the collection. Takes an options document with an optional boolean \'histograms\' field.', example: 'db.latencyStats({ histograms: true })' }, + initializeUnorderedBulkOp: { + link: 'https://docs.mongodb.com/manual/reference/method/db.collection.initializeUnorderedBulkOp', + description: 'Initializes an unordered bulk command. Returns an instance of Bulk', + example: 'db.initializeUnorderedBulkOp()' + }, + initializeOrderedBulkOp: { + link: 'https://docs.mongodb.com/manual/reference/method/db.collection.initializeOrderedBulkOp', + description: 'Initializes an ordered bulk command. Returns an instance of Bulk', + example: 'db.initializeOrderedBulkOp()' + } } } }, @@ -1207,6 +1217,97 @@ const translations = { }, attributes: {} }, + Bulk: { + help: { + link: 'https://docs.mongodb.com/manual/reference/method/Bulk', + description: 'Bulk operations builder used to construct a list of write operations to perform in bulk for a single collection. 
To instantiate the builder, use either the db.collection.initializeOrderedBulkOp() or the db.collection.initializeUnorderedBulkOp() method.', + attributes: { + insert: { + link: 'https://docs.mongodb.com/manual/reference/method/Bulk.insert/', + description: 'Adds an insert to the bulk operation.', + example: 'bulkOp.insert()' + }, + execute: { + link: 'https://docs.mongodb.com/manual/reference/method/Bulk.execute/', + description: 'Executes the bulk operation.', + example: 'bulkOp.execute()', + }, + find: { + link: 'https://docs.mongodb.com/manual/reference/method/Bulk.find/', + description: 'Adds a find to the bulk operation.', + example: 'bulkOp.find()', + }, + getOperations: { + link: 'https://docs.mongodb.com/manual/reference/method/Bulk.getOperations/', + description: 'Returns the batches executed by the bulk write.', + example: 'bulkOp.getOperations()', + }, + tojson: { + link: 'https://docs.mongodb.com/manual/reference/method/Bulk.tojson/', + description: 'Returns a JSON document that contains the number of operations and batches in the Bulk() object.', + example: 'bulkOp.tojson()', + }, + toString: { + link: 'https://docs.mongodb.com/manual/reference/method/Bulk.toString/', + description: 'Returns as a string a JSON document that contains the number of operations and batches in the Bulk() object.', + example: 'bulkOp.toString()', + } + } + } + }, + BulkFindOp: { + help: { + link: 'https://docs.mongodb.com/manual/reference/method/Bulk.find', + description: 'Bulk operations builder returned after Bulk.find()', + attributes: { + 'arrayFilters': { + link: 'https://docs.mongodb.com/manual/reference/method/Bulk.find.arrayFilters/', + description: 'Adds an arrayFilter to the bulk operation.', + example: 'bulkOp.find(...).arrayFilters()', + }, + 'collation': { + link: 'https://docs.mongodb.com/manual/reference/method/Bulk.find.collation/', + description: 'Not currently implemented, use db.collection.bulkWrite as an alternative', + example: 
'bulkOp.find(...).collation()', + }, + 'remove': { + link: 'https://docs.mongodb.com/manual/reference/method/Bulk.find.remove/', + description: 'Adds a remove to the bulk operation.', + example: 'bulkOp.find(...).remove()', + }, + 'removeOne': { + link: 'https://docs.mongodb.com/manual/reference/method/Bulk.find.removeOne/', + description: 'Adds a removeOne to the bulk operation.', + example: 'bulkOp.find(...).removeOne()', + }, + 'replaceOne': { + link: 'https://docs.mongodb.com/manual/reference/method/Bulk.find.replaceOne/', + description: 'Adds a replaceOne to the bulk operation.', + example: 'bulkOp.find(...).replaceOne()', + }, + 'updateOne': { + link: 'https://docs.mongodb.com/manual/reference/method/Bulk.find.updateOne/', + description: 'Adds an updateOne to the bulk operation.', + example: 'bulkOp.find(...).updateOne()', + }, + 'update': { + link: 'https://docs.mongodb.com/manual/reference/method/Bulk.find.update/', + description: 'Adds an update to the bulk operation.', + example: 'bulkOp.find(...).update()', + }, + 'upsert': { + link: 'https://docs.mongodb.com/manual/reference/method/Bulk.find.upsert/', + description: 'Adds an upsert to the bulk operation\'s updates for this find(...).', + example: 'bulkOp.find(...).upsert()', + } + } + } + } } }, 'transport-browser': { diff --git a/packages/service-provider-core/src/bulk.ts b/packages/service-provider-core/src/bulk.ts new file mode 100644 index 0000000000..95f82944aa --- /dev/null +++ b/packages/service-provider-core/src/bulk.ts @@ -0,0 +1,109 @@ +import Document from './document'; + +export interface BulkBatch { + originalZeroIndex: number; + batchType: number; + operations: any[]; + [key: string]: any; +} + +export interface DriverBulkResult { + result: { + ok: boolean; + /** + * The number of documents inserted. + */ + nInserted: number; + + /** + * The number of existing documents selected for update or replacement. 
+ */ + nMatched: number; + + /** + * The number of existing documents updated or replaced. + */ + nModified: number; + + /** + * The number of documents removed. + */ + nRemoved: number; + + /** + * The number of upserted documents. + */ + nUpserted: number; + + /** + * Ids of upserted documents. + */ + upserted: {[index: number]: any}; + + /** + * Ids of inserted documents. + */ + insertedIds: {[index: number]: any}; + }; +} + +export interface ServiceProviderBulkFindOp { + /** + * Add a remove operation + */ + remove(): ServiceProviderBulkOp; + + /** + * Add a removeOne operation + */ + removeOne(): ServiceProviderBulkOp; + + /** + * Add a replaceOne operation + */ + replaceOne(replacement: Document): ServiceProviderBulkOp; + + /** + * Add a updateOne operation + */ + updateOne(update: Document): ServiceProviderBulkOp; + + /** + * Add a update operation + */ + update(update: Document): ServiceProviderBulkOp; + + /** + * Make subsequent update operations upsert: true + */ + upsert(): ServiceProviderBulkFindOp; +} + +export default interface ServiceProviderBulkOp { + /** + * Internal state + */ + s: { + batches: BulkBatch[]; + currentUpdateBatch: BulkBatch; + currentRemoveBatch: BulkBatch; + currentInsertBatch: BulkBatch; + currentBatch: BulkBatch; + }; + + /** + * Execute the operation. 
+ */ + execute(): Promise; + + /** + * Find + */ + find(document: Document): ServiceProviderBulkFindOp; + + /** + * Insert + */ + insert(document: Document): ServiceProviderBulkOp; + +} diff --git a/packages/service-provider-core/src/index.ts b/packages/service-provider-core/src/index.ts index 2822590b78..8f61eb6272 100644 --- a/packages/service-provider-core/src/index.ts +++ b/packages/service-provider-core/src/index.ts @@ -15,6 +15,7 @@ import CliOptions from './cli-options'; import generateUri, { Scheme } from './uri-generator'; const DEFAULT_DB = 'test'; import bson from 'bson'; +import ServiceProviderBulkOp, { ServiceProviderBulkFindOp, BulkBatch } from './bulk'; export { ServiceProvider, @@ -37,5 +38,8 @@ export { Scheme, DEFAULT_DB, ServiceProviderCore, - bson + bson, + ServiceProviderBulkFindOp, + ServiceProviderBulkOp, + BulkBatch }; diff --git a/packages/service-provider-core/src/writable.ts b/packages/service-provider-core/src/writable.ts index 8cf4f81d78..74985a221c 100644 --- a/packages/service-provider-core/src/writable.ts +++ b/packages/service-provider-core/src/writable.ts @@ -400,5 +400,22 @@ export default interface Writable { newName: string, options?: Document, dbOptions?: DatabaseOptions): Promise; + + /** + * Initialize a bulk operation. 
+ * + * @param dbName + * @param collName + * @param ordered + * @param options + * @param dbOptions + */ + initializeBulkOp( + dbName: string, + collName: string, + ordered: boolean, + options?: any, + dbOptions?: any + ): Promise; } diff --git a/packages/service-provider-server/src/cli-service-provider.ts b/packages/service-provider-server/src/cli-service-provider.ts index 455590466a..c8b4c80c6b 100644 --- a/packages/service-provider-server/src/cli-service-provider.ts +++ b/packages/service-provider-server/src/cli-service-provider.ts @@ -1052,6 +1052,19 @@ class CliServiceProvider extends ServiceProviderCore implements ServiceProvider }; } } + + async initializeBulkOp( + dbName: string, + collName: string, + ordered: boolean, + options = {}, + dbOptions?: any + ): Promise { + if (ordered) { + return await this.db(dbName, dbOptions).collection(collName).initializeOrderedBulkOp(options); + } + return await this.db(dbName, dbOptions).collection(collName).initializeUnorderedBulkOp(options); + } } export default CliServiceProvider; diff --git a/packages/shell-api/src/bulk.spec.ts b/packages/shell-api/src/bulk.spec.ts new file mode 100644 index 0000000000..c6346a75a3 --- /dev/null +++ b/packages/shell-api/src/bulk.spec.ts @@ -0,0 +1,434 @@ +import { expect } from 'chai'; +import Bulk, { BulkFindOp } from './bulk'; +import { ALL_PLATFORMS, ALL_SERVER_VERSIONS, ALL_TOPOLOGIES, asShellResult } from './enums'; +import { signatures } from './decorators'; +import sinon, { StubbedInstance, stubInterface } from 'ts-sinon'; +import { bson, ServiceProvider } from '@mongosh/service-provider-core'; +import { EventEmitter } from 'events'; +import ShellInternalState from './shell-internal-state'; +import Collection from './collection'; +import { BulkWriteResult } from './result'; + +describe('Bulk API', () => { + describe('Bulk', () => { + describe('help', () => { + const apiClass: any = new Bulk({} as any, {} as any); + it('calls help function', async() => { + expect((await 
apiClass.help()[asShellResult]()).type).to.equal('Help'); + expect((await apiClass.help[asShellResult]()).type).to.equal('Help'); + }); + it('calls help function for methods', async() => { + expect((await apiClass.execute.help()[asShellResult]()).type).to.equal('Help'); + expect((await apiClass.execute.help[asShellResult]()).type).to.equal('Help'); + }); + }); + describe('signatures', () => { + it('type', () => { + expect(signatures.Bulk.type).to.equal('Bulk'); + }); + it('attributes', () => { + expect(signatures.Bulk.attributes.find).to.deep.equal({ + type: 'function', + returnsPromise: false, + returnType: { attributes: {}, type: 'unknown' }, // no async calls, so don't need to track + platforms: ALL_PLATFORMS, + topologies: ALL_TOPOLOGIES, + serverVersions: ALL_SERVER_VERSIONS + }); + }); + it('hasAsyncChild', () => { + expect(signatures.Bulk.hasAsyncChild).to.equal(true); + }); + }); + describe('Metadata', () => { + describe('asShellResult', () => { + const mongo = sinon.spy(); + const inner = { + s: { batches: [1, 2, 3], currentInsertBatch: {} as any } + } as any; + const b = new Bulk(mongo, inner); + it('value', async() => { + expect((await b[asShellResult]()).value).to.deep.equal({ nInsertOps: 0, nUpdateOps: 0, nRemoveOps: 0, nBatches: 4 }); + }); + it('type', async() => { + expect((await b[asShellResult]()).type).to.equal('Bulk'); + }); + }); + }); + ['ordered', 'unordered'].forEach((t) => { + describe(t, () => { + describe('commands', () => { + let collection: Collection; + let serviceProvider: StubbedInstance; + let bulk: Bulk; + let bus: StubbedInstance; + let internalState: ShellInternalState; + let innerStub: StubbedInstance; + const bulkWriteResult = { + ok: 1, + nInserted: 1, + insertedIds: [ 'oid' ], + nMatched: 0, + nModified: 0, + nRemoved: 0, + nUpserted: 0, + upserted: [] + }; + beforeEach(() => { + bus = stubInterface(); + serviceProvider = stubInterface(); + serviceProvider.initialDb = 'db1'; + serviceProvider.bsonLibrary = bson; + 
serviceProvider.runCommand.resolves({ ok: 1 }); + internalState = new ShellInternalState(serviceProvider, bus); + const db = internalState.currentDb; + collection = new Collection(db._mongo, db, 'coll1'); + innerStub = stubInterface(); + innerStub.s = { batches: [1, 2, 3], currentInsertBatch: 4, currentBatch: 4 }; + bulk = new Bulk(collection, innerStub, t === 'ordered'); + }); + describe('insert', () => { + it('calls innerBulk.insert and returns self', () => { + innerStub.insert.returns({ ok: 1 }); + bulk.insert({ insertedDoc: 1 }); + expect(innerStub.insert).to.have.been.calledWith({ insertedDoc: 1 }); + expect(bulk._batchCounts.nInsertOps).to.equal(1); + }); + + it('returns self', () => { + expect(bulk.insert({})).to.equal(bulk); + }); + + it('throws if innerBulk.insert throws', async() => { + const expectedError = new Error(); + innerStub.insert.throws(expectedError); + expect(() => bulk.insert({})).to.throw(expectedError); + }); + }); + describe('tojson', () => { + it('returns the batches length + currentInsert/Update/RemoveBatch?', () => { + expect(bulk.tojson()).to.deep.equal({ + nInsertOps: 0, nUpdateOps: 0, nRemoveOps: 0, nBatches: 4 + }); + }); + it('returns unknown if batches cannot be counted', () => { + const bulk2 = new Bulk({} as any, { insert: () => {} } as any, t === 'ordered').insert({}).insert({}); + expect(bulk2.tojson()).to.deep.equal({ + nInsertOps: 2, nUpdateOps: 0, nRemoveOps: 0, nBatches: 'unknown' + }); + }); + it('counts current batches', () => { + const bulk2 = new Bulk({} as any, { + insert: () => {}, + s: { + batches: [], + currentInsertBatch: {} as any, + currentUpdateBatch: {} as any, + currentRemoveBatch: {} as any, + currentBatch: {} as any + } + } as any, + t === 'ordered' + ).insert({}).insert({}); + expect(bulk2.tojson()).to.deep.equal({ + nInsertOps: 2, nUpdateOps: 0, nRemoveOps: 0, nBatches: t === 'ordered' ? 
1 : 3 + }); + }); + }); + describe('find', () => { + it('calls innerBulk.find', () => { + innerStub.find.returns({ driverFindOp: 1 }); + bulk.find({ search: 1 }); + expect(innerStub.find).to.have.been.calledWith({ search: 1 }); + }); + it('returns new BulkFindOp with arg', async() => { + innerStub.find.returns({ driverFindOp: 1 }); + const res = bulk.find({ search: 1 }); + expect((await res[asShellResult]()).type).to.equal('BulkFindOp'); + expect(res._serviceProviderBulkFindOp).to.deep.equal({ driverFindOp: 1 }); + }); + it('throws if innerBulk.find throws', () => { + const expectedError = new Error(); + innerStub.find.throws(expectedError); + expect(() => bulk.find({})).to.throw(expectedError); + }); + }); + describe('execute', async() => { + it('calls innerBulk.execute', () => { + innerStub.execute.returns({ result: bulkWriteResult }); + bulk.execute(); + expect(innerStub.execute).to.have.been.calledWith(); + }); + it('returns new BulkWriteResult', async() => { + innerStub.execute.returns({ result: bulkWriteResult }); + const res = await bulk.execute(); + expect((await res[asShellResult]()).type).to.equal('BulkWriteResult'); + expect(res).to.deep.equal( + new BulkWriteResult( + !!bulkWriteResult.ok, // acknowledged + bulkWriteResult.nInserted, + bulkWriteResult.insertedIds, + bulkWriteResult.nMatched, + bulkWriteResult.nModified, + bulkWriteResult.nRemoved, + bulkWriteResult.nUpserted, + bulkWriteResult.upserted + ) + ); + expect(bulk._executed).to.equal(true); + expect(bulk._batches).to.deep.equal([1, 2, 3, 4]); + }); + it('throws if innerBulk.execute rejects', async() => { + const expectedError = new Error(); + innerStub.execute.rejects(expectedError); + const catchedError = await bulk.execute() + .catch(e => e); + expect(catchedError).to.equal(expectedError); + }); + }); + describe('getOperations', () => { + it('returns batches', () => { + bulk._executed = true; + bulk._batches = [ + { + originalZeroIndex: 1, + batchType: 1, + operations: [{ 1: 1 }], + other: 
1 + }, + { + originalZeroIndex: 2, + batchType: 2, + operations: [{ 2: 2 }], + other: 2 + } + ]; + expect(bulk.getOperations()).to.deep.equal([ + { + originalZeroIndex: 1, + batchType: 1, + operations: [{ 1: 1 }], + }, + { + originalZeroIndex: 2, + batchType: 2, + operations: [{ 2: 2 }], + } + ]); + }); + }); + }); + }); + }); + }); + describe('BulkFindOp', () => { + describe('help', () => { + const apiClass: any = new BulkFindOp({} as any, {} as any); + it('calls help function', async() => { + expect((await apiClass.help()[asShellResult]()).type).to.equal('Help'); + expect((await apiClass.help[asShellResult]()).type).to.equal('Help'); + }); + it('calls help function for methods', async() => { + expect((await apiClass.remove.help()[asShellResult]()).type).to.equal('Help'); + expect((await apiClass.remove.help[asShellResult]()).type).to.equal('Help'); + }); + }); + describe('signatures', () => { + it('type', () => { + expect(signatures.BulkFindOp.type).to.equal('BulkFindOp'); + }); + it('attributes', () => { + expect(signatures.BulkFindOp.attributes.hint).to.deep.equal({ + type: 'function', + returnsPromise: false, + returnType: { attributes: {}, type: 'unknown' }, // no async calls, so don't need to track + platforms: ALL_PLATFORMS, + topologies: ALL_TOPOLOGIES, + serverVersions: ALL_SERVER_VERSIONS + }); + }); + it('hasAsyncChild', () => { + expect(signatures.BulkFindOp.hasAsyncChild).to.equal(false); + }); + }); + describe('Metadata', () => { + describe('asShellResult', () => { + const b = new BulkFindOp({} as any, {} as any); + it('value', async() => { + expect((await b[asShellResult]()).value).to.deep.equal('BulkFindOp'); + }); + it('type', async() => { + expect((await b[asShellResult]()).type).to.equal('BulkFindOp'); + }); + }); + }); + describe('commands', () => { + let bulk: Bulk; + let innerStub: StubbedInstance; + let bulkFindOp: BulkFindOp; + beforeEach(() => { + innerStub = stubInterface(); + innerStub.s = { batches: [1, 2, 3, 4] }; + bulk = 
stubInterface(); + bulk._batchCounts = { + nRemoveOps: 0, nInsertOps: 0, nUpdateOps: 0 + }; + bulkFindOp = new BulkFindOp(innerStub, bulk); + }); + describe('multiple batches', () => { + + }); + describe('remove', () => { + it('calls serviceProviderBulkOp.remove and returns parent', () => { + bulkFindOp.remove(); + expect(innerStub.remove).to.have.been.calledWith(); + expect(bulk._batchCounts.nRemoveOps).to.equal(1); + }); + + it('returns self', () => { + expect(bulkFindOp.remove()).to.equal(bulk); + }); + + it('throws if serviceProviderBulkOp.remove throws', async() => { + const expectedError = new Error(); + innerStub.remove.throws(expectedError); + expect(() => bulkFindOp.remove()).to.throw(expectedError); + }); + }); + describe('removeOne', () => { + it('calls serviceProviderBulkOp.removeOne and returns parent', () => { + bulkFindOp.removeOne(); + expect(innerStub.removeOne).to.have.been.calledWith(); + expect(bulk._batchCounts.nRemoveOps).to.equal(1); + }); + + it('returns self', () => { + expect(bulkFindOp.removeOne()).to.equal(bulk); + }); + + it('throws if serviceProviderBulkOp.removeOne throws', async() => { + const expectedError = new Error(); + innerStub.removeOne.throws(expectedError); + expect(() => bulkFindOp.removeOne()).to.throw(expectedError); + }); + }); + describe('upsert', () => { + it('calls serviceProviderBulkOp.upsert and returns parent', () => { + bulkFindOp.upsert(); + expect(innerStub.upsert).to.have.been.calledWith(); + expect(bulk._batchCounts.nUpdateOps).to.equal(0); + }); + + it('returns self', () => { + expect(bulkFindOp.upsert()).to.equal(bulkFindOp); + }); + + it('throws if serviceProviderBulkOp.upsert throws', async() => { + const expectedError = new Error(); + innerStub.upsert.throws(expectedError); + expect(() => bulkFindOp.upsert()).to.throw(expectedError); + }); + }); + describe('update', () => { + it('calls serviceProviderBulkOp.update and returns parent', () => { + bulkFindOp.update({ updateDoc: 1 }); + 
expect(innerStub.update).to.have.been.calledWith({ updateDoc: 1 }); + expect(bulk._batchCounts.nUpdateOps).to.equal(1); + }); + + it('calls serviceProviderBulkOp.update and returns parent when hint/arrayFilter set', () => { + bulkFindOp.hint({ hint: 1 }); + // bulkFindOp.arrayFilters(['filter']); + bulkFindOp.update({ updateDoc: 1 }); + expect(innerStub.update).to.have.been.calledWith({ + updateDoc: 1, + hint: { hint: 1 }, + // arrayFilters: [ 'filter' ] + }); + expect(bulk._batchCounts.nUpdateOps).to.equal(1); + }); + + it('returns self', () => { + expect(bulkFindOp.update({})).to.equal(bulk); + }); + + it('throws if serviceProviderBulkOp.update throws', async() => { + const expectedError = new Error(); + innerStub.update.throws(expectedError); + expect(() => bulkFindOp.update({})).to.throw(expectedError); + }); + }); + describe('updateOne', () => { + it('calls serviceProviderBulkOp.updateOne and returns parent', () => { + bulkFindOp.updateOne({ updateOneDoc: 1 }); + expect(innerStub.updateOne).to.have.been.calledWith({ updateOneDoc: 1 }); + expect(bulk._batchCounts.nUpdateOps).to.equal(1); + }); + + it('calls serviceProviderBulkOp.updateOne and returns parent when hint/arrayFilter set', () => { + bulkFindOp.hint({ hint: 1 }); + // bulkFindOp.arrayFilters(['filter']); + bulkFindOp.updateOne({ updateOneDoc: 1 }); + expect(innerStub.updateOne).to.have.been.calledWith({ + updateOneDoc: 1, + hint: { hint: 1 }, + // arrayFilters: [ 'filter' ] + }); + expect(bulk._batchCounts.nUpdateOps).to.equal(1); + }); + + + it('returns self', () => { + expect(bulkFindOp.updateOne({})).to.equal(bulk); + }); + + it('throws if serviceProviderBulkOp.updateOne throws', async() => { + const expectedError = new Error(); + innerStub.updateOne.throws(expectedError); + expect(() => bulkFindOp.updateOne({})).to.throw(expectedError); + }); + }); + describe('replaceOne', () => { + it('calls serviceProviderBulkOp.replaceOne and returns parent', () => { + bulkFindOp.replaceOne({ replaceOneDoc: 1 
}); + expect(innerStub.replaceOne).to.have.been.calledWith({ replaceOneDoc: 1 }); + expect(bulk._batchCounts.nUpdateOps).to.equal(1); + }); + + it('calls serviceProviderBulkOp.replaceOne and returns parent when hint set', () => { + bulkFindOp.hint({ hint: 1 }); + bulkFindOp.replaceOne({ replaceOneDoc: 1 }); + expect(innerStub.replaceOne).to.have.been.calledWith({ + replaceOneDoc: 1, + hint: { hint: 1 } + }); + expect(bulk._batchCounts.nUpdateOps).to.equal(1); + }); + + it('returns self', () => { + expect(bulkFindOp.replaceOne({})).to.equal(bulk); + }); + + it('throws if serviceProviderBulkOp.replaceOne throws', async() => { + const expectedError = new Error(); + innerStub.replaceOne.throws(expectedError); + expect(() => bulkFindOp.replaceOne({})).to.throw(expectedError); + }); + }); + describe('hint', () => { + it('sets the attribute and returns self', () => { + const attr = { hint: 1 }; + expect(bulkFindOp.hint(attr)).to.equal(bulkFindOp); + expect(bulkFindOp._hint).to.deep.equal(attr); + }); + }); + // describe('arrayFilters', () => { + // it('sets the attribute and returns self', () => { + // const attr = [1]; + // expect(bulkFindOp.arrayFilters(attr)).to.equal(bulkFindOp); + // expect(bulkFindOp._arrayFilters).to.deep.equal(attr); + // }); + // }); + }); + }); +}); diff --git a/packages/shell-api/src/bulk.ts b/packages/shell-api/src/bulk.ts new file mode 100644 index 0000000000..0fa392d665 --- /dev/null +++ b/packages/shell-api/src/bulk.ts @@ -0,0 +1,249 @@ +import { hasAsyncChild, returnsPromise, ShellApiClass, shellApiClassDefault } from './decorators'; +import Mongo from './mongo'; +import { MongoshInternalError, MongoshInvalidInputError, MongoshUnimplementedError } from '@mongosh/errors'; +import { + Document, + WriteConcern, + ServiceProviderBulkOp, + ServiceProviderBulkFindOp, + BulkBatch +} from '@mongosh/service-provider-core'; +import { assertArgsDefined } from './helpers'; +import { BulkWriteResult } from './result'; + +@shellApiClassDefault +export 
class BulkFindOp { + _serviceProviderBulkFindOp: ServiceProviderBulkFindOp; + _parentBulk: Bulk; + _hint: Document; + _arrayFilters: Document[]; + constructor(innerFind: ServiceProviderBulkFindOp, parentBulk: Bulk) { + this._serviceProviderBulkFindOp = innerFind; + this._parentBulk = parentBulk; + } + + _asPrintable(): string { + return 'BulkFindOp'; + } + + // Blocked by NODE-2757 + collation(): BulkFindOp { + throw new MongoshUnimplementedError( + 'collation method on fluent Bulk API is not currently supported. ' + + 'As an alternative, consider using the \'db.collection.bulkWrite(...)\' helper ' + + 'which accepts \'collation\' as a field in the operations.' + ); + } + + // Blocked by NODE-2751 + arrayFilters(): BulkFindOp { + throw new MongoshUnimplementedError( + 'arrayFilters method on fluent Bulk API is not currently supported.' + ); + } + + hint(hintDoc: Document): BulkFindOp { + assertArgsDefined(hintDoc); + this._hint = hintDoc; + return this; + } + + remove(): Bulk { + this._parentBulk._batchCounts.nRemoveOps++; + this._serviceProviderBulkFindOp.remove(); + return this._parentBulk; + } + + removeOne(): Bulk { + this._parentBulk._batchCounts.nRemoveOps++; + this._serviceProviderBulkFindOp.removeOne(); + return this._parentBulk; + } + + replaceOne(replacement: Document): Bulk { + this._parentBulk._batchCounts.nUpdateOps++; + assertArgsDefined(replacement); + const op = { ...replacement }; + if (this._hint) { + op.hint = this._hint; + } + this._serviceProviderBulkFindOp.replaceOne(op); + return this._parentBulk; + } + + updateOne(update: Document): Bulk { + this._parentBulk._batchCounts.nUpdateOps++; + assertArgsDefined(update); + const op = { ...update }; + if (this._hint) { + op.hint = this._hint; + } + if (this._arrayFilters) { + op.arrayFilters = this._arrayFilters; + } + this._serviceProviderBulkFindOp.updateOne(op); + return this._parentBulk; + } + + update(update: Document): Bulk { + this._parentBulk._batchCounts.nUpdateOps++; + 
assertArgsDefined(update); + const op = { ...update }; + if (this._hint) { + op.hint = this._hint; + } + if (this._arrayFilters) { + op.arrayFilters = this._arrayFilters; + } + this._serviceProviderBulkFindOp.update(op); + return this._parentBulk; + } + + upsert(): BulkFindOp { + assertArgsDefined(); + this._serviceProviderBulkFindOp.upsert(); + return this; + } +} + + +@shellApiClassDefault +@hasAsyncChild +export default class Bulk extends ShellApiClass { + _mongo: Mongo; + _collection: any; // to avoid circular ref + _batchCounts: any; + _executed: boolean; + _batches: BulkBatch[]; + _serviceProviderBulkOp: ServiceProviderBulkOp; + _ordered: boolean; + + constructor(collection: any, innerBulk: ServiceProviderBulkOp, ordered = false) { + super(); + this._collection = collection; + this._mongo = collection._mongo; + this._serviceProviderBulkOp = innerBulk; + this._batches = []; + this._batchCounts = { + nInsertOps: 0, + nUpdateOps: 0, + nRemoveOps: 0 + }; + this._executed = false; + this._ordered = ordered; + } + + private _checkInternalShape(innerBulkState): boolean { + return ( + innerBulkState !== undefined && + Array.isArray(innerBulkState.batches) + ); + } + + private _getBatches(): BulkBatch[] { + const batches = [...this._serviceProviderBulkOp.s.batches]; + if (this._ordered) { + if (this._serviceProviderBulkOp.s.currentBatch) { + batches.push(this._serviceProviderBulkOp.s.currentBatch); + } + return batches; + } + if (this._serviceProviderBulkOp.s.currentInsertBatch) { + batches.push(this._serviceProviderBulkOp.s.currentInsertBatch); + } + if (this._serviceProviderBulkOp.s.currentUpdateBatch) { + batches.push(this._serviceProviderBulkOp.s.currentUpdateBatch); + } + if (this._serviceProviderBulkOp.s.currentRemoveBatch) { + batches.push(this._serviceProviderBulkOp.s.currentRemoveBatch); + } + return batches; + } + + /** + * Internal method to determine what is printed for this class. 
+ */ + _asPrintable(): any { + return this.tojson(); + } + + /** + * Internal helper for emitting collection API call events. + * + * @param methodName + * @param methodArguments + * @private + */ + private _emitBulkApiCall(methodName: string, methodArguments: Document = {}): void { + this._mongo._internalState.emitApiCall({ + method: methodName, + class: 'Bulk', + db: this._collection._database._name, + coll: this._collection._name, + arguments: methodArguments + }); + } + + @returnsPromise + async execute(writeConcern?: WriteConcern): Promise { + if (!this._executed && this._checkInternalShape(this._serviceProviderBulkOp.s)) { + this._batches = this._getBatches(); + } + const result = await this._serviceProviderBulkOp.execute(); + this._executed = true; + this._emitBulkApiCall('execute', { writeConcern: writeConcern }); + return new BulkWriteResult( + !!result.result.ok, // acknowledged + result.result.nInserted, + result.result.insertedIds, + result.result.nMatched, + result.result.nModified, + result.result.nRemoved, + result.result.nUpserted, + result.result.upserted + ); + } + + find(query: Document): BulkFindOp { + assertArgsDefined(query); + return new BulkFindOp(this._serviceProviderBulkOp.find(query), this); + } + + insert(document: Document): Bulk { + this._batchCounts.nInsertOps++; + assertArgsDefined(document); + this._serviceProviderBulkOp.insert(document); + return this; + } + + tojson(): Record<'nInsertOps' | 'nUpdateOps' | 'nRemoveOps' | 'nBatches', number> { + let batches = -1; + if (this._checkInternalShape(this._serviceProviderBulkOp.s)) { + batches = this._getBatches().length; + } + + return { + ...this._batchCounts, + nBatches: batches < 0 ? 
'unknown' : batches + }; + } + + toString(): string { + return JSON.stringify(this.tojson()); + } + + getOperations(): BulkBatch[] { + if (!this._checkInternalShape(this._serviceProviderBulkOp.s)) { + throw new MongoshInternalError('Bulk error: cannot access operation list because internal structure of MongoDB Bulk class has changed.'); + } + if (!this._executed) { + throw new MongoshInvalidInputError('Cannot call getOperations on an unexecuted Bulk operation'); + } + return this._batches.map((b) => ({ + originalZeroIndex: b.originalZeroIndex, + batchType: b.batchType, + operations: b.operations + })); + } +} + diff --git a/packages/shell-api/src/collection.spec.ts b/packages/shell-api/src/collection.spec.ts index 323880141a..75001bfdce 100644 --- a/packages/shell-api/src/collection.spec.ts +++ b/packages/shell-api/src/collection.spec.ts @@ -895,5 +895,60 @@ describe('Collection', () => { expect(catchedError).to.equal(expectedError); }); }); + + describe('initializeUnorderedBulkOp', () => { + it('calls serviceProvider.aggregate on the database with options', async() => { + await collection.initializeUnorderedBulkOp(); + + expect(serviceProvider.initializeBulkOp).to.have.been.calledWith( + database._name, + collection._name, + false + ); + }); + + it('returns Bulk wrapping whatever serviceProvider returns', async() => { + const expectedResult = { s: { batches: [] } }; + serviceProvider.initializeBulkOp.resolves(expectedResult); + const result = await collection.initializeUnorderedBulkOp(); + expect((await result[asShellResult]()).type).to.equal('Bulk'); + expect(result._serviceProviderBulkOp).to.deep.equal(expectedResult); + }); + + it('throws if serviceProvider.runCommand rejects', async() => { + const expectedError = new Error(); + serviceProvider.initializeBulkOp.throws(expectedError); + const catchedError = await collection.initializeUnorderedBulkOp() + .catch(e => e); + expect(catchedError).to.equal(expectedError); + }); + }); + 
describe('initializeOrderedBulkOp', () => { + it('calls serviceProvider.aggregate on the database with options', async() => { + await collection.initializeOrderedBulkOp(); + + expect(serviceProvider.initializeBulkOp).to.have.been.calledWith( + database._name, + collection._name, + true + ); + }); + + it('returns Bulk wrapped in whatever serviceProvider returns', async() => { + const expectedResult = { s: { batches: [] } }; + serviceProvider.initializeBulkOp.resolves(expectedResult); + const result = await collection.initializeOrderedBulkOp(); + expect((await result[asShellResult]()).type).to.equal('Bulk'); + expect(result._serviceProviderBulkOp).to.deep.equal(expectedResult); + }); + + it('throws if serviceProvider rejects', async() => { + const expectedError = new Error(); + serviceProvider.initializeBulkOp.throws(expectedError); + const catchedError = await collection.initializeOrderedBulkOp() + .catch(e => e); + expect(catchedError).to.equal(expectedError); + }); + }); }); }); diff --git a/packages/shell-api/src/collection.ts b/packages/shell-api/src/collection.ts index b866d910a7..89d15a74ec 100644 --- a/packages/shell-api/src/collection.ts +++ b/packages/shell-api/src/collection.ts @@ -23,6 +23,7 @@ import { UpdateResult } from './index'; import { MongoshInvalidInputError, MongoshRuntimeError } from '@mongosh/errors'; +import Bulk from './bulk'; @shellApiClassDefault @hasAsyncChild @@ -1263,6 +1264,7 @@ export default class Collection extends ShellApiClass { options.scale = options.scale || 1; options.indexDetails = options.indexDetails || false; + this._emitCollectionApiCall('stats', { options }); const result = await this._mongo._serviceProvider.runCommand( this._database._name, { @@ -1316,6 +1318,7 @@ export default class Collection extends ShellApiClass { @returnsPromise async latencyStats(options = {}): Promise { + this._emitCollectionApiCall('latencyStats', { options }); const pipeline = [{ $collStats: { latencyStats: options } }]; const providerCursor = 
this._mongo._serviceProvider.aggregate( this._database._name, @@ -1325,4 +1328,28 @@ export default class Collection extends ShellApiClass { ); return await providerCursor.toArray(); } + + @returnsPromise + @returnType('Bulk') + async initializeOrderedBulkOp(): Promise { + this._emitCollectionApiCall('initializeOrderedBulkOp'); + const innerBulk = await this._mongo._serviceProvider.initializeBulkOp( + this._database._name, + this._name, + true + ); + return new Bulk(this, innerBulk, true); + } + + @returnsPromise + @returnType('Bulk') + async initializeUnorderedBulkOp(): Promise { + this._emitCollectionApiCall('initializeUnorderedBulkOp'); + const innerBulk = await this._mongo._serviceProvider.initializeBulkOp( + this._database._name, + this._name, + false + ); + return new Bulk(this, innerBulk); + } } diff --git a/packages/shell-api/src/integration.spec.ts b/packages/shell-api/src/integration.spec.ts index 815d7ac2d9..4a27640240 100644 --- a/packages/shell-api/src/integration.spec.ts +++ b/packages/shell-api/src/integration.spec.ts @@ -1,6 +1,11 @@ import { expect } from 'chai'; import { CliServiceProvider } from '../../service-provider-server'; // avoid cyclic dep just for test -import { Cursor, Explainable, AggregationCursor, ShellInternalState, Mongo, ShellApi, asShellResult } from './index'; +import ShellInternalState from './shell-internal-state'; +import Cursor from './cursor'; +import Explainable from './explainable'; +import AggregationCursor from './aggregation-cursor'; +import ShellApi from './shell-api'; +import { asShellResult } from './enums'; import { startTestServer } from '../../../testing/integration-testing-hooks'; describe('Shell API (integration)', function() { @@ -63,8 +68,7 @@ describe('Shell API (integration)', function() { internalState = new ShellInternalState(serviceProvider); shellApi = new ShellApi(internalState); - mongo = new Mongo(internalState); - mongo.use(dbName); + mongo = internalState.currentDb.getMongo(); database = 
mongo.getDB(dbName); collection = database.getCollection(collectionName); await database.dropDatabase(); @@ -885,4 +889,369 @@ describe('Shell API (integration)', function() { }); }); }); + + describe('Bulk API', async() => { + let bulk; + ['initializeUnorderedBulkOp', 'initializeOrderedBulkOp'].forEach((m) => { + describe(m, () => { + describe('insert', () => { + beforeEach(async() => { + bulk = await collection[m](); + for (let i = 0; i < 1000; i++) { + bulk.insert({ x: i }); + } + expect(await collection.countDocuments()).to.equal(0); + await bulk.execute(); + }); + it('tojson returns correctly', async() => { + expect(bulk.tojson()).to.deep.equal({ nInsertOps: 1000, nUpdateOps: 0, nRemoveOps: 0, nBatches: 1 }); + }); + it('executes', async() => { + expect(await collection.countDocuments()).to.equal(1000); + }); + it('getOperations returns correctly', () => { + const ops = bulk.getOperations(); + expect(ops.length).to.equal(1); + const op = ops[0]; + expect(op.originalZeroIndex).to.equal(0); + expect(op.batchType).to.equal(1); + expect(op.operations.length).to.equal(1000); + expect(op.operations[99].x).to.equal(99); + }); + }); + describe('remove', async() => { + beforeEach(async() => { + bulk = await collection[m](); + for (let i = 0; i < 1000; i++) { + await collection.insertOne({ x: i }); + } + expect(await collection.countDocuments()).to.equal(1000); + bulk.find({ x: { $mod: [ 2, 0 ] } }).remove(); + await bulk.execute(); + }); + it('tojson returns correctly', async() => { + expect(bulk.tojson()).to.deep.equal({ nInsertOps: 0, nUpdateOps: 0, nRemoveOps: 1, nBatches: 1 }); + }); + it('executes', async() => { + expect(await collection.countDocuments()).to.equal(500); + }); + it('getOperations returns correctly', () => { + const ops = bulk.getOperations(); + expect(ops.length).to.equal(1); + const op = ops[0]; + expect(op.originalZeroIndex).to.equal(0); + expect(op.batchType).to.equal(3); + expect(op.operations.length).to.equal(1); + }); + }); + 
describe('removeOne', async() => { + beforeEach(async() => { + bulk = await collection[m](); + for (let i = 0; i < 1000; i++) { + await collection.insertOne({ x: i }); + } + expect(await collection.countDocuments()).to.equal(1000); + bulk.find({ x: { $mod: [ 2, 0 ] } }).removeOne(); + await bulk.execute(); + }); + it('tojson returns correctly', async() => { + expect(bulk.tojson()).to.deep.equal({ nInsertOps: 0, nUpdateOps: 0, nRemoveOps: 1, nBatches: 1 }); + }); + it('executes', async() => { + expect(await collection.countDocuments()).to.equal(999); + }); + it('getOperations returns correctly', () => { + const ops = bulk.getOperations(); + expect(ops.length).to.equal(1); + const op = ops[0]; + expect(op.originalZeroIndex).to.equal(0); + expect(op.batchType).to.equal(3); + expect(op.operations.length).to.equal(1); + }); + }); + describe('replaceOne', async() => { + beforeEach(async() => { + bulk = await collection[m](); + for (let i = 0; i < 1000; i++) { + await collection.insertOne({ x: i }); + } + expect(await collection.countDocuments()).to.equal(1000); + bulk.find({ x: 2 }).replaceOne({ x: 1 }); + await bulk.execute(); + }); + it('tojson returns correctly', async() => { + expect(bulk.tojson()).to.deep.equal({ nInsertOps: 0, nUpdateOps: 1, nRemoveOps: 0, nBatches: 1 }); + }); + it('executes', async() => { + expect(await collection.countDocuments({ x: 1 })).to.equal(2); + expect(await collection.countDocuments({ x: 2 })).to.equal(0); + expect(await collection.countDocuments()).to.equal(1000); + }); + it('getOperations returns correctly', () => { + const ops = bulk.getOperations(); + expect(ops.length).to.equal(1); + const op = ops[0]; + expect(op.originalZeroIndex).to.equal(0); + expect(op.batchType).to.equal(2); + expect(op.operations.length).to.equal(1); + }); + }); + describe('updateOne', async() => { + beforeEach(async() => { + bulk = await collection[m](); + for (let i = 0; i < 1000; i++) { + await collection.insertOne({ x: i }); + } + expect(await 
collection.countDocuments()).to.equal(1000); + bulk.find({ x: 2 }).updateOne({ x: 1 }); + await bulk.execute(); + }); + it('tojson returns correctly', async() => { + expect(bulk.tojson()).to.deep.equal({ nInsertOps: 0, nUpdateOps: 1, nRemoveOps: 0, nBatches: 1 }); + }); + it('executes', async() => { + expect(await collection.countDocuments({ x: 1 })).to.equal(2); + expect(await collection.countDocuments({ x: 2 })).to.equal(0); + expect(await collection.countDocuments()).to.equal(1000); + }); + it('getOperations returns correctly', () => { + const ops = bulk.getOperations(); + expect(ops.length).to.equal(1); + const op = ops[0]; + expect(op.originalZeroIndex).to.equal(0); + expect(op.batchType).to.equal(2); + expect(op.operations.length).to.equal(1); + }); + }); + describe('update', async() => { + beforeEach(async() => { + bulk = await collection[m](); + for (let i = 0; i < 1000; i++) { + await collection.insertOne({ x: i }); + } + expect(await collection.countDocuments()).to.equal(1000); + bulk.find({ x: { $mod: [ 2, 0 ] } }).update({ $inc: { x: 1 } }); + await bulk.execute(); + }); + it('tojson returns correctly', async() => { + expect(bulk.tojson()).to.deep.equal({ nInsertOps: 0, nUpdateOps: 1, nRemoveOps: 0, nBatches: 1 }); + }); + it('executes', async() => { + expect(await collection.countDocuments()).to.equal(1000); + expect(await collection.countDocuments({ x: { $mod: [ 2, 0 ] } })).to.equal(0); + }); + it('getOperations returns correctly', () => { + const ops = bulk.getOperations(); + expect(ops.length).to.equal(1); + const op = ops[0]; + expect(op.originalZeroIndex).to.equal(0); + expect(op.batchType).to.equal(2); + expect(op.operations.length).to.equal(1); + }); + }); + describe('upsert().update', async() => { + beforeEach(async() => { + bulk = await collection[m](); + for (let i = 0; i < 1000; i++) { + await collection.insertOne({ x: i }); + } + expect(await collection.countDocuments()).to.equal(1000); + expect(await collection.countDocuments({ y: { 
$exists: true } })).to.equal(0); + bulk.find({ y: 0 }).upsert().update({ $set: { y: 1 } }); + await bulk.execute(); + }); + afterEach(async() => { + await collection.drop(); + }); + it('tojson returns correctly', async() => { + expect(bulk.tojson()).to.deep.equal({ nInsertOps: 0, nUpdateOps: 1, nRemoveOps: 0, nBatches: 1 }); + }); + it('executes', async() => { + expect(await collection.countDocuments()).to.equal(1001); + expect(await collection.countDocuments({ y: { $exists: true } })).to.equal(1); + }); + it('getOperations returns correctly', () => { + const ops = bulk.getOperations(); + expect(ops.length).to.equal(1); + const op = ops[0]; + expect(op.originalZeroIndex).to.equal(0); + expect(op.batchType).to.equal(2); + expect(op.operations.length).to.equal(1); + }); + }); + describe('upsert().updateOne', async() => { + beforeEach(async() => { + bulk = await collection[m](); + for (let i = 0; i < 1000; i++) { + await collection.insertOne({ x: i }); + } + expect(await collection.countDocuments()).to.equal(1000); + expect(await collection.countDocuments({ y: { $exists: true } })).to.equal(0); + bulk.find({ y: 0 }).upsert().updateOne({ $set: { y: 1 } }); + await bulk.execute(); + }); + it('tojson returns correctly', async() => { + expect(bulk.tojson()).to.deep.equal({ nInsertOps: 0, nUpdateOps: 1, nRemoveOps: 0, nBatches: 1 }); + }); + it('executes', async() => { + expect(await collection.countDocuments()).to.equal(1001); + expect(await collection.countDocuments({ y: { $exists: true } })).to.equal(1); + }); + it('getOperations returns correctly', () => { + const ops = bulk.getOperations(); + expect(ops.length).to.equal(1); + const op = ops[0]; + expect(op.originalZeroIndex).to.equal(0); + expect(op.batchType).to.equal(2); + expect(op.operations.length).to.equal(1); + }); + }); + describe('update without upsert', async() => { + beforeEach(async() => { + bulk = await collection[m](); + for (let i = 0; i < 1000; i++) { + await collection.insertOne({ x: i }); + } + 
expect(await collection.countDocuments()).to.equal(1000); + expect(await collection.countDocuments({ y: { $exists: true } })).to.equal(0); + bulk.find({ y: 0 }).update({ $set: { y: 1 } }); + await bulk.execute(); + }); + it('executes', async() => { + expect(await collection.countDocuments()).to.equal(1000); + expect(await collection.countDocuments({ y: { $exists: true } })).to.equal(0); + }); + }); + describe('multiple batches', async() => { + beforeEach(async() => { + bulk = await collection[m](); + for (let i = 0; i < 1000; i++) { + bulk.insert({ x: 1 }); + } + expect(bulk.tojson().nBatches).to.equal(1); + bulk.find({ x: 1 }).remove(); + expect(bulk.tojson().nBatches).to.equal(2); + bulk.find({ x: 2 }).update({ $inc: { x: 1 } }); + expect(bulk.tojson().nBatches).to.equal(3); + for (let i = 0; i < 1000; i++) { + bulk.insert({ x: 1 }); + } + }); + it('updates count depending on ordered or not', () => { + expect(bulk.tojson().nBatches).to.equal(m === 'initializeUnorderedBulkOp' ? 3 : 4); + }); + }); + // NOTE: blocked by NODE-2751 + // describe('arrayFilters().update', async() => { + // beforeEach(async() => { + // bulk = await collection[m](); + // for (let i = 0; i < 10; i++) { + // await collection.insertOne({ x: i, array: [1, -1] }); + // } + // expect(await collection.countDocuments({ x: { $exists: true } })).to.equal(10); + // bulk.find({ x: { $exists: true } }).arrayFilters([{ element: { $gte: 0 } }]).update({ $set: { 'arr.$[element]': 1 } }); + // await bulk.execute(); + // }); + // afterEach(async() => { + // await collection.drop(); + // }); + // it('tojson returns correctly', async() => { + // expect(bulk.tojson()).to.deep.equal({ nInsertOps: 0, nUpdateOps: 1, nRemoveOps: 0, nBatches: 1 }); + // }); + // it('executes', async() => { + // expect(await collection.countDocuments()).to.equal(10); + // expect(await collection.countDocuments({ arr: [ -1, -1 ] })).to.equal(10); + // expect(await collection.countDocuments({ arr: [ 1, -1 ] })).to.equal(0); + // }); 
+ // it('getOperations returns correctly', () => { + // const ops = bulk.getOperations(); + // expect(ops.length).to.equal(1); + // const op = ops[0]; + // expect(op.originalZeroIndex).to.equal(0); + // expect(op.batchType).to.equal(2); + // expect(op.operations.length).to.equal(1); + // }); + // }); + describe('error states', () => { + it('cannot be executed twice', async() => { + bulk = await collection[m](); + bulk.insert({ x: 1 }); + await bulk.execute(); + try { + await bulk.execute(); + } catch (err) { + expect(err.name).to.equal('BulkWriteError'); + return; + } + expect.fail('Error not thrown'); + }); + it('getOperations fails before execute', async() => { + bulk = await collection[m](); + bulk.insert({ x: 1 }); + try { + bulk.getOperations(); + } catch (err) { + expect(err.name).to.equal('MongoshInvalidInputError'); + return; + } + expect.fail('Error not thrown'); + }); + it('No ops', async() => { + bulk = await collection[m](); + try { + await bulk.execute(); + } catch (err) { + expect(err.name).to.equal('MongoError'); + return; + } + expect.fail('Error not thrown'); + }); + it('Driver error', async() => { + bulk = await collection[m](); + bulk.find({}).update({ x: 1 }); + try { + await bulk.execute(); + } catch (err) { + expect(err.name).to.equal('BulkWriteError'); + return; + } + expect.fail('Error not thrown'); + }); + it('Driver structure change', async() => { + bulk = await collection[m](); + bulk.insert({}); + await bulk.execute(); + bulk._serviceProviderBulkOp.s = undefined; + try { + bulk.getOperations(); + } catch (err) { + expect(err.name).to.equal('MongoshInternalError'); + return; + } + expect.fail('Error not thrown'); + }); + it('collation', async() => { + bulk = await collection[m](); + try { + await bulk.find({}).collation({}); + } catch (err) { + expect(err.name).to.equal('MongoshUnimplementedError'); + return; + } + expect.fail('Error not thrown'); + }); + it('arrayFilters', async() => { + bulk = await collection[m](); + try { + await 
bulk.find({}).arrayFilters([{}]); + } catch (err) { + expect(err.name).to.equal('MongoshUnimplementedError'); + return; + } + expect.fail('Error not thrown'); + }); + }); + }); + }); + }); });