From 319fcd08be1fbda23fa2d056de132656f0a014aa Mon Sep 17 00:00:00 2001 From: Andy Mina Date: Thu, 15 Jul 2021 00:22:01 -0400 Subject: [PATCH 01/19] refactor(NODE-3404): implement MongoRuntimeError children --- src/change_stream.ts | 38 ++++++++++++++++++--------- src/cmap/connect.ts | 5 ++-- src/cmap/message_stream.ts | 4 +-- src/cmap/wire_protocol/compression.ts | 10 +++---- src/connection_string.ts | 14 +++++----- src/gridfs/download.ts | 24 +++++++++-------- src/gridfs/upload.ts | 15 ++++++++--- 7 files changed, 65 insertions(+), 45 deletions(-) diff --git a/src/change_stream.ts b/src/change_stream.ts index aab273b06c0..2eb1259ec1c 100644 --- a/src/change_stream.ts +++ b/src/change_stream.ts @@ -1,5 +1,11 @@ import Denque = require('denque'); -import { MongoError, AnyError, isResumableError, MongoDriverError } from './error'; +import { + MongoError, + AnyError, + isResumableError, + MongoDriverError, + MongoChangeStreamError +} from './error'; import { AggregateOperation, AggregateOptions } from './operations/aggregate'; import { maxWireVersion, @@ -259,9 +265,8 @@ export class ChangeStream extends TypedEven } else if (parent instanceof MongoClient) { this.type = CHANGE_DOMAIN_TYPES.CLUSTER; } else { - // TODO(NODE-3404): Replace this with MongoChangeStreamError - throw new MongoDriverError( - 'Parent provided to ChangeStream constructor must an instance of Collection, Db, or MongoClient' + throw new MongoChangeStreamError( + 'Argument "parent" provided to ChangeStream constructor must be an instance of Collection, Db, or MongoClient' ); } @@ -365,8 +370,7 @@ export class ChangeStream extends TypedEven */ stream(options?: CursorStreamOptions): Readable { this.streamOptions = options; - // TODO(NODE-3404): Replace this with MongoChangeStreamError - if (!this.cursor) throw new MongoDriverError(NO_CURSOR_ERROR); + if (!this.cursor) throw new MongoChangeStreamError(NO_CURSOR_ERROR); return this.cursor.stream(options); } @@ -543,8 +547,8 @@ const CHANGE_STREAM_EVENTS = [ function setIsEmitter(changeStream: ChangeStream): void { if (changeStream[kMode] === 'iterator') { - throw new MongoDriverError( - 'Cannot use ChangeStream as an EventEmitter after using as an iterator' + throw new MongoChangeStreamError( + "ChangeStream can't be used as an EventEmitter after being used as an iterator" ); } changeStream[kMode] = 'emitter'; @@ -552,8 +556,8 @@ function setIsEmitter(changeStream: ChangeStream): void { function setIsIterator(changeStream: ChangeStream): void { if (changeStream[kMode] === 'emitter') { - throw new MongoDriverError( - 'Cannot use ChangeStream as iterator after using as an EventEmitter' + throw new MongoChangeStreamError( + "ChangeStream can't be used as an EventEmitter after being used as an iterator" ); } changeStream[kMode] = 'iterator'; @@ -630,6 +634,7 @@ function waitForTopologyConnected( } if (calculateDurationInMs(start) > timeout) { + // TODO: Replace with MongoNetworkTimeoutError return callback(new MongoDriverError('Timed out waiting for connection')); } @@ -676,17 +681,23 @@ function processNewChange( callback?: Callback> ) { if (changeStream[kClosed]) { + // TODO(NODE-3405): Replace with MongoStreamClosedError if (callback) callback(new MongoDriverError(CHANGESTREAM_CLOSED_ERROR)); return; } // a null change means the cursor has been notified, implicitly closing the change stream if (change == null) { + // TODO(NODE-3405): Replace with MongoStreamClosedError return closeWithError(changeStream, new MongoDriverError(CHANGESTREAM_CLOSED_ERROR), callback); } if (change && 
!change._id) { - return closeWithError(changeStream, new MongoDriverError(NO_RESUME_TOKEN_ERROR), callback); + return closeWithError( + changeStream, + new MongoChangeStreamError(NO_RESUME_TOKEN_ERROR), + callback + ); } // cache the resume token @@ -710,6 +721,7 @@ function processError( // If the change stream has been closed explicitly, do not process error. if (changeStream[kClosed]) { + // TODO(NODE-3405): Replace with MongoStreamClosedError if (callback) callback(new MongoDriverError(CHANGESTREAM_CLOSED_ERROR)); return; } @@ -770,6 +782,7 @@ function processError( */ function getCursor(changeStream: ChangeStream, callback: Callback>) { if (changeStream[kClosed]) { + // TODO(NODE-3405): Replace with MongoStreamClosedError callback(new MongoDriverError(CHANGESTREAM_CLOSED_ERROR)); return; } @@ -795,11 +808,12 @@ function processResumeQueue(changeStream: ChangeStream, err?: const request = changeStream[kResumeQueue].pop(); if (!err) { if (changeStream[kClosed]) { + // TODO(NODE-3405): Replace with MongoStreamClosedError request(new MongoDriverError(CHANGESTREAM_CLOSED_ERROR)); return; } if (!changeStream.cursor) { - request(new MongoDriverError(NO_CURSOR_ERROR)); + request(new MongoChangeStreamError(NO_CURSOR_ERROR)); return; } } diff --git a/src/cmap/connect.ts b/src/cmap/connect.ts index 3cb4ea73f90..c4cc8755c36 100644 --- a/src/cmap/connect.ts +++ b/src/cmap/connect.ts @@ -7,8 +7,9 @@ import { AnyError, MongoDriverError, MongoCompatibilityError, + MongoInvalidArgumentError, MongoServerError, - MongoInvalidArgumentError + MongoURIError } from '../error'; import { AUTH_PROVIDERS, AuthMechanism } from './auth/defaultAuthProviders'; import { AuthContext } from './auth/auth_provider'; @@ -284,7 +285,7 @@ function parseConnectOptions(options: ConnectionOptions): SocketConnectOpts { } else { // This should never happen since we set up HostAddresses // But if we don't throw here the socket could hang until timeout - throw new MongoDriverError(`Unexpected HostAddress ${JSON.stringify(hostAddress)}`); + throw new MongoURIError(`Unexpected HostAddress ${JSON.stringify(hostAddress)}`); } } diff --git a/src/cmap/message_stream.ts b/src/cmap/message_stream.ts index 569c89dabea..f59b9069881 100644 --- a/src/cmap/message_stream.ts +++ b/src/cmap/message_stream.ts @@ -1,6 +1,6 @@ import { Duplex, DuplexOptions } from 'stream'; import { Response, Msg, BinMsg, Query, WriteProtocolMessageType, MessageHeader } from './commands'; -import { MongoDriverError, MongoParseError } from '../error'; +import { MongoDecompressionError, MongoDriverError, MongoParseError } from '../error'; import { OP_COMPRESSED, OP_MSG } from './wire_protocol/constants'; import { compress, @@ -191,7 +191,7 @@ function processIncomingData(stream: MessageStream, callback: Callback) if (messageBody.length !== messageHeader.length) { callback( - new MongoDriverError( + new MongoDecompressionError( 'Decompressing a compressed message from the server failed. The message is corrupt.' 
) ); diff --git a/src/cmap/wire_protocol/compression.ts b/src/cmap/wire_protocol/compression.ts index 576d8e7badd..92e09f2197e 100644 --- a/src/cmap/wire_protocol/compression.ts +++ b/src/cmap/wire_protocol/compression.ts @@ -3,7 +3,7 @@ import type { Callback } from '../../utils'; import type { OperationDescription } from '../message_stream'; import { Snappy } from '../../deps'; -import { MongoDriverError } from '../../error'; +import { MongoCompressionError, MongoDecompressionError, MongoDriverError } from '../../error'; /** @public */ export const Compressor = Object.freeze({ @@ -53,10 +53,8 @@ export function compress( zlib.deflate(dataToBeCompressed, zlibOptions, callback as zlib.CompressCallback); break; default: - throw new MongoDriverError( - 'Attempt to compress message using unknown compressor "' + - self.options.agreedCompressor + - '".' + throw new MongoCompressionError( + `Unknown compressor ${self.options.agreedCompressor} failed to compress` ); } } @@ -68,7 +66,7 @@ export function decompress( callback: Callback ): void { if (compressorID < 0 || compressorID > Math.max(2)) { - throw new MongoDriverError( + throw new MongoDecompressionError( `Server sent message compressed using an unsupported compressor.` + ` (Received compressor ID ${compressorID})` ); diff --git a/src/connection_string.ts b/src/connection_string.ts index dc661b4fae4..d06128c2dd6 100644 --- a/src/connection_string.ts +++ b/src/connection_string.ts @@ -6,7 +6,7 @@ import { AuthMechanism } from './cmap/auth/defaultAuthProviders'; import { ReadPreference, ReadPreferenceMode } from './read_preference'; import { ReadConcern, ReadConcernLevel } from './read_concern'; import { W, WriteConcern } from './write_concern'; -import { MongoParseError } from './error'; +import { MongoParseError, MongoURIError } from './error'; import { AnyOptions, Callback, @@ -64,11 +64,11 @@ function matchesParentDomain(srvAddress: string, parentDomain: string): boolean */ export function resolveSRVRecord(options: MongoOptions, callback: Callback): void { if (typeof options.srvHost !== 'string') { - return callback(new MongoParseError('Cannot resolve empty srv string')); + return callback(new MongoURIError("srvHost string in options can't be empty")); } if (options.srvHost.split('.').length < 3) { - return callback(new MongoParseError('URI does not have hostname, domain name and tld')); + return callback(new MongoURIError('URI must include hostname, domain name, and tld')); } // Resolve the SRV record and use the result as the list of hosts to connect to. @@ -77,14 +77,12 @@ export function resolveSRVRecord(options: MongoOptions, callback: Callback expectedN) { errmsg = `ChunkIsMissing: Got unexpected n: ${doc.n}, expected: ${expectedN}`; - return __handleError(stream, new MongoDriverError(errmsg)); + return __handleError(stream, new MongoGridFSChunkError(errmsg)); } if (doc.n < expectedN) { errmsg = `ExtraChunk: Got unexpected n: ${doc.n}, expected: ${expectedN}`; - return __handleError(stream, new MongoDriverError(errmsg)); + return __handleError(stream, new MongoGridFSChunkError(errmsg)); } let buf = Buffer.isBuffer(doc.data) ? 
doc.data : doc.data.buffer; @@ -250,15 +256,11 @@ function doRead(stream: GridFSBucketReadStream): void { if (buf.byteLength !== expectedLength) { if (bytesRemaining <= 0) { errmsg = `ExtraChunk: Got unexpected n: ${doc.n}`; - return __handleError(stream, new MongoDriverError(errmsg)); + return __handleError(stream, new MongoGridFSChunkError(errmsg)); } - errmsg = - 'ChunkIsWrongSize: Got unexpected length: ' + - buf.byteLength + - ', expected: ' + - expectedLength; - return __handleError(stream, new MongoDriverError(errmsg)); + errmsg = `ChunkIsWrongSize: Got unexpected length: ${buf.byteLength}, expected: ${expectedLength}`; + return __handleError(stream, new MongoGridFSChunkError(errmsg)); } stream.s.bytesRead += buf.byteLength; diff --git a/src/gridfs/upload.ts b/src/gridfs/upload.ts index bcc7cec84be..62d3171591d 100644 --- a/src/gridfs/upload.ts +++ b/src/gridfs/upload.ts @@ -1,5 +1,11 @@ import { Writable } from 'stream'; -import { MongoError, AnyError, MONGODB_ERROR_CODES, MongoDriverError } from '../error'; +import { + MongoError, + AnyError, + MONGODB_ERROR_CODES, + MongoDriverError, + MongoGridFSStreamError +} from '../error'; import { WriteConcern } from './../write_concern'; import { PromiseProvider } from '../promise_provider'; import { ObjectId } from '../bson'; @@ -143,16 +149,16 @@ export class GridFSBucketWriteStream extends Writable { abort(callback: Callback): void; abort(callback?: Callback): Promise | void { const Promise = PromiseProvider.get(); - let error: MongoDriverError; + let error: MongoGridFSStreamError; if (this.state.streamEnd) { - error = new MongoDriverError('Cannot abort a stream that has already completed'); + error = new MongoGridFSStreamError('Cannot abort a stream that has already completed'); if (typeof callback === 'function') { return callback(error); } return Promise.reject(error); } if (this.state.aborted) { - error = new MongoDriverError('Cannot call abort() on a stream twice'); + error = new MongoGridFSStreamError('Cannot call abort() on a stream twice'); if (typeof callback === 'function') { return callback(error); } @@ -556,6 +562,7 @@ function writeRemnant(stream: GridFSBucketWriteStream, callback?: Callback): boo function checkAborted(stream: GridFSBucketWriteStream, callback?: Callback): boolean { if (stream.state.aborted) { if (typeof callback === 'function') { + // TODO(NODE-3405): Replace with MongoStreamClosedError callback(new MongoDriverError('this stream has been aborted')); } return true; From 2629a607a9e4c673b14f216e89505365dc0b3a3b Mon Sep 17 00:00:00 2001 From: Andy Mina Date: Thu, 15 Jul 2021 21:24:47 -0400 Subject: [PATCH 02/19] refactor(NODE-3404): pruned more instances of MongoDriverError --- src/connection_string.ts | 8 ++++---- test/functional/change_stream.test.js | 12 ++++++------ test/functional/unified-spec-runner/entities.ts | 3 ++- test/functional/unified-spec-runner/operations.ts | 3 ++- test/manual/ldap.test.js | 3 ++- 5 files changed, 16 insertions(+), 13 deletions(-) diff --git a/src/connection_string.ts b/src/connection_string.ts index d06128c2dd6..484533287ad 100644 --- a/src/connection_string.ts +++ b/src/connection_string.ts @@ -6,7 +6,7 @@ import { AuthMechanism } from './cmap/auth/defaultAuthProviders'; import { ReadPreference, ReadPreferenceMode } from './read_preference'; import { ReadConcern, ReadConcernLevel } from './read_concern'; import { W, WriteConcern } from './write_concern'; -import { MongoParseError, MongoURIError } from './error'; +import { MongoCompressionError, MongoParseError, 
MongoURIError } from './error'; import { AnyOptions, Callback, @@ -64,7 +64,7 @@ function matchesParentDomain(srvAddress: string, parentDomain: string): boolean */ export function resolveSRVRecord(options: MongoOptions, callback: Callback): void { if (typeof options.srvHost !== 'string') { - return callback(new MongoURIError("srvHost string in options can't be empty")); + return callback(new MongoURIError('srvHost string in options cannot be empty')); } if (options.srvHost.split('.').length < 3) { @@ -399,7 +399,7 @@ export function parseOptions( if (options.promiseLibrary) PromiseProvider.set(options.promiseLibrary); if (mongoOptions.directConnection && typeof mongoOptions.srvHost === 'string') { - throw new MongoParseError('directConnection not supported with SRV URI'); + throw new MongoURIError('SRV URI does not support directConnection'); } const lbError = validateLoadBalancedOptions(hosts, mongoOptions); @@ -615,7 +615,7 @@ export const OPTIONS = { if (['none', 'snappy', 'zlib'].includes(String(c))) { compressionList.add(String(c)); } else { - throw new MongoParseError(`${c} is not a valid compression mechanism`); + throw new MongoCompressionError(`${c} is not a valid compression mechanism`); } } } diff --git a/test/functional/change_stream.test.js b/test/functional/change_stream.test.js index accb54c65ce..1f688be61a6 100644 --- a/test/functional/change_stream.test.js +++ b/test/functional/change_stream.test.js @@ -1,7 +1,7 @@ 'use strict'; const assert = require('assert'); const { Transform, PassThrough } = require('stream'); -const { MongoNetworkError, MongoDriverError } = require('../../src/error'); +const { MongoNetworkError, MongoDriverError, MongoChangeStreamError } = require('../../src/error'); const { delay, setupDatabase, withClient, withCursor } = require('./shared'); const co = require('co'); const mock = require('../tools/mock'); @@ -72,12 +72,12 @@ function triggerResumableError(changeStream, delay, onClose) { function triggerError() { const cursorStream = changeStream.cursorStream; if (cursorStream) { - cursorStream.emit('error', new MongoNetworkError('error triggered from test')); + cursorStream.emit('error', new MongoChangeStreamError('error triggered from test')); return; } const nextStub = sinon.stub(changeStream.cursor, 'next').callsFake(function (callback) { - callback(new MongoNetworkError('error triggered from test')); + callback(new MongoChangeStreamError('error triggered from test')); nextStub.restore(); }); @@ -100,7 +100,7 @@ function triggerResumableError(changeStream, delay, onClose) { */ function waitForStarted(changeStream, callback) { const timeout = setTimeout(() => { - throw new Error('Change stream never started'); + throw new MongoChangeStreamError('Change stream never started'); }, 2000); changeStream.cursor.once('init', () => { @@ -874,7 +874,7 @@ describe('Change Streams', function () { .next() .then(function () { // We should never execute this line because calling changeStream.next() should throw an error - throw new Error( + throw new MongoChangeStreamError( 'ChangeStream.next() returned a change document but it should have returned a MongoNetworkError' ); }) @@ -2007,7 +2007,7 @@ describe('Change Streams', function () { if (counter === 2) { changeStream.close(close); } else if (counter >= 3) { - close(new Error('should not have received more than 2 events')); + close(new MongoChangeStreamError('should not have received more than 2 events')); } }); changeStream.on('error', err => close(err)); diff --git 
a/test/functional/unified-spec-runner/entities.ts b/test/functional/unified-spec-runner/entities.ts index 76a6eaf1a74..e95b21a860e 100644 --- a/test/functional/unified-spec-runner/entities.ts +++ b/test/functional/unified-spec-runner/entities.ts @@ -4,7 +4,8 @@ import { Collection, GridFSBucket, Document, - HostAddress + HostAddress, + MongoAPIError } from '../../../src/index'; import { ReadConcern } from '../../../src/read_concern'; import { WriteConcern } from '../../../src/write_concern'; diff --git a/test/functional/unified-spec-runner/operations.ts b/test/functional/unified-spec-runner/operations.ts index cad0e0cd620..bd576f7ea3f 100644 --- a/test/functional/unified-spec-runner/operations.ts +++ b/test/functional/unified-spec-runner/operations.ts @@ -11,6 +11,7 @@ import { expectErrorCheck, resultCheck } from './match'; import type { OperationDescription } from './schema'; import { CommandStartedEvent } from '../../../src/cmap/command_monitoring_events'; import { translateOptions } from './unified-utils'; +import { MongoChangeStreamError } from '../../../src/error'; interface OperationFunctionParams { client: MongoClient; @@ -203,7 +204,7 @@ operations.set('createChangeStream', async ({ entities, operation }) => { return new Promise((resolve, reject) => { const timeout = setTimeout(() => { - reject(new Error('Change stream never started')); + reject(new MongoChangeStreamError('Change stream never started')); }, 2000); changeStream.cursor.once('init', () => { diff --git a/test/manual/ldap.test.js b/test/manual/ldap.test.js index 5e683804749..501cca4b640 100644 --- a/test/manual/ldap.test.js +++ b/test/manual/ldap.test.js @@ -1,10 +1,11 @@ 'use strict'; const { MongoClient } = require('../../src'); const { expect } = require('chai'); +const { MongoURIError } = require('../../src/error'); describe('LDAP', function () { if (process.env.MONGODB_URI == null) { - throw new Error(`skipping SSL tests, MONGODB_URI environment variable is not defined`); + throw new MongoURIError(`skipping SSL tests, MONGODB_URI environment variable is not defined`); } it('Should correctly authenticate against ldap', function (done) { From 143f44af5e93f0f5c10bde745af338d3003d6a31 Mon Sep 17 00:00:00 2001 From: Andy Mina Date: Thu, 15 Jul 2021 23:26:18 -0400 Subject: [PATCH 03/19] refactor(NODE-3404): modifying errors in tests --- src/cmap/message_stream.ts | 2 +- src/cmap/wire_protocol/compression.ts | 2 +- test/unit/core/connection_string.test.js | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/cmap/message_stream.ts b/src/cmap/message_stream.ts index f59b9069881..387fc4c4e6f 100644 --- a/src/cmap/message_stream.ts +++ b/src/cmap/message_stream.ts @@ -1,6 +1,6 @@ import { Duplex, DuplexOptions } from 'stream'; import { Response, Msg, BinMsg, Query, WriteProtocolMessageType, MessageHeader } from './commands'; -import { MongoDecompressionError, MongoDriverError, MongoParseError } from '../error'; +import { MongoDecompressionError, MongoParseError } from '../error'; import { OP_COMPRESSED, OP_MSG } from './wire_protocol/constants'; import { compress, diff --git a/src/cmap/wire_protocol/compression.ts b/src/cmap/wire_protocol/compression.ts index 92e09f2197e..e533e359f47 100644 --- a/src/cmap/wire_protocol/compression.ts +++ b/src/cmap/wire_protocol/compression.ts @@ -3,7 +3,7 @@ import type { Callback } from '../../utils'; import type { OperationDescription } from '../message_stream'; import { Snappy } from '../../deps'; -import { MongoCompressionError, MongoDecompressionError, 
MongoDriverError } from '../../error'; +import { MongoCompressionError, MongoDecompressionError } from '../../error'; /** @public */ export const Compressor = Object.freeze({ diff --git a/test/unit/core/connection_string.test.js b/test/unit/core/connection_string.test.js index c1f51322889..bd9cfea5e1b 100644 --- a/test/unit/core/connection_string.test.js +++ b/test/unit/core/connection_string.test.js @@ -1,6 +1,6 @@ 'use strict'; -const { MongoParseError, MongoDriverError } = require('../../../src/error'); +const { MongoParseError, MongoDriverError, MongoCompressionError } = require('../../../src/error'); const { loadSpecTests } = require('../../spec'); const chai = require('chai'); const { parseOptions } = require('../../../src/connection_string'); @@ -110,7 +110,7 @@ describe('Connection String', function () { describe('validation', function () { it('should validate compressors options', function () { expect(() => parseOptions('mongodb://localhost/?compressors=bunnies')).to.throw( - MongoParseError, + MongoCompressionError, 'bunnies is not a valid compression mechanism' ); }); From 409afa206a4575f2ec389a80d287a1f87b266121 Mon Sep 17 00:00:00 2001 From: Andy Mina Date: Sat, 17 Jul 2021 16:01:03 -0400 Subject: [PATCH 04/19] test(NODE-3404): correct gridfs tests to wait for the right errors --- src/change_stream.ts | 4 ++-- test/functional/gridfs_stream.test.js | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/change_stream.ts b/src/change_stream.ts index 2eb1259ec1c..69569e74932 100644 --- a/src/change_stream.ts +++ b/src/change_stream.ts @@ -265,8 +265,8 @@ export class ChangeStream extends TypedEven } else if (parent instanceof MongoClient) { this.type = CHANGE_DOMAIN_TYPES.CLUSTER; } else { - throw new MongoChangeStreamError( - 'Argument "parent" provided to ChangeStream constructor must be an instance of Collection, Db, or MongoClient' + throw new MongoDriverError( + 'parent provided to ChangeStream constructor is not an instance of Collection, Db, or MongoClient' ); } diff --git a/test/functional/gridfs_stream.test.js b/test/functional/gridfs_stream.test.js index 4d7b2c7a12b..cf29f1473a0 100644 --- a/test/functional/gridfs_stream.test.js +++ b/test/functional/gridfs_stream.test.js @@ -473,7 +473,7 @@ describe('GridFS Stream', function () { // Fail if user tries to abort an aborted stream uploadStream.abort().then(null, function (error) { expect(error.toString()).to.equal( - 'MongoDriverError: Cannot call abort() on a stream twice' + 'MongoGridFSStreamError: Cannot call abort() on a stream twice' ); client.close(done); }); @@ -526,7 +526,7 @@ describe('GridFS Stream', function () { // Fail if user tries to abort an aborted stream uploadStream.abort().then(null, function (error) { expect(error.toString()).to.equal( - 'MongoDriverError: Cannot call abort() on a stream twice' + 'MongoGridFSStreamError: Cannot call abort() on a stream twice' ); client.close(done); }); @@ -1184,7 +1184,7 @@ describe('GridFS Stream', function () { // Fail if user tries to abort an aborted stream uploadStream.abort().then(null, function (error) { expect(error.toString()).to.equal( - 'MongoDriverError: Cannot call abort() on a stream twice' + 'MongoGridFSStreamError: Cannot call abort() on a stream twice' ); client.close(done); }); From eeea0051761fa19646ea087c8309e74e5b11bef3 Mon Sep 17 00:00:00 2001 From: Andy Mina Date: Mon, 19 Jul 2021 15:01:12 -0400 Subject: [PATCH 05/19] refactor(NODE-3404): exported errors from src/ --- src/index.ts | 9 ++++++++- 1 file changed, 8 
insertions(+), 1 deletion(-) diff --git a/src/index.ts b/src/index.ts index 870bb63a117..fdfcde5072b 100644 --- a/src/index.ts +++ b/src/index.ts @@ -43,7 +43,14 @@ export { MongoSystemError, MongoServerSelectionError, MongoParseError, - MongoWriteConcernError + MongoWriteConcernError, + MongoURIError, + MongoStreamError, + MongoChangeStreamError, + MongoGridFSStreamError, + MongoGridFSChunkError, + MongoCompressionError, + MongoDecompressionError } from './error'; export { MongoBulkWriteError, BulkWriteOptions, AnyBulkWriteOperation } from './bulk/common'; export { From 3a13cb62a6afae57e56c10dba89b66a67d61195b Mon Sep 17 00:00:00 2001 From: Andy Mina Date: Mon, 19 Jul 2021 15:49:19 -0400 Subject: [PATCH 06/19] refactor(NODE-3404): export MongoRuntimeError from src/ --- src/index.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/src/index.ts b/src/index.ts index fdfcde5072b..35b3a47d777 100644 --- a/src/index.ts +++ b/src/index.ts @@ -44,6 +44,7 @@ export { MongoServerSelectionError, MongoParseError, MongoWriteConcernError, + MongoRuntimeError, MongoURIError, MongoStreamError, MongoChangeStreamError, From 8a17031e381d8c9cec9b4d135759456675797cdb Mon Sep 17 00:00:00 2001 From: Andy Mina Date: Tue, 20 Jul 2021 12:07:59 -0400 Subject: [PATCH 07/19] test(NODE-3404): revert use of MongoChangeStreamError --- test/functional/change_stream.test.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/functional/change_stream.test.js b/test/functional/change_stream.test.js index 1f688be61a6..99cb7c7f73b 100644 --- a/test/functional/change_stream.test.js +++ b/test/functional/change_stream.test.js @@ -72,12 +72,12 @@ function triggerResumableError(changeStream, delay, onClose) { function triggerError() { const cursorStream = changeStream.cursorStream; if (cursorStream) { - cursorStream.emit('error', new MongoChangeStreamError('error triggered from test')); + cursorStream.emit('error', new MongoNetworkError('error triggered from test')); return; } const nextStub = sinon.stub(changeStream.cursor, 'next').callsFake(function (callback) { - callback(new MongoChangeStreamError('error triggered from test')); + callback(new MongoNetworkError('error triggered from test')); nextStub.restore(); }); From 9cc26c8a942bbb93633def838693c77537423043 Mon Sep 17 00:00:00 2001 From: Andy Mina Date: Wed, 21 Jul 2021 13:39:11 -0400 Subject: [PATCH 08/19] refactor(NODE-3404): revert select usage of MongoGridFSStreamError --- src/change_stream.ts | 50 +++++++++++++-------------- src/connection_string.ts | 38 ++++++++++---------- src/gridfs/download.ts | 10 +++--- src/gridfs/upload.ts | 20 ++++++----- test/functional/gridfs_stream.test.js | 7 ++-- 5 files changed, 65 insertions(+), 60 deletions(-) diff --git a/src/change_stream.ts b/src/change_stream.ts index 69569e74932..307f923ee90 100644 --- a/src/change_stream.ts +++ b/src/change_stream.ts @@ -1,38 +1,38 @@ import Denque = require('denque'); +import type { Readable } from 'stream'; +import type { Document, Timestamp } from './bson'; +import { Collection } from './collection'; +import { + AbstractCursor, + AbstractCursorEvents, + AbstractCursorOptions, + CursorStreamOptions +} from './cursor/abstract_cursor'; +import { Db } from './db'; import { - MongoError, AnyError, isResumableError, + MongoChangeStreamError, MongoDriverError, - MongoChangeStreamError + MongoError } from './error'; +import { MongoClient } from './mongo_client'; +import { InferIdType, Nullable, TypedEventEmitter } from './mongo_types'; import { AggregateOperation, 
AggregateOptions } from './operations/aggregate'; +import type { CollationOptions, OperationParent } from './operations/command'; +import { executeOperation, ExecutionResult } from './operations/execute_operation'; +import type { ReadPreference } from './read_preference'; +import type { Topology } from './sdam/topology'; +import type { ClientSession } from './sessions'; import { - maxWireVersion, calculateDurationInMs, - now, + Callback, + getTopology, + maxWireVersion, maybePromise, MongoDBNamespace, - Callback, - getTopology + now } from './utils'; -import type { ReadPreference } from './read_preference'; -import type { Timestamp, Document } from './bson'; -import type { Topology } from './sdam/topology'; -import type { OperationParent, CollationOptions } from './operations/command'; -import { MongoClient } from './mongo_client'; -import { Db } from './db'; -import { Collection } from './collection'; -import type { Readable } from 'stream'; -import { - AbstractCursor, - AbstractCursorEvents, - AbstractCursorOptions, - CursorStreamOptions -} from './cursor/abstract_cursor'; -import type { ClientSession } from './sessions'; -import { executeOperation, ExecutionResult } from './operations/execute_operation'; -import { InferIdType, Nullable, TypedEventEmitter } from './mongo_types'; /** @internal */ const kResumeQueue = Symbol('resumeQueue'); @@ -548,7 +548,7 @@ const CHANGE_STREAM_EVENTS = [ function setIsEmitter(changeStream: ChangeStream): void { if (changeStream[kMode] === 'iterator') { throw new MongoChangeStreamError( - "ChangeStream can't be used as an EventEmitter after being used as an iterator" + 'ChangeStream cannot be used as an EventEmitter after being used as an iterator' ); } changeStream[kMode] = 'emitter'; @@ -557,7 +557,7 @@ function setIsEmitter(changeStream: ChangeStream): void { function setIsIterator(changeStream: ChangeStream): void { if (changeStream[kMode] === 'emitter') { throw new MongoChangeStreamError( - "ChangeStream can't be used as an EventEmitter after being used as an iterator" + 'ChangeStream cannot be used as an EventEmitter after being used as an iterator' ); } changeStream[kMode] = 'iterator'; diff --git a/src/connection_string.ts b/src/connection_string.ts index 484533287ad..48e629e5ec9 100644 --- a/src/connection_string.ts +++ b/src/connection_string.ts @@ -2,22 +2,12 @@ import * as dns from 'dns'; import * as fs from 'fs'; import ConnectionString from 'mongodb-connection-string-url'; import { URLSearchParams } from 'url'; +import type { Document } from './bson'; import { AuthMechanism } from './cmap/auth/defaultAuthProviders'; -import { ReadPreference, ReadPreferenceMode } from './read_preference'; -import { ReadConcern, ReadConcernLevel } from './read_concern'; -import { W, WriteConcern } from './write_concern'; +import { MongoCredentials } from './cmap/auth/mongo_credentials'; +import { Encrypter } from './encrypter'; import { MongoCompressionError, MongoParseError, MongoURIError } from './error'; -import { - AnyOptions, - Callback, - DEFAULT_PK_FACTORY, - isRecord, - makeClientMetadata, - setDifference, - HostAddress, - emitWarning -} from './utils'; -import type { Document } from './bson'; +import { Logger, LoggerLevel } from './logger'; import { DriverInfo, MongoClient, @@ -27,11 +17,21 @@ import { ServerApi, ServerApiVersion } from './mongo_client'; -import { MongoCredentials } from './cmap/auth/mongo_credentials'; -import type { TagSet } from './sdam/server_description'; -import { Logger, LoggerLevel } from './logger'; import { PromiseProvider 
} from './promise_provider'; -import { Encrypter } from './encrypter'; +import { ReadConcern, ReadConcernLevel } from './read_concern'; +import { ReadPreference, ReadPreferenceMode } from './read_preference'; +import type { TagSet } from './sdam/server_description'; +import { + AnyOptions, + Callback, + DEFAULT_PK_FACTORY, + emitWarning, + HostAddress, + isRecord, + makeClientMetadata, + setDifference +} from './utils'; +import { W, WriteConcern } from './write_concern'; const VALID_TXT_RECORDS = ['authSource', 'replicaSet', 'loadBalanced']; @@ -64,7 +64,7 @@ function matchesParentDomain(srvAddress: string, parentDomain: string): boolean */ export function resolveSRVRecord(options: MongoOptions, callback: Callback): void { if (typeof options.srvHost !== 'string') { - return callback(new MongoURIError('srvHost string in options cannot be empty')); + return callback(new MongoURIError('Option "srvHost" must not be empty')); } if (options.srvHost.split('.').length < 3) { diff --git a/src/gridfs/download.ts b/src/gridfs/download.ts index 2a219c38796..b7e20ab935e 100644 --- a/src/gridfs/download.ts +++ b/src/gridfs/download.ts @@ -1,4 +1,8 @@ +import type { ObjectId } from 'bson'; import { Readable } from 'stream'; +import type { Document } from '../bson'; +import type { Collection } from '../collection'; +import type { FindCursor } from '../cursor/find_cursor'; import { AnyError, MongoDriverError, @@ -6,15 +10,11 @@ import { MongoGridFSStreamError, MongoInvalidArgumentError } from '../error'; -import type { Document } from '../bson'; import type { FindOptions } from '../operations/find'; +import type { ReadPreference } from '../read_preference'; import type { Sort } from '../sort'; import type { Callback } from '../utils'; -import type { Collection } from '../collection'; -import type { ReadPreference } from '../read_preference'; import type { GridFSChunk } from './upload'; -import type { FindCursor } from '../cursor/find_cursor'; -import type { ObjectId } from 'bson'; /** @public */ export interface GridFSBucketReadStreamOptions { diff --git a/src/gridfs/upload.ts b/src/gridfs/upload.ts index 62d3171591d..e56c8472922 100644 --- a/src/gridfs/upload.ts +++ b/src/gridfs/upload.ts @@ -1,20 +1,20 @@ import { Writable } from 'stream'; +import type { Document } from '../bson'; +import { ObjectId } from '../bson'; +import type { Collection } from '../collection'; import { - MongoError, AnyError, MONGODB_ERROR_CODES, MongoDriverError, + MongoError, MongoGridFSStreamError } from '../error'; -import { WriteConcern } from './../write_concern'; import { PromiseProvider } from '../promise_provider'; -import { ObjectId } from '../bson'; import type { Callback } from '../utils'; -import type { Document } from '../bson'; -import type { GridFSBucket } from './index'; -import type { GridFSFile } from './download'; import type { WriteConcernOptions } from '../write_concern'; -import type { Collection } from '../collection'; +import { WriteConcern } from './../write_concern'; +import type { GridFSFile } from './download'; +import type { GridFSBucket } from './index'; /** @public */ export interface GridFSChunk { @@ -151,14 +151,16 @@ export class GridFSBucketWriteStream extends Writable { const Promise = PromiseProvider.get(); let error: MongoGridFSStreamError; if (this.state.streamEnd) { - error = new MongoGridFSStreamError('Cannot abort a stream that has already completed'); + // TODO(NODE-3405): Replace with MongoStreamClosedError + error = new MongoDriverError('Cannot abort a stream that has already 
completed'); if (typeof callback === 'function') { return callback(error); } return Promise.reject(error); } if (this.state.aborted) { - error = new MongoGridFSStreamError('Cannot call abort() on a stream twice'); + // TODO(NODE-3405): Replace with MongoStreamClosedError + error = new MongoDriverError('Cannot call abort() on a stream twice'); if (typeof callback === 'function') { return callback(error); } diff --git a/test/functional/gridfs_stream.test.js b/test/functional/gridfs_stream.test.js index cf29f1473a0..dc6937b665b 100644 --- a/test/functional/gridfs_stream.test.js +++ b/test/functional/gridfs_stream.test.js @@ -473,7 +473,8 @@ describe('GridFS Stream', function () { // Fail if user tries to abort an aborted stream uploadStream.abort().then(null, function (error) { expect(error.toString()).to.equal( - 'MongoGridFSStreamError: Cannot call abort() on a stream twice' + // TODO(NODE-3405): Replace with MongoStreamClosedError + 'MongoDriverError: Cannot call abort() on a stream twice' ); client.close(done); }); @@ -526,6 +527,7 @@ describe('GridFS Stream', function () { // Fail if user tries to abort an aborted stream uploadStream.abort().then(null, function (error) { expect(error.toString()).to.equal( + // TODO(NODE-3405): Replace with MongoStreamClosedError 'MongoGridFSStreamError: Cannot call abort() on a stream twice' ); client.close(done); @@ -1184,7 +1186,8 @@ describe('GridFS Stream', function () { // Fail if user tries to abort an aborted stream uploadStream.abort().then(null, function (error) { expect(error.toString()).to.equal( - 'MongoGridFSStreamError: Cannot call abort() on a stream twice' + // TODO(NODE-3405): Replace with MongoStreamClosedError + 'MongoDriverError: Cannot call abort() on a stream twice' ); client.close(done); }); From 68d440e5a6cae7cc580a5ddcdc23ee133f8c50a2 Mon Sep 17 00:00:00 2001 From: Andy Mina Date: Wed, 21 Jul 2021 14:24:07 -0400 Subject: [PATCH 09/19] test(NODE-3404): fix GridFS test waiting for the wrong error --- test/functional/gridfs_stream.test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/functional/gridfs_stream.test.js b/test/functional/gridfs_stream.test.js index dc6937b665b..bfe6a5ec8fc 100644 --- a/test/functional/gridfs_stream.test.js +++ b/test/functional/gridfs_stream.test.js @@ -528,7 +528,7 @@ describe('GridFS Stream', function () { uploadStream.abort().then(null, function (error) { expect(error.toString()).to.equal( // TODO(NODE-3405): Replace with MongoStreamClosedError - 'MongoGridFSStreamError: Cannot call abort() on a stream twice' + 'MongoDriverError: Cannot call abort() on a stream twice' ); client.close(done); }); From 04207b8ffd4781753a4e99463dff3218b4a4308b Mon Sep 17 00:00:00 2001 From: Andy Mina Date: Wed, 4 Aug 2021 18:30:17 -0400 Subject: [PATCH 10/19] refactor(NODE-3404): use MongoRuntime children where appropriate --- src/change_stream.ts | 58 ++++++++++++------------ src/cmap/connect.ts | 7 +-- src/cmap/message_stream.ts | 4 +- src/cmap/wire_protocol/compression.ts | 7 ++- src/connection_string.ts | 47 ++++++++++--------- src/db.ts | 2 +- src/error.ts | 4 +- src/gridfs/download.ts | 57 +++++++++++++---------- src/gridfs/upload.ts | 1 + src/index.ts | 1 - test/unit/core/connection_string.test.js | 8 +++- 11 files changed, 105 insertions(+), 91 deletions(-) diff --git a/src/change_stream.ts b/src/change_stream.ts index 307f923ee90..d4258c3ced2 100644 --- a/src/change_stream.ts +++ b/src/change_stream.ts @@ -1,38 +1,39 @@ import Denque = require('denque'); -import type { Readable } from 
'stream'; -import type { Document, Timestamp } from './bson'; -import { Collection } from './collection'; -import { - AbstractCursor, - AbstractCursorEvents, - AbstractCursorOptions, - CursorStreamOptions -} from './cursor/abstract_cursor'; -import { Db } from './db'; import { + MongoError, AnyError, isResumableError, - MongoChangeStreamError, MongoDriverError, - MongoError + MongoAPIError, + MongoChangeStreamError } from './error'; -import { MongoClient } from './mongo_client'; -import { InferIdType, Nullable, TypedEventEmitter } from './mongo_types'; import { AggregateOperation, AggregateOptions } from './operations/aggregate'; -import type { CollationOptions, OperationParent } from './operations/command'; -import { executeOperation, ExecutionResult } from './operations/execute_operation'; -import type { ReadPreference } from './read_preference'; -import type { Topology } from './sdam/topology'; -import type { ClientSession } from './sessions'; import { - calculateDurationInMs, - Callback, - getTopology, maxWireVersion, + calculateDurationInMs, + now, maybePromise, MongoDBNamespace, - now + Callback, + getTopology } from './utils'; +import type { ReadPreference } from './read_preference'; +import type { Timestamp, Document } from './bson'; +import type { Topology } from './sdam/topology'; +import type { OperationParent, CollationOptions } from './operations/command'; +import { MongoClient } from './mongo_client'; +import { Db } from './db'; +import { Collection } from './collection'; +import type { Readable } from 'stream'; +import { + AbstractCursor, + AbstractCursorEvents, + AbstractCursorOptions, + CursorStreamOptions +} from './cursor/abstract_cursor'; +import type { ClientSession } from './sessions'; +import { executeOperation, ExecutionResult } from './operations/execute_operation'; +import { InferIdType, Nullable, TypedEventEmitter } from './mongo_types'; /** @internal */ const kResumeQueue = Symbol('resumeQueue'); @@ -265,8 +266,8 @@ export class ChangeStream extends TypedEven } else if (parent instanceof MongoClient) { this.type = CHANGE_DOMAIN_TYPES.CLUSTER; } else { - throw new MongoDriverError( - 'parent provided to ChangeStream constructor is not an instance of Collection, Db, or MongoClient' + throw new MongoChangeStreamError( + 'Parent provided to ChangeStream constructor must be an instance of Collection, Db, or MongoClient' ); } @@ -547,7 +548,8 @@ const CHANGE_STREAM_EVENTS = [ function setIsEmitter(changeStream: ChangeStream): void { if (changeStream[kMode] === 'iterator') { - throw new MongoChangeStreamError( + // TODO(NODE-3484): Replace with MongoChangeStreamModeError + throw new MongoAPIError( 'ChangeStream cannot be used as an EventEmitter after being used as an iterator' ); } @@ -556,7 +558,7 @@ function setIsEmitter(changeStream: ChangeStream): void { function setIsIterator(changeStream: ChangeStream): void { if (changeStream[kMode] === 'emitter') { - throw new MongoChangeStreamError( + throw new MongoAPIError( 'ChangeStream cannot be used as an EventEmitter after being used as an iterator' ); } @@ -634,7 +636,7 @@ function waitForTopologyConnected( } if (calculateDurationInMs(start) > timeout) { - // TODO: Replace with MongoNetworkTimeoutError + // TODO(NODE-3497): Replace with MongoNetworkTimeoutError return callback(new MongoDriverError('Timed out waiting for connection')); } diff --git a/src/cmap/connect.ts b/src/cmap/connect.ts index c4cc8755c36..64d91fabf5b 100644 --- a/src/cmap/connect.ts +++ b/src/cmap/connect.ts @@ -5,11 +5,11 @@ import { 
MongoNetworkError, MongoNetworkTimeoutError, AnyError, - MongoDriverError, MongoCompatibilityError, MongoInvalidArgumentError, MongoServerError, - MongoURIError + MongoRuntimeError, + MongoDriverError } from '../error'; import { AUTH_PROVIDERS, AuthMechanism } from './auth/defaultAuthProviders'; import { AuthContext } from './auth/auth_provider'; @@ -285,7 +285,8 @@ function parseConnectOptions(options: ConnectionOptions): SocketConnectOpts { } else { // This should never happen since we set up HostAddresses // But if we don't throw here the socket could hang until timeout - throw new MongoURIError(`Unexpected HostAddress ${JSON.stringify(hostAddress)}`); + // TODO(NODE-3484): Replace with a child of MongoParseError + throw new MongoRuntimeError(`Unexpected HostAddress ${JSON.stringify(hostAddress)}`); } } diff --git a/src/cmap/message_stream.ts b/src/cmap/message_stream.ts index 387fc4c4e6f..8ab3290e3be 100644 --- a/src/cmap/message_stream.ts +++ b/src/cmap/message_stream.ts @@ -191,9 +191,7 @@ function processIncomingData(stream: MessageStream, callback: Callback) if (messageBody.length !== messageHeader.length) { callback( - new MongoDecompressionError( - 'Decompressing a compressed message from the server failed. The message is corrupt.' - ) + new MongoDecompressionError('Message body and message header must be the same length') ); return; diff --git a/src/cmap/wire_protocol/compression.ts b/src/cmap/wire_protocol/compression.ts index e533e359f47..368f944c5e9 100644 --- a/src/cmap/wire_protocol/compression.ts +++ b/src/cmap/wire_protocol/compression.ts @@ -3,7 +3,7 @@ import type { Callback } from '../../utils'; import type { OperationDescription } from '../message_stream'; import { Snappy } from '../../deps'; -import { MongoCompressionError, MongoDecompressionError } from '../../error'; +import { MongoDecompressionError, MongoInvalidArgumentError } from '../../error'; /** @public */ export const Compressor = Object.freeze({ @@ -53,7 +53,7 @@ export function compress( zlib.deflate(dataToBeCompressed, zlibOptions, callback as zlib.CompressCallback); break; default: - throw new MongoCompressionError( + throw new MongoInvalidArgumentError( `Unknown compressor ${self.options.agreedCompressor} failed to compress` ); } @@ -67,8 +67,7 @@ export function decompress( ): void { if (compressorID < 0 || compressorID > Math.max(2)) { throw new MongoDecompressionError( - `Server sent message compressed using an unsupported compressor.` + - ` (Received compressor ID ${compressorID})` + `Server sent message compressed using an unsupported compressor. 
(Received compressor ID ${compressorID})` ); } diff --git a/src/connection_string.ts b/src/connection_string.ts index 48e629e5ec9..6b9e2ac716d 100644 --- a/src/connection_string.ts +++ b/src/connection_string.ts @@ -2,12 +2,22 @@ import * as dns from 'dns'; import * as fs from 'fs'; import ConnectionString from 'mongodb-connection-string-url'; import { URLSearchParams } from 'url'; -import type { Document } from './bson'; import { AuthMechanism } from './cmap/auth/defaultAuthProviders'; -import { MongoCredentials } from './cmap/auth/mongo_credentials'; -import { Encrypter } from './encrypter'; -import { MongoCompressionError, MongoParseError, MongoURIError } from './error'; -import { Logger, LoggerLevel } from './logger'; +import { ReadPreference, ReadPreferenceMode } from './read_preference'; +import { ReadConcern, ReadConcernLevel } from './read_concern'; +import { W, WriteConcern } from './write_concern'; +import { MongoAPIError, MongoInvalidArgumentError, MongoParseError, MongoURIError } from './error'; +import { + AnyOptions, + Callback, + DEFAULT_PK_FACTORY, + isRecord, + makeClientMetadata, + setDifference, + HostAddress, + emitWarning +} from './utils'; +import type { Document } from './bson'; import { DriverInfo, MongoClient, @@ -17,21 +27,11 @@ import { ServerApi, ServerApiVersion } from './mongo_client'; -import { PromiseProvider } from './promise_provider'; -import { ReadConcern, ReadConcernLevel } from './read_concern'; -import { ReadPreference, ReadPreferenceMode } from './read_preference'; +import { MongoCredentials } from './cmap/auth/mongo_credentials'; import type { TagSet } from './sdam/server_description'; -import { - AnyOptions, - Callback, - DEFAULT_PK_FACTORY, - emitWarning, - HostAddress, - isRecord, - makeClientMetadata, - setDifference -} from './utils'; -import { W, WriteConcern } from './write_concern'; +import { Logger, LoggerLevel } from './logger'; +import { PromiseProvider } from './promise_provider'; +import { Encrypter } from './encrypter'; const VALID_TXT_RECORDS = ['authSource', 'replicaSet', 'loadBalanced']; @@ -64,11 +64,12 @@ function matchesParentDomain(srvAddress: string, parentDomain: string): boolean */ export function resolveSRVRecord(options: MongoOptions, callback: Callback): void { if (typeof options.srvHost !== 'string') { - return callback(new MongoURIError('Option "srvHost" must not be empty')); + return callback(new MongoAPIError('Option "srvHost" must not be empty')); } if (options.srvHost.split('.').length < 3) { - return callback(new MongoURIError('URI must include hostname, domain name, and tld')); + // TODO(NODE-3484): Replace with MongoConnectionStringError + return callback(new MongoAPIError('URI must include hostname, domain name, and tld')); } // Resolve the SRV record and use the result as the list of hosts to connect to. @@ -615,7 +616,9 @@ export const OPTIONS = { if (['none', 'snappy', 'zlib'].includes(String(c))) { compressionList.add(String(c)); } else { - throw new MongoCompressionError(`${c} is not a valid compression mechanism`); + throw new MongoInvalidArgumentError( + `${c} is not a valid compression mechanism. Must be 'none', 'snappy', or 'zlib'. 
` + ); } } } diff --git a/src/db.ts b/src/db.ts index 595ed7a8f46..2ed3f8f3c6c 100644 --- a/src/db.ts +++ b/src/db.ts @@ -737,6 +737,7 @@ export class Db { } } +// TODO(NODE-3483): Refactor into MongoDBNamespace // Validate the database name function validateDatabaseName(databaseName: string) { if (typeof databaseName !== 'string') @@ -748,7 +749,6 @@ function validateDatabaseName(databaseName: string) { const invalidChars = [' ', '.', '$', '/', '\\']; for (let i = 0; i < invalidChars.length; i++) { if (databaseName.indexOf(invalidChars[i]) !== -1) - // TODO(NODE-3405): Change this out for a child of MongoParseError throw new MongoDriverError( `database names cannot contain the character '${invalidChars[i]}'` ); diff --git a/src/error.ts b/src/error.ts index 4ca00ad1838..08d930d216f 100644 --- a/src/error.ts +++ b/src/error.ts @@ -189,7 +189,7 @@ export class MongoDriverError extends MongoError { */ export class MongoAPIError extends MongoDriverError { - protected constructor(message: string) { + constructor(message: string) { super(message); } @@ -209,7 +209,7 @@ export class MongoAPIError extends MongoDriverError { * @category Error */ export class MongoRuntimeError extends MongoDriverError { - protected constructor(message: string) { + constructor(message: string) { super(message); } diff --git a/src/gridfs/download.ts b/src/gridfs/download.ts index b7e20ab935e..38b1a2651ec 100644 --- a/src/gridfs/download.ts +++ b/src/gridfs/download.ts @@ -1,20 +1,19 @@ -import type { ObjectId } from 'bson'; import { Readable } from 'stream'; -import type { Document } from '../bson'; -import type { Collection } from '../collection'; -import type { FindCursor } from '../cursor/find_cursor'; import { - AnyError, MongoDriverError, MongoGridFSChunkError, MongoGridFSStreamError, MongoInvalidArgumentError } from '../error'; +import type { Document } from '../bson'; import type { FindOptions } from '../operations/find'; -import type { ReadPreference } from '../read_preference'; import type { Sort } from '../sort'; import type { Callback } from '../utils'; +import type { Collection } from '../collection'; +import type { ReadPreference } from '../read_preference'; import type { GridFSChunk } from './upload'; +import type { FindCursor } from '../cursor/find_cursor'; +import type { ObjectId } from 'bson'; /** @public */ export interface GridFSBucketReadStreamOptions { @@ -215,7 +214,8 @@ function doRead(stream: GridFSBucketReadStream): void { return; } if (error) { - return __handleError(stream, error); + stream.emit(GridFSBucketReadStream.ERROR, error); + return; } if (!doc) { stream.push(null); @@ -224,7 +224,7 @@ function doRead(stream: GridFSBucketReadStream): void { if (!stream.s.cursor) return; stream.s.cursor.close(error => { if (error) { - __handleError(stream, error); + stream.emit(GridFSBucketReadStream.ERROR, error); return; } @@ -240,27 +240,38 @@ function doRead(stream: GridFSBucketReadStream): void { const bytesRemaining = stream.s.file.length - stream.s.bytesRead; const expectedN = stream.s.expected++; const expectedLength = Math.min(stream.s.file.chunkSize, bytesRemaining); - let errmsg: string; if (doc.n > expectedN) { - errmsg = `ChunkIsMissing: Got unexpected n: ${doc.n}, expected: ${expectedN}`; - return __handleError(stream, new MongoGridFSChunkError(errmsg)); + return stream.emit( + GridFSBucketReadStream.ERROR, + new MongoGridFSChunkError( + `ChunkIsMissing: Got unexpected n: ${doc.n}, expected: ${expectedN}` + ) + ); } if (doc.n < expectedN) { - errmsg = `ExtraChunk: Got unexpected n: 
${doc.n}, expected: ${expectedN}`; - return __handleError(stream, new MongoGridFSChunkError(errmsg)); + return stream.emit( + GridFSBucketReadStream.ERROR, + new MongoGridFSChunkError(`ExtraChunk: Got unexpected n: ${doc.n}, expected: ${expectedN}`) + ); } let buf = Buffer.isBuffer(doc.data) ? doc.data : doc.data.buffer; if (buf.byteLength !== expectedLength) { if (bytesRemaining <= 0) { - errmsg = `ExtraChunk: Got unexpected n: ${doc.n}`; - return __handleError(stream, new MongoGridFSChunkError(errmsg)); + return stream.emit( + GridFSBucketReadStream.ERROR, + new MongoGridFSChunkError(`ExtraChunk: Got unexpected n: ${doc.n}`) + ); } - errmsg = `ChunkIsWrongSize: Got unexpected length: ${buf.byteLength}, expected: ${expectedLength}`; - return __handleError(stream, new MongoGridFSChunkError(errmsg)); + return stream.emit( + GridFSBucketReadStream.ERROR, + new MongoGridFSChunkError( + `ChunkIsWrongSize: Got unexpected length: ${buf.byteLength}, expected: ${expectedLength}` + ) + ); } stream.s.bytesRead += buf.byteLength; @@ -307,7 +318,7 @@ function init(stream: GridFSBucketReadStream): void { stream.s.files.findOne(stream.s.filter, findOneOptions, (error, doc) => { if (error) { - return __handleError(stream, error); + return stream.emit(GridFSBucketReadStream.ERROR, error); } if (!doc) { @@ -317,7 +328,7 @@ function init(stream: GridFSBucketReadStream): void { const errmsg = `FileNotFound: file ${identifier} was not found`; const err = new MongoDriverError(errmsg); err.code = 'ENOENT'; // TODO: NODE-3338 set property as part of constructor - return __handleError(stream, err); + return stream.emit(GridFSBucketReadStream.ERROR, err); } // If document is empty, kill the stream immediately and don't @@ -338,7 +349,7 @@ function init(stream: GridFSBucketReadStream): void { try { stream.s.bytesToSkip = handleStartOption(stream, doc, stream.s.options); } catch (error) { - return __handleError(stream, error); + return stream.emit(GridFSBucketReadStream.ERROR, error); } const filter: Document = { files_id: doc._id }; @@ -364,7 +375,7 @@ function init(stream: GridFSBucketReadStream): void { try { stream.s.bytesToTrim = handleEndOption(stream, doc, stream.s.cursor, stream.s.options); } catch (error) { - return __handleError(stream, error); + return stream.emit(GridFSBucketReadStream.ERROR, error); } stream.emit(GridFSBucketReadStream.FILE, doc); @@ -440,7 +451,3 @@ function handleEndOption( } throw new MongoInvalidArgumentError('End option must be defined'); } - -function __handleError(stream: GridFSBucketReadStream, error?: AnyError): void { - stream.emit(GridFSBucketReadStream.ERROR, error); -} diff --git a/src/gridfs/upload.ts b/src/gridfs/upload.ts index e56c8472922..ff52e526633 100644 --- a/src/gridfs/upload.ts +++ b/src/gridfs/upload.ts @@ -139,6 +139,7 @@ export class GridFSBucketWriteStream extends Writable { return waitForIndexes(this, () => doWrite(this, chunk, encoding, callback)); } + // TODO(NODE-3405): Refactor this with maybePromise and MongoStreamClosedError /** * Places this write stream into an aborted state (all future writes fail) * and deletes all chunks that have already been written. 
diff --git a/src/index.ts b/src/index.ts index 35b3a47d777..9860cda77c6 100644 --- a/src/index.ts +++ b/src/index.ts @@ -46,7 +46,6 @@ export { MongoWriteConcernError, MongoRuntimeError, MongoURIError, - MongoStreamError, MongoChangeStreamError, MongoGridFSStreamError, MongoGridFSChunkError, diff --git a/test/unit/core/connection_string.test.js b/test/unit/core/connection_string.test.js index bd9cfea5e1b..41f0f227ba0 100644 --- a/test/unit/core/connection_string.test.js +++ b/test/unit/core/connection_string.test.js @@ -1,6 +1,10 @@ 'use strict'; -const { MongoParseError, MongoDriverError, MongoCompressionError } = require('../../../src/error'); +const { + MongoParseError, + MongoDriverError, + MongoInvalidArgumentError +} = require('../../../src/error'); const { loadSpecTests } = require('../../spec'); const chai = require('chai'); const { parseOptions } = require('../../../src/connection_string'); @@ -110,7 +114,7 @@ describe('Connection String', function () { describe('validation', function () { it('should validate compressors options', function () { expect(() => parseOptions('mongodb://localhost/?compressors=bunnies')).to.throw( - MongoCompressionError, + MongoInvalidArgumentError, 'bunnies is not a valid compression mechanism' ); }); From 6dab25a9eed19b82494c695ee396fd70b862dd18 Mon Sep 17 00:00:00 2001 From: Andy Mina Date: Thu, 5 Aug 2021 16:45:35 -0400 Subject: [PATCH 11/19] test(NODE-3404): remove errors throws and replace with expect.fail --- src/connection_string.ts | 14 +++++++++++--- test/functional/change_stream.test.js | 6 +++--- test/functional/unified-spec-runner/entities.ts | 3 +-- 3 files changed, 15 insertions(+), 8 deletions(-) diff --git a/src/connection_string.ts b/src/connection_string.ts index 6b9e2ac716d..2dee82f426f 100644 --- a/src/connection_string.ts +++ b/src/connection_string.ts @@ -15,7 +15,8 @@ import { makeClientMetadata, setDifference, HostAddress, - emitWarning + emitWarning, + enumToString } from './utils'; import type { Document } from './bson'; import { @@ -34,6 +35,11 @@ import { PromiseProvider } from './promise_provider'; import { Encrypter } from './encrypter'; const VALID_TXT_RECORDS = ['authSource', 'replicaSet', 'loadBalanced']; +const VALID_COMPRESSION_MECHS = Object.freeze({ + NONE: 'none', + SNAPPY: 'snappy', + ZLIB: 'zlib' +}); const LB_SINGLE_HOST_ERROR = 'loadBalanced option only supported with a single host in the URI'; const LB_REPLICA_SET_ERROR = 'loadBalanced option not supported with a replicaSet option'; @@ -613,11 +619,13 @@ export const OPTIONS = { const compressionList = new Set(); for (const compVal of values as string[]) { for (const c of compVal.split(',')) { - if (['none', 'snappy', 'zlib'].includes(String(c))) { + if (Object.values(VALID_COMPRESSION_MECHS).includes(String(c))) { compressionList.add(String(c)); } else { throw new MongoInvalidArgumentError( - `${c} is not a valid compression mechanism. Must be 'none', 'snappy', or 'zlib'. ` + `${c} is not a valid compression mechanism. 
Must be one of: ${enumToString( + VALID_COMPRESSION_MECHS + )}.` ); } } diff --git a/test/functional/change_stream.test.js b/test/functional/change_stream.test.js index 99cb7c7f73b..a3f74480710 100644 --- a/test/functional/change_stream.test.js +++ b/test/functional/change_stream.test.js @@ -100,7 +100,7 @@ function triggerResumableError(changeStream, delay, onClose) { */ function waitForStarted(changeStream, callback) { const timeout = setTimeout(() => { - throw new MongoChangeStreamError('Change stream never started'); + expect.fail('Change stream never started'); }, 2000); changeStream.cursor.once('init', () => { @@ -874,7 +874,7 @@ describe('Change Streams', function () { .next() .then(function () { // We should never execute this line because calling changeStream.next() should throw an error - throw new MongoChangeStreamError( + expect.fail( 'ChangeStream.next() returned a change document but it should have returned a MongoNetworkError' ); }) @@ -2007,7 +2007,7 @@ describe('Change Streams', function () { if (counter === 2) { changeStream.close(close); } else if (counter >= 3) { - close(new MongoChangeStreamError('should not have received more than 2 events')); + expect.fail('Should not have received more than 2 events'); } }); changeStream.on('error', err => close(err)); diff --git a/test/functional/unified-spec-runner/entities.ts b/test/functional/unified-spec-runner/entities.ts index e95b21a860e..76a6eaf1a74 100644 --- a/test/functional/unified-spec-runner/entities.ts +++ b/test/functional/unified-spec-runner/entities.ts @@ -4,8 +4,7 @@ import { Collection, GridFSBucket, Document, - HostAddress, - MongoAPIError + HostAddress } from '../../../src/index'; import { ReadConcern } from '../../../src/read_concern'; import { WriteConcern } from '../../../src/write_concern'; From 34ec48028e5e3bd52c5db006dd1f0e885cc93271 Mon Sep 17 00:00:00 2001 From: Andy Mina Date: Fri, 6 Aug 2021 10:37:09 -0400 Subject: [PATCH 12/19] style(NODE-3404): remove unused import --- test/functional/change_stream.test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/functional/change_stream.test.js b/test/functional/change_stream.test.js index a3f74480710..74de4d9586b 100644 --- a/test/functional/change_stream.test.js +++ b/test/functional/change_stream.test.js @@ -1,7 +1,7 @@ 'use strict'; const assert = require('assert'); const { Transform, PassThrough } = require('stream'); -const { MongoNetworkError, MongoDriverError, MongoChangeStreamError } = require('../../src/error'); +const { MongoNetworkError, MongoDriverError } = require('../../src/error'); const { delay, setupDatabase, withClient, withCursor } = require('./shared'); const co = require('co'); const mock = require('../tools/mock'); From 7ddb4aac2b025bbc1cca6e0c138ddcf43ad531dc Mon Sep 17 00:00:00 2001 From: Andy Mina Date: Fri, 6 Aug 2021 14:39:44 -0400 Subject: [PATCH 13/19] refactor(NODE-3404): use existing compressor enum --- src/connection_string.ts | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/src/connection_string.ts b/src/connection_string.ts index 2dee82f426f..9494e3e35c3 100644 --- a/src/connection_string.ts +++ b/src/connection_string.ts @@ -15,8 +15,7 @@ import { makeClientMetadata, setDifference, HostAddress, - emitWarning, - enumToString + emitWarning } from './utils'; import type { Document } from './bson'; import { @@ -33,13 +32,9 @@ import type { TagSet } from './sdam/server_description'; import { Logger, LoggerLevel } from './logger'; import { PromiseProvider } from 
'./promise_provider'; import { Encrypter } from './encrypter'; +import { Compressor } from './cmap/wire_protocol/compression'; const VALID_TXT_RECORDS = ['authSource', 'replicaSet', 'loadBalanced']; -const VALID_COMPRESSION_MECHS = Object.freeze({ - NONE: 'none', - SNAPPY: 'snappy', - ZLIB: 'zlib' -}); const LB_SINGLE_HOST_ERROR = 'loadBalanced option only supported with a single host in the URI'; const LB_REPLICA_SET_ERROR = 'loadBalanced option not supported with a replicaSet option'; @@ -619,12 +614,12 @@ export const OPTIONS = { const compressionList = new Set(); for (const compVal of values as string[]) { for (const c of compVal.split(',')) { - if (Object.values(VALID_COMPRESSION_MECHS).includes(String(c))) { + if (Object.keys(Compressor).includes(String(c))) { compressionList.add(String(c)); } else { throw new MongoInvalidArgumentError( - `${c} is not a valid compression mechanism. Must be one of: ${enumToString( - VALID_COMPRESSION_MECHS + `${c} is not a valid compression mechanism. Must be one of: ${Object.keys( + Compressor )}.` ); } From 3b5c9e18993d7ad887f8d50d44bf809f42c386a9 Mon Sep 17 00:00:00 2001 From: Andy Mina Date: Mon, 9 Aug 2021 22:39:05 -0400 Subject: [PATCH 14/19] refactor(NODE-3404): remove MongoURIError --- src/change_stream.ts | 2 +- src/connection_string.ts | 10 +++++----- src/db.ts | 2 +- src/error.ts | 16 ---------------- src/index.ts | 1 - .../functional/unified-spec-runner/operations.ts | 3 +-- test/manual/ldap.test.js | 3 +-- 7 files changed, 9 insertions(+), 28 deletions(-) diff --git a/src/change_stream.ts b/src/change_stream.ts index d4258c3ced2..1e80cf7e58e 100644 --- a/src/change_stream.ts +++ b/src/change_stream.ts @@ -548,7 +548,7 @@ const CHANGE_STREAM_EVENTS = [ function setIsEmitter(changeStream: ChangeStream): void { if (changeStream[kMode] === 'iterator') { - // TODO(NODE-3484): Replace with MongoChangeStreamModeError + // TODO(NODE-3485): Replace with MongoChangeStreamModeError throw new MongoAPIError( 'ChangeStream cannot be used as an EventEmitter after being used as an iterator' ); diff --git a/src/connection_string.ts b/src/connection_string.ts index 9494e3e35c3..b61eb817d56 100644 --- a/src/connection_string.ts +++ b/src/connection_string.ts @@ -6,7 +6,7 @@ import { AuthMechanism } from './cmap/auth/defaultAuthProviders'; import { ReadPreference, ReadPreferenceMode } from './read_preference'; import { ReadConcern, ReadConcernLevel } from './read_concern'; import { W, WriteConcern } from './write_concern'; -import { MongoAPIError, MongoInvalidArgumentError, MongoParseError, MongoURIError } from './error'; +import { MongoAPIError, MongoInvalidArgumentError, MongoParseError } from './error'; import { AnyOptions, Callback, @@ -79,12 +79,12 @@ export function resolveSRVRecord(options: MongoOptions, callback: Callback { return new Promise((resolve, reject) => { const timeout = setTimeout(() => { - reject(new MongoChangeStreamError('Change stream never started')); + reject(new Error('Change stream never started')); }, 2000); changeStream.cursor.once('init', () => { diff --git a/test/manual/ldap.test.js b/test/manual/ldap.test.js index 501cca4b640..5e683804749 100644 --- a/test/manual/ldap.test.js +++ b/test/manual/ldap.test.js @@ -1,11 +1,10 @@ 'use strict'; const { MongoClient } = require('../../src'); const { expect } = require('chai'); -const { MongoURIError } = require('../../src/error'); describe('LDAP', function () { if (process.env.MONGODB_URI == null) { - throw new MongoURIError(`skipping SSL tests, MONGODB_URI environment variable is not 
defined`); + throw new Error(`skipping SSL tests, MONGODB_URI environment variable is not defined`); } it('Should correctly authenticate against ldap', function (done) { From e1751811c60e060f08723f84e4ca6fce5b12cefa Mon Sep 17 00:00:00 2001 From: Andy Mina Date: Tue, 10 Aug 2021 10:04:01 -0400 Subject: [PATCH 15/19] refactor(NODE-3405): revert use of MongoRuntimeError --- src/cmap/connect.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/cmap/connect.ts b/src/cmap/connect.ts index 64d91fabf5b..cdcbd8faa29 100644 --- a/src/cmap/connect.ts +++ b/src/cmap/connect.ts @@ -285,8 +285,7 @@ function parseConnectOptions(options: ConnectionOptions): SocketConnectOpts { } else { // This should never happen since we set up HostAddresses // But if we don't throw here the socket could hang until timeout - // TODO(NODE-3484): Replace with a child of MongoParseError - throw new MongoRuntimeError(`Unexpected HostAddress ${JSON.stringify(hostAddress)}`); + throw new MongoDriverError(`Unexpected HostAddress ${JSON.stringify(hostAddress)}`); } } From 1b0ae05b4f14777ace4a70473220ba6769ac412e Mon Sep 17 00:00:00 2001 From: Andy Mina Date: Tue, 10 Aug 2021 12:06:39 -0400 Subject: [PATCH 16/19] style(NODE-3404): remove unused import --- src/cmap/connect.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/src/cmap/connect.ts b/src/cmap/connect.ts index cdcbd8faa29..a87bcf1f7b4 100644 --- a/src/cmap/connect.ts +++ b/src/cmap/connect.ts @@ -8,7 +8,6 @@ import { MongoCompatibilityError, MongoInvalidArgumentError, MongoServerError, - MongoRuntimeError, MongoDriverError } from '../error'; import { AUTH_PROVIDERS, AuthMechanism } from './auth/defaultAuthProviders'; From 56596547a37968041b2e5ef2e3984929eb56a7b9 Mon Sep 17 00:00:00 2001 From: Andy Mina Date: Tue, 10 Aug 2021 14:30:00 -0400 Subject: [PATCH 17/19] test(NODE-3404): revert expect.fail use in ChangeStream test --- test/functional/change_stream.test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/functional/change_stream.test.js b/test/functional/change_stream.test.js index 74de4d9586b..5508a0fbb18 100644 --- a/test/functional/change_stream.test.js +++ b/test/functional/change_stream.test.js @@ -2007,7 +2007,7 @@ describe('Change Streams', function () { if (counter === 2) { changeStream.close(close); } else if (counter >= 3) { - expect.fail('Should not have received more than 2 events'); + close(new Error('Should not have received more than 2 events')); } }); changeStream.on('error', err => close(err)); From 57420ce82597387d7f2c131783ee7ecd6527fa75 Mon Sep 17 00:00:00 2001 From: Andy Mina Date: Tue, 10 Aug 2021 15:44:42 -0400 Subject: [PATCH 18/19] fix(NODE-3404): export missing errors from src/ --- src/index.ts | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/index.ts b/src/index.ts index 93da350c7ed..6db5bbab44d 100644 --- a/src/index.ts +++ b/src/index.ts @@ -49,7 +49,11 @@ export { MongoGridFSStreamError, MongoGridFSChunkError, MongoCompressionError, - MongoDecompressionError + MongoDecompressionError, + MongoBatchReExecutionError, + MongoCursorExhaustedError, + MongoCursorInUseError, + MongoNotConnectedError } from './error'; export { MongoBulkWriteError, BulkWriteOptions, AnyBulkWriteOperation } from './bulk/common'; export { From 84e92ca72e74586fd8f0b792c5391961913538fc Mon Sep 17 00:00:00 2001 From: Andy Mina Date: Tue, 10 Aug 2021 15:59:39 -0400 Subject: [PATCH 19/19] refactor(NODE-3404): remove unused MongoCompressionError --- src/error.ts | 17 ----------------- src/index.ts
| 1 - 2 files changed, 18 deletions(-) diff --git a/src/error.ts b/src/error.ts index 117abb8bcd2..2e8c0a5d195 100644 --- a/src/error.ts +++ b/src/error.ts @@ -235,23 +235,6 @@ export class MongoBatchReExecutionError extends MongoAPIError { } } -/** - * An error generated when the driver fails to compress data - * before sending it to the server. - * - * @public - * @category Error - */ -export class MongoCompressionError extends MongoRuntimeError { - constructor(message: string) { - super(message); - } - - get name(): string { - return 'MongoCompressionError'; - } -} - /** * An error generated when the driver fails to decompress * data received from the server. diff --git a/src/index.ts b/src/index.ts index 6db5bbab44d..f58caa191ca 100644 --- a/src/index.ts +++ b/src/index.ts @@ -48,7 +48,6 @@ export { MongoChangeStreamError, MongoGridFSStreamError, MongoGridFSChunkError, - MongoCompressionError, MongoDecompressionError, MongoBatchReExecutionError, MongoCursorExhaustedError,
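Taken together, the tail of this series settles on MongoInvalidArgumentError for invalid user input and deletes the now-unreferenced MongoCompressionError class and its export. A minimal sketch of the resulting behaviour (assuming these public exports; the URI and option value are purely illustrative), mirroring the connection-string test updated earlier in the series:

    import { MongoClient, MongoInvalidArgumentError } from 'mongodb';

    try {
      // Options are parsed in the constructor, so a bad compressor fails fast
      new MongoClient('mongodb://localhost:27017/?compressors=bunnies');
    } catch (error) {
      if (error instanceof MongoInvalidArgumentError) {
        // e.g. "bunnies is not a valid compression mechanism. Must be one of: ..."
        console.error(error.message);
      } else {
        throw error;
      }
    }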