diff --git a/.evergreen.yml b/.evergreen.yml index 5cbe12847e..de2877374e 100644 --- a/.evergreen.yml +++ b/.evergreen.yml @@ -5033,6 +5033,8 @@ functions: set -e set -x { + export NODE_JS_VERSION=${node_js_version} + . .evergreen/setup-env.sh . preload.sh ./scripts/docker/build.sh ${dockerfile} ./scripts/docker/run.sh ${dockerfile} --smokeTests @@ -8394,8 +8396,13 @@ tasks: vars: source_distribution_build_variant: debian-x64 - func: write_preload_script + - func: install + vars: + node_js_version: "14.19.1" + npm_deps_mode: cli_build - func: test_artifact_docker vars: + node_js_version: "14.19.1" dockerfile: ubuntu18.04-deb - name: pkg_test_docker_ubuntu20_04_deb tags: ["smoke-test"] @@ -8408,8 +8415,13 @@ tasks: vars: source_distribution_build_variant: debian-x64 - func: write_preload_script + - func: install + vars: + node_js_version: "14.19.1" + npm_deps_mode: cli_build - func: test_artifact_docker vars: + node_js_version: "14.19.1" dockerfile: ubuntu20.04-deb - name: pkg_test_docker_debian9_deb tags: ["smoke-test"] @@ -8422,8 +8434,13 @@ tasks: vars: source_distribution_build_variant: debian-x64 - func: write_preload_script + - func: install + vars: + node_js_version: "14.19.1" + npm_deps_mode: cli_build - func: test_artifact_docker vars: + node_js_version: "14.19.1" dockerfile: debian9-deb - name: pkg_test_docker_debian10_deb tags: ["smoke-test"] @@ -8436,8 +8453,13 @@ tasks: vars: source_distribution_build_variant: debian-x64 - func: write_preload_script + - func: install + vars: + node_js_version: "14.19.1" + npm_deps_mode: cli_build - func: test_artifact_docker vars: + node_js_version: "14.19.1" dockerfile: debian10-deb - name: pkg_test_docker_debian11_deb tags: ["smoke-test"] @@ -8450,8 +8472,13 @@ tasks: vars: source_distribution_build_variant: debian-x64 - func: write_preload_script + - func: install + vars: + node_js_version: "14.19.1" + npm_deps_mode: cli_build - func: test_artifact_docker vars: + node_js_version: "14.19.1" dockerfile: debian11-deb - name: pkg_test_docker_centos7_rpm tags: ["smoke-test"] @@ -8464,8 +8491,13 @@ tasks: vars: source_distribution_build_variant: rhel7-x64 - func: write_preload_script + - func: install + vars: + node_js_version: "14.19.1" + npm_deps_mode: cli_build - func: test_artifact_docker vars: + node_js_version: "14.19.1" dockerfile: centos7-rpm - name: pkg_test_docker_amazonlinux2_rpm tags: ["smoke-test"] @@ -8478,8 +8510,13 @@ tasks: vars: source_distribution_build_variant: rhel7-x64 - func: write_preload_script + - func: install + vars: + node_js_version: "14.19.1" + npm_deps_mode: cli_build - func: test_artifact_docker vars: + node_js_version: "14.19.1" dockerfile: amazonlinux2-rpm - name: pkg_test_docker_rocky8_rpm tags: ["smoke-test"] @@ -8492,8 +8529,13 @@ tasks: vars: source_distribution_build_variant: rhel8-x64 - func: write_preload_script + - func: install + vars: + node_js_version: "14.19.1" + npm_deps_mode: cli_build - func: test_artifact_docker vars: + node_js_version: "14.19.1" dockerfile: rocky8-rpm - name: pkg_test_docker_fedora34_rpm tags: ["smoke-test"] @@ -8506,8 +8548,13 @@ tasks: vars: source_distribution_build_variant: rhel8-x64 - func: write_preload_script + - func: install + vars: + node_js_version: "14.19.1" + npm_deps_mode: cli_build - func: test_artifact_docker vars: + node_js_version: "14.19.1" dockerfile: fedora34-rpm - name: pkg_test_docker_suse12_rpm tags: ["smoke-test"] @@ -8520,8 +8567,13 @@ tasks: vars: source_distribution_build_variant: suse-x64 - func: write_preload_script + - func: install + vars: + 
node_js_version: "14.19.1" + npm_deps_mode: cli_build - func: test_artifact_docker vars: + node_js_version: "14.19.1" dockerfile: suse12-rpm - name: pkg_test_docker_suse15_rpm tags: ["smoke-test"] @@ -8534,8 +8586,13 @@ tasks: vars: source_distribution_build_variant: suse-x64 - func: write_preload_script + - func: install + vars: + node_js_version: "14.19.1" + npm_deps_mode: cli_build - func: test_artifact_docker vars: + node_js_version: "14.19.1" dockerfile: suse15-rpm - name: pkg_test_docker_amazonlinux1_rpm tags: ["smoke-test"] @@ -8548,8 +8605,13 @@ tasks: vars: source_distribution_build_variant: amzn1-x64 - func: write_preload_script + - func: install + vars: + node_js_version: "14.19.1" + npm_deps_mode: cli_build - func: test_artifact_docker vars: + node_js_version: "14.19.1" dockerfile: amazonlinux1-rpm - name: pkg_test_debextract_debian_arm64 tags: ["smoke-test"] diff --git a/.evergreen/evergreen.yml.in b/.evergreen/evergreen.yml.in index f4c9dd8aec..b84dd3b0d5 100644 --- a/.evergreen/evergreen.yml.in +++ b/.evergreen/evergreen.yml.in @@ -521,6 +521,8 @@ functions: set -e set -x { + export NODE_JS_VERSION=${node_js_version} + . .evergreen/setup-env.sh . preload.sh ./scripts/docker/build.sh ${dockerfile} ./scripts/docker/run.sh ${dockerfile} --smokeTests @@ -819,8 +821,13 @@ tasks: preload_script_path: preload.sh <% }; break; case 'docker': { %> + - func: install + vars: + node_js_version: "<% out(NODE_JS_VERSION_14) %>" + npm_deps_mode: cli_build - func: test_artifact_docker vars: + node_js_version: "<% out(NODE_JS_VERSION_14) %>" dockerfile: <% out(dockerfile) %> <% }; break; // We don't have docker for platforms other than x64, so for those we just diff --git a/config/build.conf.js b/config/build.conf.js index 575377e05c..0f1058be0c 100644 --- a/config/build.conf.js +++ b/config/build.conf.js @@ -3,6 +3,10 @@ const path = require('path'); const os = require('os'); +const SHARED_LIBRARY_SUFFIX = + process.platform === 'win32' ? 'dll' : + process.platform === 'darwin' ? 'dylib' : 'so'; + /** * The project root. */ @@ -45,11 +49,11 @@ const OUTPUT_DIR = path.join(ROOT, 'dist'); const EXECUTABLE_PATH = path.join(OUTPUT_DIR, process.platform === 'win32' ? 'mongosh.exe' : 'mongosh'); /** - * The name of the downloaded mongocryptd executable. - * We use the name mongocryptd-mongosh to avoid conflicts with users - * potentially installing the 'proper' mongocryptd package. + * The path to the downloaded csfe shared library. + * We use the name mongosh_csfle_v1 to avoid conflicts with users + * potentially installing the 'proper' csfle shared library. */ -const MONGOCRYPTD_PATH = path.resolve(TMP_DIR, 'mongocryptd-mongosh' + (process.platform === 'win32' ? '.exe' : '')); +const CSFLE_LIBRARY_PATH = path.resolve(TMP_DIR, 'mongosh_csfle_v1.' + SHARED_LIBRARY_SUFFIX); /** * Build info JSON data file. 
@@ -105,7 +109,7 @@ module.exports = { repo: 'mongosh' }, artifactUrlFile: process.env.ARTIFACT_URL_FILE, - mongocryptdPath: MONGOCRYPTD_PATH, + csfleLibraryPath: CSFLE_LIBRARY_PATH, packageInformation: { binaries: [ { @@ -120,11 +124,11 @@ module.exports = { } }, { - sourceFilePath: MONGOCRYPTD_PATH, - category: 'libexec', + sourceFilePath: CSFLE_LIBRARY_PATH, + category: 'lib', license: { - sourceFilePath: path.resolve(__dirname, '..', 'packaging', 'LICENSE-mongocryptd'), - packagedFilePath: 'LICENSE-mongocryptd', + sourceFilePath: path.resolve(__dirname, '..', 'packaging', 'LICENSE-csfle'), + packagedFilePath: 'LICENSE-csfle', debCopyright: COPYRIGHT, debIdentifier: 'Proprietary', rpmIdentifier: 'Proprietary' diff --git a/lerna.json b/lerna.json index 8cba3d8bff..2929c2dcef 100644 --- a/lerna.json +++ b/lerna.json @@ -1,6 +1,7 @@ { "packages": [ - "packages/*" + "packages/*", + "scripts/docker" ], "version": "0.0.0-dev.0" } diff --git a/package-lock.json b/package-lock.json index 906c59fd77..30c03d0e19 100644 --- a/package-lock.json +++ b/package-lock.json @@ -54,7 +54,7 @@ "lerna": "^4.0.0", "mocha": "^7.1.2", "mongodb": "^4.6.0", - "mongodb-download-url": "^1.1.2", + "mongodb-download-url": "^1.2.0", "mongodb-js-precommit": "^2.0.0", "nock": "^13.0.11", "node-codesign": "^0.3.3", diff --git a/package.json b/package.json index a2a2f1b8b4..488ab57e45 100644 --- a/package.json +++ b/package.json @@ -124,7 +124,7 @@ "lerna": "^4.0.0", "mocha": "^7.1.2", "mongodb": "^4.6.0", - "mongodb-download-url": "^1.1.2", + "mongodb-download-url": "^1.2.0", "mongodb-js-precommit": "^2.0.0", "nock": "^13.0.11", "node-codesign": "^0.3.3", diff --git a/packages/arg-parser/src/arg-mapper.ts b/packages/arg-parser/src/arg-mapper.ts index ff6f514b20..c96707b6b2 100644 --- a/packages/arg-parser/src/arg-mapper.ts +++ b/packages/arg-parser/src/arg-mapper.ts @@ -32,6 +32,16 @@ function setAutoEncrypt( return setDriver(i, 'autoEncryption', autoEncryption); } +type AutoEncryptionExtraOptions = NonNullable; +function setAutoEncryptExtra( + i: Readonly, + key: Key, + value: AutoEncryptionExtraOptions[Key]): ConnectionInfo { + const extraOptions = i.driverOptions.autoEncryption?.extraOptions ?? 
{}; + extraOptions[key] = value; + return setAutoEncrypt(i, 'extraOptions', extraOptions); +} + type AWSKMSOptions = NonNullable['aws']>; function setAWSKMS( i: Readonly, @@ -95,6 +105,7 @@ const MAPPINGS: { awsSecretAccessKey: (i, v) => setAWSKMS(i, 'secretAccessKey', v), awsSessionToken: (i, v) => setAWSKMS(i, 'sessionToken', v), awsIamSessionToken: (i, v) => setAuthMechProp(i, 'AWS_SESSION_TOKEN', v), + csfleLibraryPath: (i, v) => setAutoEncryptExtra(i, 'csflePath', v), gssapiServiceName: (i, v) => setAuthMechProp(i, 'SERVICE_NAME', v), sspiRealmOverride: (i, v) => setAuthMechProp(i, 'SERVICE_REALM', v), sspiHostnameCanonicalization: diff --git a/packages/arg-parser/src/cli-options.ts b/packages/arg-parser/src/cli-options.ts index 5aef32e7bd..3976487feb 100644 --- a/packages/arg-parser/src/cli-options.ts +++ b/packages/arg-parser/src/cli-options.ts @@ -16,6 +16,7 @@ export interface CliOptions { awsIamSessionToken?: string; awsSecretAccessKey?: string; awsSessionToken?: string; + csfleLibraryPath?: string; db?: string; eval?: string; gssapiServiceName?: string; diff --git a/packages/browser-repl/package-lock.json b/packages/browser-repl/package-lock.json index c68b48d268..0483213f50 100644 --- a/packages/browser-repl/package-lock.json +++ b/packages/browser-repl/package-lock.json @@ -20958,8 +20958,8 @@ "polished": "^3.3.1", "prop-types": "^15.7.2", "qs": "^6.6.0", - "react": "^16.14.0", - "react-dom": "^16.14.0", + "react": "^16.8.3", + "react-dom": "^16.8.3", "react-draggable": "^4.0.3", "react-helmet-async": "^1.0.2", "react-hotkeys": "2.0.0", diff --git a/packages/build/src/compile/signable-compiler.ts b/packages/build/src/compile/signable-compiler.ts index 6100feafad..35a3568657 100644 --- a/packages/build/src/compile/signable-compiler.ts +++ b/packages/build/src/compile/signable-compiler.ts @@ -73,6 +73,10 @@ export class SignableCompiler { path: await findModulePath('service-provider-server', 'os-dns-native'), requireRegexp: /\bos_dns_native\.node$/ }; + const csfleLibraryVersionAddon = { + path: await findModulePath('cli-repl', 'mongodb-csfle-library-version'), + requireRegexp: /\bmongodb_csfle_library_version\.node$/ + }; // Warning! Until https://jira.mongodb.org/browse/MONGOSH-990, // packages/service-provider-server *also* has a copy of these. // We use the versions included in packages/cli-repl here, so these @@ -110,7 +114,8 @@ export class SignableCompiler { addons: [ fleAddon, osDnsAddon, - kerberosAddon + kerberosAddon, + csfleLibraryVersionAddon ].concat(winCAAddon ? [ winCAAddon ] : []).concat(winConsoleProcessListAddon ? 
[ diff --git a/packages/build/src/config/config.ts b/packages/build/src/config/config.ts index 712b559354..4d8acb6657 100644 --- a/packages/build/src/config/config.ts +++ b/packages/build/src/config/config.ts @@ -40,7 +40,7 @@ export interface Config { }; isPatch?: boolean; triggeringGitTag?: string; - mongocryptdPath: string; + csfleLibraryPath: string; packageInformation?: PackageInformation; artifactUrlFile?: string; manpage?: ManPageConfig; diff --git a/packages/build/src/config/redact-config.ts b/packages/build/src/config/redact-config.ts index c056166f26..a2607b5648 100644 --- a/packages/build/src/config/redact-config.ts +++ b/packages/build/src/config/redact-config.ts @@ -17,7 +17,7 @@ export function redactConfig(config: Config): Partial { repo: config.repo, isPatch: config.isPatch, packageInformation: config.packageInformation, - mongocryptdPath: config.mongocryptdPath, + csfleLibraryPath: config.csfleLibraryPath, artifactUrlFile: config.artifactUrlFile }; } diff --git a/packages/build/src/download-mongodb.ts b/packages/build/src/download-mongodb.ts index ffd706395b..2197755e20 100644 --- a/packages/build/src/download-mongodb.ts +++ b/packages/build/src/download-mongodb.ts @@ -1,8 +1,12 @@ /* eslint-disable no-return-assign, no-empty */ /* istanbul ignore file */ +import fetch from 'node-fetch'; +import tar from 'tar'; +import { promisify } from 'util'; import { promises as fs } from 'fs'; import path from 'path'; import download from 'download'; +import { pipeline } from 'stream'; import getDownloadURL from 'mongodb-download-url'; import type { Options as DownloadOptions } from 'mongodb-download-url'; @@ -21,7 +25,7 @@ export async function downloadMongoDb(tmpdir: string, targetVersionSemverSpecifi await fs.mkdir(tmpdir, { recursive: true }); if (targetVersionSemverSpecifier === 'latest-alpha') { - return await doDownload(tmpdir, 'latest-alpha', lookupDownloadUrl); + return await doDownload(tmpdir, !!options.csfle, 'latest-alpha', lookupDownloadUrl); } if (/-community$/.test(targetVersionSemverSpecifier)) { @@ -31,18 +35,25 @@ export async function downloadMongoDb(tmpdir: string, targetVersionSemverSpecifi return await doDownload( tmpdir, + !!options.csfle, targetVersionSemverSpecifier + (wantsEnterprise ? '-enterprise' : '-community'), () => lookupDownloadUrl()); } const downloadPromises: Record> = {}; -async function doDownload(tmpdir: string, version: string, lookupDownloadUrl: () => Promise) { +async function doDownload( + tmpdir: string, + isCsfle: boolean, + version: string, + lookupDownloadUrl: () => Promise) { const downloadTarget = path.resolve( tmpdir, `mongodb-${process.platform}-${process.env.DISTRO_ID || 'none'}-${process.arch}-${version}` .replace(/[^a-zA-Z0-9_-]/g, '')); return downloadPromises[downloadTarget] ??= (async() => { - const bindir = path.resolve(downloadTarget, 'bin'); + const bindir = path.resolve( + downloadTarget, + isCsfle && process.platform !== 'win32' ? 'lib' : 'bin'); try { await fs.stat(bindir); console.info(`Skipping download because ${downloadTarget} exists`); @@ -50,11 +61,38 @@ async function doDownload(tmpdir: string, version: string, lookupDownloadUrl: () } catch {} await fs.mkdir(downloadTarget, { recursive: true }); - const downloadInfo = await lookupDownloadUrl(); - console.info('Downloading...', downloadInfo); - await download(downloadInfo, downloadTarget, { extract: true, strip: 1 }); + const url = await lookupDownloadUrl(); + console.info('Downloading...', url); - await fs.stat(bindir); // Make sure it exists. 
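// --- Illustrative sketch (not part of the patch): how the new `csfle` download
// option added in this hunk is meant to be consumed. The helper name and the
// relative import path are examples only; the 'stable' specifier and the option
// names (`csfle`, `enterprise`) are the ones used by download-csfle-library.ts below.
import { downloadMongoDb } from './download-mongodb';

async function fetchCsfleSharedLibraryDir(tmpdir: string): Promise<string> {
  // With `csfle: true`, doDownload() resolves to the extracted archive's `lib/`
  // subdirectory on non-Windows platforms instead of `bin/`.
  return await downloadMongoDb(tmpdir, 'stable', { csfle: true, enterprise: true });
}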
+ async function downloadAndExtract(withExtraStripDepth = 0): Promise { + if (url.match(/\.tgz$|\.tar(\.[^.]+)?$/)) { + // the server's tarballs can contain hard links, which the (unmaintained?) + // `download` package is unable to handle (https://github.com/kevva/decompress/issues/93) + const response = await fetch(url); + await promisify(pipeline)( + response.body, + tar.x({ cwd: downloadTarget, strip: isCsfle ? 0 : 1 }) + ); + } else { + await download(url, downloadTarget, { extract: true, strip: isCsfle ? 0 : 1 }); + } + + try { + await fs.stat(bindir); // Make sure it exists. + } catch (err) { + if (withExtraStripDepth === 0 && url.includes('macos')) { + // The server team changed how macos release artifacts are packed + // and added a `./` prefix to paths in the tarball, + // which seems like it shouldn't change anything but does + // in fact require an increased path strip depth. + console.info('Retry due to miscalculated --strip-components depth'); + return await downloadAndExtract(1); + } + throw err; + } + } + + await downloadAndExtract(); return bindir; })(); } diff --git a/packages/build/src/packaging/download-csfle-library.ts b/packages/build/src/packaging/download-csfle-library.ts new file mode 100644 index 0000000000..f7eaac7037 --- /dev/null +++ b/packages/build/src/packaging/download-csfle-library.ts @@ -0,0 +1,61 @@ +/* istanbul ignore file */ +import path from 'path'; +import { promises as fs, constants as fsConstants } from 'fs'; +import { downloadMongoDb, DownloadOptions } from '../download-mongodb'; +import { BuildVariant, getDistro, getArch } from '../config'; + +export async function downloadCsfleLibrary(variant: BuildVariant | 'host'): Promise { + const opts: DownloadOptions = {}; + opts.arch = variant === 'host' ? undefined : getArch(variant); + opts.distro = variant === 'host' ? undefined : lookupReleaseDistro(variant); + opts.enterprise = true; + opts.csfle = true; + console.info('mongosh: downloading latest csfle shared library for inclusion in package:', JSON.stringify(opts)); + + let libdir = ''; + const csfleTmpTargetDir = path.resolve(__dirname, '..', '..', '..', '..', 'tmp', 'csfle-store', variant); + // Download mongodb for latest server version. Fall back to the 6.0.0-rcX + // version if no stable version is available. + let error: Error | undefined; + for (const version of [ 'stable', '>= 6.0.0-rc5' ]) { + try { + libdir = await downloadMongoDb(csfleTmpTargetDir, version, opts); + break; + } catch (e: any) { + error = e; + } + } + if (!libdir) throw error; + const csfleLibrary = path.join( + libdir, + (await fs.readdir(libdir)).find(filename => filename.match(/^mongo_csfle_v1\.(so|dylib|dll)$/)) as string + ); + // Make sure that the binary exists and is readable. 
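// --- Illustrative sketch (not from the patch itself): the streaming extraction
// pattern that downloadAndExtract() above relies on, shown standalone. Assumes
// node-fetch v2 (response.body is a Node.js Readable) and the `tar` package,
// i.e. the same dependencies imported at the top of download-mongodb.ts.
import fetch from 'node-fetch';
import tar from 'tar';
import { promisify } from 'util';
import { pipeline } from 'stream';

async function streamTarballInto(url: string, destDir: string, strip: number): Promise<void> {
  const response = await fetch(url);
  // tar.x() without a `file` option returns a writable stream that unpacks each
  // entry into `cwd`, dropping `strip` leading path components. Piping the HTTP
  // body directly into it sidesteps the hard-link limitation of the `download` package.
  await promisify(pipeline)(response.body, tar.x({ cwd: destDir, strip }));
}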
+ await fs.access(csfleLibrary, fsConstants.R_OK); + console.info('mongosh: downloaded', csfleLibrary); + return csfleLibrary; +} + +function lookupReleaseDistro(variant: BuildVariant): string { + switch (getDistro(variant)) { + case 'win32': + case 'win32msi': + return 'win32'; + case 'darwin': + return 'darwin'; + default: break; + } + switch (getArch(variant)) { + case 'ppc64le': + return 'rhel81'; + case 's390x': + return 'rhel83'; + case 'arm64': + return 'amazon2'; + case 'x64': + return 'rhel70'; + default: + break; + } + return ''; +} diff --git a/packages/build/src/packaging/download-mongocryptd.ts b/packages/build/src/packaging/download-mongocryptd.ts deleted file mode 100644 index c356a48799..0000000000 --- a/packages/build/src/packaging/download-mongocryptd.ts +++ /dev/null @@ -1,70 +0,0 @@ -/* istanbul ignore file */ -import path from 'path'; -import { promises as fs, constants as fsConstants } from 'fs'; -import { downloadMongoDb, DownloadOptions } from '../download-mongodb'; -import { BuildVariant, getDistro, getArch } from '../config'; - -export async function downloadMongocrypt(variant: BuildVariant): Promise { - const opts: DownloadOptions = {}; - opts.arch = getArch(variant); - opts.distro = lookupReleaseDistro(variant); - opts.enterprise = true; - opts.cryptd = true; - console.info('mongosh: downloading latest mongocryptd for inclusion in package:', JSON.stringify(opts)); - - const bindir = await downloadMongoDb( - path.resolve(__dirname, '..', '..', '..', '..', 'tmp', 'mongocryptd-store', variant), - '*', - opts); // Download mongodb for latest server version. - let mongocryptd = path.join(bindir, 'mongocryptd'); - if (opts.distro === 'win32') { - mongocryptd += '.exe'; - } - // Make sure that the binary exists and is executable. 
- await fs.access(mongocryptd, fsConstants.X_OK); - console.info('mongosh: downloaded', mongocryptd); - return mongocryptd; -} - -// eslint-disable-next-line complexity -function lookupReleaseDistro(variant: BuildVariant): string { - switch (getArch(variant)) { - case 'ppc64le': - return 'rhel81'; - case 's390x': - return 'rhel72'; // TODO: switch to rhel80 once available - default: - break; - } - switch (getDistro(variant)) { - case 'win32': - case 'win32msi': - return 'win32'; - case 'darwin': - return 'darwin'; - case 'linux': - case 'debian': - return 'debian92'; - case 'suse': - return 'suse12'; - case 'amzn1': - return 'amazon'; - case 'amzn2': - return 'amazon2'; - case 'rhel7': - return 'rhel70'; - case 'rhel8': - switch (getArch(variant)) { - case 'x64': - return 'rhel80'; - case 'arm64': - return 'rhel82'; - default: - break; - } - break; - default: - break; - } - return ''; -} diff --git a/packages/build/src/packaging/package/debian.spec.ts b/packages/build/src/packaging/package/debian.spec.ts index 660650221e..cf143074b7 100644 --- a/packages/build/src/packaging/package/debian.spec.ts +++ b/packages/build/src/packaging/package/debian.spec.ts @@ -25,7 +25,7 @@ describe('tarball debian', () => { { const { stdout } = await execFile('dpkg', ['-c', tarball.path]); expect(stdout).to.match(/^-rwxr.xr-x.+\/usr\/bin\/foo$/m); - expect(stdout).to.match(/^-rwxr.xr-x.+\/usr\/libexec\/bar$/m); + expect(stdout).to.match(/^-rwxr.xr-x.+\/usr\/lib\/bar$/m); expect(stdout).to.match(/^-rw-r.-r--.+\/usr\/share\/doc\/foobar\/LICENSE_bar$/m); expect(stdout).to.match(/^-rw-r.-r--.+\/usr\/share\/doc\/foobar\/LICENSE_foo$/m); expect(stdout).to.match(/^-rw-r.-r--.+\/usr\/share\/doc\/foobar\/README$/m); diff --git a/packages/build/src/packaging/package/package-information.ts b/packages/build/src/packaging/package/package-information.ts index f440723467..1c05d11da9 100644 --- a/packages/build/src/packaging/package/package-information.ts +++ b/packages/build/src/packaging/package/package-information.ts @@ -15,7 +15,7 @@ type ManPage = DocumentationFile; export interface PackageInformation { binaries: { sourceFilePath: string; - category: 'bin' | 'libexec'; + category: 'bin' | 'lib'; license: LicenseInformation; }[]; otherDocFilePaths: DocumentationFile[]; diff --git a/packages/build/src/packaging/package/redhat.spec.ts b/packages/build/src/packaging/package/redhat.spec.ts index 71a88d1a1a..8f8c78e085 100644 --- a/packages/build/src/packaging/package/redhat.spec.ts +++ b/packages/build/src/packaging/package/redhat.spec.ts @@ -34,7 +34,7 @@ describe('tarball redhat', () => { expect(stdout).to.match(/URL\s+:\s+https:\/\/example.org/); expect(stdout).to.match(/Summary\s+:\s+Dummy package/); expect(stdout).to.match(/^\/usr\/bin\/foo$/m); - expect(stdout).to.match(/^\/usr\/libexec\/bar$/m); + expect(stdout).to.match(/^\/usr\/lib\/bar$/m); expect(stdout).to.match(/^\/usr\/share\/doc\/foobar-1.0.0\/README$/m); expect(stdout).to.match(/^\/usr\/share\/licenses\/foobar-1.0.0\/LICENSE_bar$/m); expect(stdout).to.match(/^\/usr\/share\/licenses\/foobar-1.0.0\/LICENSE_foo$/m); diff --git a/packages/build/src/packaging/package/redhat.ts b/packages/build/src/packaging/package/redhat.ts index 14a718f1a4..4910c18dfb 100644 --- a/packages/build/src/packaging/package/redhat.ts +++ b/packages/build/src/packaging/package/redhat.ts @@ -11,7 +11,7 @@ const { COPYFILE_FICLONE } = constants; interface InstallFile { fromFilename: string; toFilename: string; - category: 'man' | 'bin' | 'libexec'; + category: 'man' | 'bin' | 'libexec' | 
'lib'; mode: string; } @@ -37,7 +37,7 @@ export async function createRedhatPackage( fromFilename: path.basename(sourceFilePath), toFilename: path.basename(sourceFilePath), category, - mode: '755' + mode: category === 'lib' ? '644' : '755' })); if (pkg.manpage) { installFiles.push({ diff --git a/packages/build/src/packaging/run-package.ts b/packages/build/src/packaging/run-package.ts index 031e176195..b4b9d6043c 100644 --- a/packages/build/src/packaging/run-package.ts +++ b/packages/build/src/packaging/run-package.ts @@ -1,7 +1,7 @@ import { constants as fsConstants, promises as fs } from 'fs'; import path from 'path'; import { Config, validateBuildVariant } from '../config'; -import { downloadMongocrypt } from './download-mongocryptd'; +import { downloadCsfleLibrary } from './download-csfle-library'; import { downloadManpage } from './download-manpage'; import { notarizeArtifact } from './notary-service'; import { createPackage, PackageFile } from './package'; @@ -12,20 +12,11 @@ export async function runPackage( const distributionBuildVariant = config.distributionBuildVariant; validateBuildVariant(distributionBuildVariant); - await fs.mkdir(path.dirname(config.mongocryptdPath), { recursive: true }); - // TODO: add mongocryptd and E2E tests for darwin-arm64 once server builds - // are available for that platform. - if (distributionBuildVariant !== 'darwin-arm64') { - await fs.copyFile( - await downloadMongocrypt(distributionBuildVariant), - config.mongocryptdPath, - fsConstants.COPYFILE_FICLONE); - } else { - await fs.copyFile( - path.resolve(__dirname, '..', '..', '..', '..', 'scripts', 'no-mongocryptd.sh'), - config.mongocryptdPath, - fsConstants.COPYFILE_FICLONE); - } + await fs.mkdir(path.dirname(config.csfleLibraryPath), { recursive: true }); + await fs.copyFile( + await downloadCsfleLibrary(distributionBuildVariant), + config.csfleLibraryPath, + fsConstants.COPYFILE_FICLONE); const { manpage } = config; if (manpage) { diff --git a/packages/build/test/fixtures/pkgconf.js b/packages/build/test/fixtures/pkgconf.js index 95630d4a65..97c5be6edb 100644 --- a/packages/build/test/fixtures/pkgconf.js +++ b/packages/build/test/fixtures/pkgconf.js @@ -15,7 +15,7 @@ module.exports = { }, { sourceFilePath: path.resolve(__dirname, 'bin', 'bar'), - category: 'libexec', + category: 'lib', license: { debIdentifier: 'Apple', debCopyright: '2021 Somebody Else’s Cats', diff --git a/packages/build/test/helpers.ts b/packages/build/test/helpers.ts index cd9917417a..a6e2f4d4f7 100644 --- a/packages/build/test/helpers.ts +++ b/packages/build/test/helpers.ts @@ -29,7 +29,7 @@ export const dummyConfig: Config = Object.freeze({ bundleEntrypointInput: 'bundleEntrypointInput', bundleSinglefileOutput: 'bundleSinglefileOutput', executablePath: 'executablePath', - mongocryptdPath: 'mongocryptdPath', + csfleLibraryPath: 'csfleLibraryPath', outputDir: 'outputDir', buildInfoFilePath: 'buildInfoFilePath', project: 'project', diff --git a/packages/cli-repl/package-lock.json b/packages/cli-repl/package-lock.json index 09f1c45c56..a89f433b09 100644 --- a/packages/cli-repl/package-lock.json +++ b/packages/cli-repl/package-lock.json @@ -39,7 +39,8 @@ "@types/yargs-parser": "^15.0.0", "chai-as-promised": "^7.1.1", "lodash": "^4.17.21", - "moment": "^2.29.1" + "moment": "^2.29.1", + "mongodb-csfle-library-dummy": "^1.0.1" }, "engines": { "node": ">=14.15.0" @@ -47,6 +48,7 @@ "optionalDependencies": { "get-console-process-list": "^1.0.4", "macos-export-certificate-and-key": "^1.1.1", + "mongodb-csfle-library-version": "^1.0.2", 
"win-export-certificate-and-key": "^1.1.1" } }, @@ -759,6 +761,27 @@ "whatwg-url": "^11.0.0" } }, + "node_modules/mongodb-csfle-library-dummy": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mongodb-csfle-library-dummy/-/mongodb-csfle-library-dummy-1.0.1.tgz", + "integrity": "sha512-HrNXwbXcgyO93EqCuNpeUborAmO5Vmr8ZCswxk7MdAke1dO8gmNo02NlabE47PwiCGzf3TPzmX5cf4K7TXIt3Q==", + "dev": true, + "hasInstallScript": true + }, + "node_modules/mongodb-csfle-library-version": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/mongodb-csfle-library-version/-/mongodb-csfle-library-version-1.0.2.tgz", + "integrity": "sha512-DzO4BDGh8nQUEjr7HcB9w1K1CZlfWQRA1Rkq1ROk8aJoaaEK2m++cyVHVUNzHGrYu7X1r5yqHlGxfPw5bSEU0w==", + "hasInstallScript": true, + "optional": true, + "dependencies": { + "bindings": "^1.5.0", + "node-addon-api": "^4.3.0" + }, + "bin": { + "mongodb-csfle-library-version": "bin/mongodb-csfle-library-version.js" + } + }, "node_modules/mongodb-log-writer": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/mongodb-log-writer/-/mongodb-log-writer-1.1.4.tgz", @@ -1514,6 +1537,22 @@ "whatwg-url": "^11.0.0" } }, + "mongodb-csfle-library-dummy": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mongodb-csfle-library-dummy/-/mongodb-csfle-library-dummy-1.0.1.tgz", + "integrity": "sha512-HrNXwbXcgyO93EqCuNpeUborAmO5Vmr8ZCswxk7MdAke1dO8gmNo02NlabE47PwiCGzf3TPzmX5cf4K7TXIt3Q==", + "dev": true + }, + "mongodb-csfle-library-version": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/mongodb-csfle-library-version/-/mongodb-csfle-library-version-1.0.2.tgz", + "integrity": "sha512-DzO4BDGh8nQUEjr7HcB9w1K1CZlfWQRA1Rkq1ROk8aJoaaEK2m++cyVHVUNzHGrYu7X1r5yqHlGxfPw5bSEU0w==", + "optional": true, + "requires": { + "bindings": "^1.5.0", + "node-addon-api": "^4.3.0" + } + }, "mongodb-log-writer": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/mongodb-log-writer/-/mongodb-log-writer-1.1.4.tgz", diff --git a/packages/cli-repl/package.json b/packages/cli-repl/package.json index 7143477435..21a58f1bd8 100644 --- a/packages/cli-repl/package.json +++ b/packages/cli-repl/package.json @@ -93,11 +93,13 @@ "@types/yargs-parser": "^15.0.0", "chai-as-promised": "^7.1.1", "lodash": "^4.17.21", - "moment": "^2.29.1" + "moment": "^2.29.1", + "mongodb-csfle-library-dummy": "^1.0.1" }, "optionalDependencies": { "get-console-process-list": "^1.0.4", "macos-export-certificate-and-key": "^1.1.1", - "win-export-certificate-and-key": "^1.1.1" + "win-export-certificate-and-key": "^1.1.1", + "mongodb-csfle-library-version": "^1.0.2" } } diff --git a/packages/cli-repl/src/arg-parser.ts b/packages/cli-repl/src/arg-parser.ts index eb3a9a48c9..af5791258a 100644 --- a/packages/cli-repl/src/arg-parser.ts +++ b/packages/cli-repl/src/arg-parser.ts @@ -23,6 +23,7 @@ const OPTIONS = { 'awsSecretAccessKey', 'awsSessionToken', 'awsIamSessionToken', + 'csfleLibraryPath', 'db', 'eval', 'gssapiHostName', diff --git a/packages/cli-repl/src/cli-repl.ts b/packages/cli-repl/src/cli-repl.ts index d6bf515915..5581e16437 100644 --- a/packages/cli-repl/src/cli-repl.ts +++ b/packages/cli-repl/src/cli-repl.ts @@ -18,8 +18,8 @@ import { buildInfo } from './build-info'; import type { StyleDefinition } from './clr'; import { ConfigManager, ShellHomeDirectory, ShellHomePaths } from './config-directory'; import { CliReplErrors } from './error-codes'; +import type { CSFLELibraryPathResult } from './csfle-library-paths'; import { MongoLogManager, MongoLogWriter, mongoLogId } from 
'mongodb-log-writer'; -import { MongocryptdManager } from './mongocryptd-manager'; import MongoshNodeRepl, { MongoshNodeReplOptions, MongoshIOProvider } from './mongosh-repl'; import { setupLoggerAndTelemetry, ToggleableAnalytics } from '@mongosh/logging'; import { MongoshBus, CliUserConfig, CliUserConfigValidator } from '@mongosh/types'; @@ -50,8 +50,8 @@ type AnalyticsOptions = { export type CliReplOptions = { /** The set of parsed command line flags. */ shellCliOptions: CliOptions; - /** The list of executable paths for mongocryptd. */ - mongocryptdSpawnPaths?: string[][], + /** A function for getting the shared library path for CSFLE. */ + getCSFLELibraryPaths?: (bus: MongoshBus) => Promise; /** The stream to read user input from. */ input: Readable; /** The stream to write shell output to. */ @@ -78,7 +78,8 @@ class CliRepl implements MongoshIOProvider { mongoshRepl: MongoshNodeRepl; bus: MongoshBus; cliOptions: CliOptions; - mongocryptdManager: MongocryptdManager; + getCSFLELibraryPaths?: (bus: MongoshBus) => Promise; + cachedCSFLELibraryPath?: Promise; shellHomeDirectory: ShellHomeDirectory; configDirectory: ConfigManager; config: CliUserConfigOnDisk; @@ -113,6 +114,7 @@ class CliRepl implements MongoshIOProvider { enableTelemetry: true }; + this.getCSFLELibraryPaths = options.getCSFLELibraryPaths; this.globalConfigPaths = options.globalConfigPaths ?? []; this.shellHomeDirectory = new ShellHomeDirectory(options.shellHomePaths); this.configDirectory = new ConfigManager( @@ -129,11 +131,6 @@ class CliRepl implements MongoshIOProvider { this.bus.emit('mongosh:update-user', { userId: config.userId, anonymousId: config.telemetryAnonymousId }); }); - this.mongocryptdManager = new MongocryptdManager( - options.mongocryptdSpawnPaths ?? [], - this.shellHomeDirectory, - this.bus); - this.logManager = new MongoLogManager({ directory: this.shellHomeDirectory.localPath('.'), retentionDays: 30, @@ -235,13 +232,26 @@ class CliRepl implements MongoshIOProvider { this.globalConfig = await this.loadGlobalConfigFile(); if (driverOptions.autoEncryption) { + const origExtraOptions = driverOptions.autoEncryption.extraOptions ?? {}; + if (origExtraOptions.csflePath) { + // If a CSFLE path has been specified through 'driverOptions', save it + // for later use. + this.cachedCSFLELibraryPath = Promise.resolve({ + csflePath: origExtraOptions.csflePath + }); + } + const extraOptions = { - ...(driverOptions.autoEncryption.extraOptions ?? {}), - ...(await this.startMongocryptd()) + ...origExtraOptions, + ...await this.getCSFLELibraryOptions() }; driverOptions.autoEncryption = { ...driverOptions.autoEncryption, extraOptions }; } + if (Object.keys(driverOptions.autoEncryption ?? {}).join(',') === 'extraOptions') { + // In this case, autoEncryption opts were only specified for CSFLE library specs + delete driverOptions.autoEncryption; + } const initialServiceProvider = await this.connect(driverUri, driverOptions); const initialized = await this.mongoshRepl.initialize(initialServiceProvider); @@ -601,7 +611,6 @@ class CliRepl implements MongoshIOProvider { flushDuration = Date.now() - flushStart; } } - this.mongocryptdManager.close(); // eslint-disable-next-line chai-friendly/no-unused-expressions this.logWriter?.info('MONGOSH', mongoLogId(1_000_000_045), 'analytics', 'Flushed outstanding data', { flushError, @@ -640,16 +649,12 @@ class CliRepl implements MongoshIOProvider { return this.mongoshRepl.clr(text, style); } - /** Start a mongocryptd instance for automatic FLE. 
*/ - async startMongocryptd(): Promise { - try { - return await this.mongocryptdManager.start(); - } catch (e: any) { - if (e?.code === 'ENOENT') { - throw new MongoshRuntimeError('Could not find a working mongocryptd - ensure your local installation works correctly. See the mongosh log file for additional information. Please also refer to the documentation: https://docs.mongodb.com/manual/reference/security-client-side-encryption-appendix/'); - } - throw e; + /** Get the right CSFLE shared library loading options. */ + async getCSFLELibraryOptions(): Promise { + if (!this.getCSFLELibraryPaths) { + throw new MongoshInternalError('This instance of mongosh is not configured for CSFLE'); } + return (this.cachedCSFLELibraryPath ??= this.getCSFLELibraryPaths(this.bus)); } /** Provide extra information for reporting internal errors */ diff --git a/packages/cli-repl/src/csfle-library-paths.spec.ts b/packages/cli-repl/src/csfle-library-paths.spec.ts new file mode 100644 index 0000000000..ee3c80f169 --- /dev/null +++ b/packages/cli-repl/src/csfle-library-paths.spec.ts @@ -0,0 +1,91 @@ +import { expect } from 'chai'; +import { SHARED_LIBRARY_SUFFIX, getCSFLELibraryPaths } from './csfle-library-paths'; +import csfleLibraryDummy from 'mongodb-csfle-library-dummy'; +import type { MongoshBus } from '@mongosh/types'; +import { useTmpdir } from '../test/repl-helpers'; +import { EventEmitter } from 'events'; +import { promises as fs } from 'fs'; +import path from 'path'; + +describe('getCSFLELibraryPaths', () => { + let bus: MongoshBus; + let events: any[]; + let fakeMongoshExecPath: string; + const tmpdir = useTmpdir(); + const csfleFilename = `mongosh_csfle_v1.${SHARED_LIBRARY_SUFFIX}`; + const expectedVersion = { version: BigInt('0x0001000000000000'), versionStr: 'mongo_csfle_v1-dummy' }; + + beforeEach(async function() { + events = []; + bus = new EventEmitter(); + bus.on('mongosh:csfle-load-found', (ev) => events.push(['mongosh:csfle-load-found', ev])); + bus.on('mongosh:csfle-load-skip', (ev) => events.push(['mongosh:csfle-load-skip', ev])); + fakeMongoshExecPath = path.join(tmpdir.path, 'bin', 'mongosh'); + await fs.mkdir(path.join(tmpdir.path, 'bin'), { recursive: true }); + await fs.mkdir(path.join(tmpdir.path, 'lib'), { recursive: true }); + await fs.mkdir(path.join(tmpdir.path, 'lib64'), { recursive: true }); + await fs.writeFile(fakeMongoshExecPath, '# dummy', { mode: 0o755 }); + }); + + it('will look up a shared library located in /../lib/', async function() { + const csflePath = path.join(tmpdir.path, 'lib', csfleFilename); + await fs.copyFile(csfleLibraryDummy, csflePath); + expect(await getCSFLELibraryPaths(bus, fakeMongoshExecPath)).to.deep.equal({ + csflePath, + expectedVersion + }); + expect(events.slice(1)).to.deep.equal([ + [ 'mongosh:csfle-load-found', { csflePath, expectedVersion } ] + ]); + }); + + it('will look up a shared library located in /../lib64/', async function() { + const csflePath = path.join(tmpdir.path, 'lib64', csfleFilename); + await fs.copyFile(csfleLibraryDummy, csflePath); + expect(await getCSFLELibraryPaths(bus, fakeMongoshExecPath)).to.deep.equal({ + csflePath, + expectedVersion + }); + expect(events).to.deep.equal([ + [ 'mongosh:csfle-load-found', { csflePath, expectedVersion } ] + ]); + }); + + it('will look up a shared library located in /', async function() { + const csflePath = path.join(tmpdir.path, 'bin', csfleFilename); + await fs.copyFile(csfleLibraryDummy, csflePath); + expect(await getCSFLELibraryPaths(bus, fakeMongoshExecPath)).to.deep.equal({ + 
csflePath, + expectedVersion + }); + expect(events[0][0]).to.equal('mongosh:csfle-load-skip'); + expect(events[0][1].reason).to.match(/ENOENT|LoadLibraryW failed/); + expect(events.slice(2)).to.deep.equal([ + [ 'mongosh:csfle-load-found', { csflePath, expectedVersion } ] + ]); + }); + + it('will reject a shared library if it is not readable', async function() { + if (process.platform === 'win32') { + return this.skip(); + } + const csflePath = path.join(tmpdir.path, 'lib', csfleFilename); + await fs.copyFile(csfleLibraryDummy, csflePath); + await fs.chmod(csflePath, 0o000); + expect(await getCSFLELibraryPaths(bus, fakeMongoshExecPath)).to.deep.equal({}); + expect(events[1][0]).to.equal('mongosh:csfle-load-skip'); + expect(events[1][1].reason).to.include('EACCES'); + }); + + it('will reject a shared library if its permissions are world-writable', async function() { + if (process.platform === 'win32') { + return this.skip(); + } + const csflePath = path.join(tmpdir.path, 'lib', csfleFilename); + await fs.copyFile(csfleLibraryDummy, csflePath); + await fs.chmod(csflePath, 0o777); + expect(await getCSFLELibraryPaths(bus, fakeMongoshExecPath)).to.deep.equal({}); + expect(events[1][0]).to.equal('mongosh:csfle-load-skip'); + expect(events[1][1].reason).to.include('permissions mismatch'); + }); +}); diff --git a/packages/cli-repl/src/csfle-library-paths.ts b/packages/cli-repl/src/csfle-library-paths.ts new file mode 100644 index 0000000000..03a1eea878 --- /dev/null +++ b/packages/cli-repl/src/csfle-library-paths.ts @@ -0,0 +1,100 @@ +import path from 'path'; +import { promises as fs, constants as fsConstants } from 'fs'; +import type { MongoshBus } from '@mongosh/types'; + +export const SHARED_LIBRARY_SUFFIX = + // eslint-disable-next-line no-nested-ternary + process.platform === 'win32' ? 'dll' : + process.platform === 'darwin' ? 'dylib' : 'so'; + +export interface CSFLELibraryPathResult { + csflePath?: string; + expectedVersion?: { version: bigint; versionStr: string }; +} + +/** + * Figure out the possible shared library paths for the CSFLE shared library + * that we are supposed to use. + */ +export async function getCSFLELibraryPaths( + bus: MongoshBus, + pretendProcessExecPathForTesting: string | undefined = undefined): Promise { + const execPath = pretendProcessExecPathForTesting ?? 
process.execPath; + + let getCSFLESharedLibraryVersion: typeof import('mongodb-csfle-library-version'); + try { + getCSFLESharedLibraryVersion = require('mongodb-csfle-library-version'); + } catch (err) { + getCSFLESharedLibraryVersion = () => ({ version: BigInt(0), versionStr: '' }); + } + + if (execPath === process.argv[1] || pretendProcessExecPathForTesting) { + const bindir = path.dirname(execPath); + const execPathStat = await fs.stat(execPath); + for await (const libraryCandidate of [ + // Locations of the shared library in the deb and rpm packages + path.resolve(bindir, '..', 'lib64', `mongosh_csfle_v1.${SHARED_LIBRARY_SUFFIX}`), + path.resolve(bindir, '..', 'lib', `mongosh_csfle_v1.${SHARED_LIBRARY_SUFFIX}`), + // Location of the shared library in the zip and tgz packages + path.resolve(bindir, `mongosh_csfle_v1.${SHARED_LIBRARY_SUFFIX}`) + ]) { + try { + const permissionsMismatch = await ensureMatchingPermissions(libraryCandidate, execPathStat); + if (permissionsMismatch) { + bus.emit('mongosh:csfle-load-skip', { + csflePath: libraryCandidate, + reason: 'permissions mismatch', + details: permissionsMismatch + }); + continue; + } + + const version = getCSFLESharedLibraryVersion(libraryCandidate); + const result = { + csflePath: libraryCandidate, + expectedVersion: version + }; + bus.emit('mongosh:csfle-load-found', result); + return result; + } catch (err: any) { + bus.emit('mongosh:csfle-load-skip', { + csflePath: libraryCandidate, + reason: err.message + }); + } + } + } else { + bus.emit('mongosh:csfle-load-skip', { + csflePath: '', + reason: 'Skipping CSFLE library searching because this is not a single-executable mongosh' + }); + } + return {}; +} + +// Check whether permissions for a file match what we expect them to be. +// Returns 'null' in case of no mismatch and information that is useful +// for debugging/logging in the mismatch case. +async function ensureMatchingPermissions(filename: string, execPathStat: { uid: number, gid: number }): Promise { + if (process.platform === 'win32') { + // On Windows systems, there are no permissions checks that + // we could reasonably do here. + return null; + } + await fs.access(filename, fsConstants.R_OK); + const stat = await fs.stat(filename); + // On UNIX systems, only load shared libraries if they are coming + // from a user we can consider trusted (current user or the one who owns + // the mongosh binary to begin with) and they are not writable by other + // users. 
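// --- Worked example (illustrative, not part of the patch) of the
// world-writable test performed right below: fs.Stats.mode includes the file
// type bits, so a regular file with permissions 0644 reports mode 0o100644.
import { promises as fs } from 'fs';

async function isWorldWritable(filename: string): Promise<boolean> {
  const { mode } = await fs.stat(filename);
  // 0o100644 & 0o002 === 0       -> not world-writable, candidate is acceptable
  // 0o100666 & 0o002 === 0o002   -> world-writable, candidate is skipped
  return (mode & 0o002) !== 0;
}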
+ if (((stat.uid !== execPathStat.uid && stat.uid !== process.getuid()) || + (stat.gid !== execPathStat.gid && stat.gid !== process.getgid()) || + stat.mode & 0o002 /* world-writable */)) { + return { + libraryStat: { uid: stat.uid, gid: stat.gid, mode: stat.mode }, + mongoshStat: { uid: execPathStat.uid, gid: stat.gid }, + currentUser: { uid: process.getuid(), gid: process.getgid() } + }; + } + return null; +} diff --git a/packages/cli-repl/src/index.ts b/packages/cli-repl/src/index.ts index 59c4f96b5d..4f141eb009 100644 --- a/packages/cli-repl/src/index.ts +++ b/packages/cli-repl/src/index.ts @@ -3,7 +3,6 @@ import CliRepl from './cli-repl'; import clr from './clr'; import { getStoragePaths } from './config-directory'; import { MONGOSH_WIKI, TELEMETRY_GREETING_MESSAGE, USAGE } from './constants'; -import { getMongocryptdPaths } from './mongocryptd-manager'; import { runSmokeTests } from './smoke-tests'; import { buildInfo } from './build-info'; @@ -17,7 +16,6 @@ export { CliRepl, parseCliArgs, getStoragePaths, - getMongocryptdPaths, runSmokeTests, buildInfo }; diff --git a/packages/cli-repl/src/mongocryptd-manager.spec.ts b/packages/cli-repl/src/mongocryptd-manager.spec.ts deleted file mode 100644 index 1e12960043..0000000000 --- a/packages/cli-repl/src/mongocryptd-manager.spec.ts +++ /dev/null @@ -1,192 +0,0 @@ -/* eslint-disable chai-friendly/no-unused-expressions */ -import Nanobus from 'nanobus'; -import { promises as fs } from 'fs'; -import path from 'path'; -import { getMongocryptdPaths, MongocryptdManager } from './mongocryptd-manager'; -import type { MongoshBus } from '@mongosh/types'; -import { ShellHomeDirectory } from './config-directory'; -import { startTestServer } from '../../../testing/integration-testing-hooks'; -import { eventually } from '../../../testing/eventually'; -import { expect } from 'chai'; - -describe('getMongocryptdPaths', () => { - it('always includes plain `mongocryptd`', async() => { - expect(await getMongocryptdPaths()).to.deep.include(['mongocryptd']); - }); -}); - -describe('MongocryptdManager', () => { - let basePath: string; - let bus: MongoshBus; - let shellHomeDirectory: ShellHomeDirectory; - let spawnPaths: string[][]; - let manager: MongocryptdManager; - let events: { event: string, data: any }[]; - const makeManager = () => { - manager = new MongocryptdManager(spawnPaths, shellHomeDirectory, bus); - return manager; - }; - - const fakeMongocryptdDir = path.resolve(__dirname, '..', 'test', 'fixtures', 'fake-mongocryptd'); - - beforeEach(() => { - const nanobus = new Nanobus(); - events = []; - nanobus.on('*', (event, data) => events.push({ event: event as string, data })); - bus = nanobus; - - spawnPaths = []; - basePath = path.resolve(__dirname, '..', '..', '..', 'tmp', 'test', `${Date.now()}`, `${Math.random()}`); - shellHomeDirectory = new ShellHomeDirectory({ - shellRoamingDataPath: basePath, - shellLocalDataPath: basePath, - shellRcPath: basePath - }); - }); - afterEach(() => { - manager?.close(); - }); - - it('does a no-op close when not initialized', () => { - expect(makeManager().close().state).to.equal(null); - }); - - for (const otherMongocryptd of ['none', 'missing', 'broken', 'weirdlog', 'broken-after']) { - for (const version of ['4.2', '4.4']) { // This refers to the log format version - for (const variant of ['withunix', 'nounix']) { - // eslint-disable-next-line no-loop-func - it(`spawns a working mongocryptd (${version}, ${variant}, other mongocryptd: ${otherMongocryptd})`, async() => { - spawnPaths = [ - [ - process.execPath, - 
path.resolve(fakeMongocryptdDir, `working-${version}-${variant}.js`) - ] - ]; - if (otherMongocryptd === 'missing') { - spawnPaths.unshift([ path.resolve(fakeMongocryptdDir, 'nonexistent') ]); - } - if (otherMongocryptd === 'broken') { - spawnPaths.unshift([ process.execPath, path.resolve(fakeMongocryptdDir, 'exit1') ]); - } - if (otherMongocryptd === 'weirdlog') { - spawnPaths.unshift([ process.execPath, path.resolve(fakeMongocryptdDir, 'weirdlog') ]); - } - if (otherMongocryptd === 'broken-after') { - spawnPaths.push([ process.execPath, path.resolve(fakeMongocryptdDir, 'exit1') ]); - } - expect(await makeManager().start()).to.deep.equal({ - mongocryptdURI: variant === 'nounix' ? - 'mongodb://localhost:27020' : - 'mongodb://%2Ftmp%2Fmongocryptd.sock', - mongocryptdBypassSpawn: true - }); - - const tryspawns = events.filter(({ event }) => event === 'mongosh:mongocryptd-tryspawn'); - expect(tryspawns).to.have.lengthOf( - otherMongocryptd === 'none' || otherMongocryptd === 'broken-after' ? 1 : 2); - }); - } - } - } - - it('passes relevant arguments to mongocryptd', async() => { - spawnPaths = [[process.execPath, path.resolve(fakeMongocryptdDir, 'writepidfile.js')]]; - await makeManager().start(); - const pidfile = path.join(manager.path, 'mongocryptd.pid'); - expect(JSON.parse(await fs.readFile(pidfile, 'utf8')).args).to.deep.equal([ - ...spawnPaths[0], - '--idleShutdownTimeoutSecs', '60', - '--pidfilepath', pidfile, - '--port', '0', - ...(process.platform !== 'win32' ? ['--unixSocketPrefix', path.dirname(pidfile)] : []) - ]); - }); - - it('multiple start() calls are no-ops', async() => { - spawnPaths = [[process.execPath, path.resolve(fakeMongocryptdDir, 'writepidfile.js')]]; - const manager = makeManager(); - await manager.start(); - const pid1 = manager.state.proc.pid; - await manager.start(); - expect(manager.state.proc.pid).to.equal(pid1); - }); - - it('handles synchronous throws from child_process.spawn', async() => { - spawnPaths = [['']]; - try { - await makeManager().start(); - expect.fail('missed exception'); - } catch (e: any) { - expect(e.code).to.equal('ERR_INVALID_ARG_VALUE'); - } - }); - - it('throws if no spawn paths are provided at all', async() => { - spawnPaths = []; - try { - await makeManager().start(); - expect.fail('missed exception'); - } catch (e: any) { - expect(e.name).to.equal('MongoshInternalError'); - } - }); - - it('includes stderr in the log if stdout is unparseable', async() => { - spawnPaths = [[process.execPath, path.resolve(fakeMongocryptdDir, 'weirdlog.js')]]; - try { - await makeManager().start(); - expect.fail('missed exception'); - } catch (e: any) { - expect(e.name).to.equal('MongoshInternalError'); - } - const nostdoutErrors = events.filter(({ event, data }) => { - return event === 'mongosh:mongocryptd-error' && data.cause === 'nostdout'; - }); - expect(nostdoutErrors).to.deep.equal([{ - event: 'mongosh:mongocryptd-error', - data: { cause: 'nostdout', stderr: 'Diagnostic message!\n' } - }]); - }); - - it('cleans up previously created, empty directory entries', async() => { - spawnPaths = [[process.execPath, path.resolve(fakeMongocryptdDir, 'writepidfile.js')]]; - - const manager = makeManager(); - await manager.start(); - const pidfile = path.join(manager.path, 'mongocryptd.pid'); - expect(JSON.parse(await fs.readFile(pidfile, 'utf8')).pid).to.be.a('number'); - manager.close(); - - // The file remains after close, but is gone after creating a new one: - await fs.stat(pidfile); - await makeManager().start(); - try { - await fs.stat(pidfile); - 
expect.fail('missed exception'); - } catch (e: any) { - expect(e.code).to.equal('ENOENT'); - } - }); - - context('with network testing', () => { - const testServer = startTestServer('shared'); - - beforeEach(async() => { - process.env.MONGOSH_TEST_PROXY_TARGET_PORT = await testServer.port(); - }); - afterEach(() => { - delete process.env.MONGOSH_TEST_PROXY_TARGET_PORT; - }); - - it('performs keepalive pings', async() => { - spawnPaths = [[process.execPath, path.resolve(fakeMongocryptdDir, 'withnetworking.js')]]; - const manager = makeManager(); - manager.idleShutdownTimeoutSecs = 1; - await manager.start(); - const pidfile = path.join(manager.path, 'mongocryptd.pid'); - await eventually(async() => { - expect(JSON.parse(await fs.readFile(pidfile, 'utf8')).connections).to.be.greaterThan(1); - }); - }); - }); -}); diff --git a/packages/cli-repl/src/mongocryptd-manager.ts b/packages/cli-repl/src/mongocryptd-manager.ts deleted file mode 100644 index a0a194265c..0000000000 --- a/packages/cli-repl/src/mongocryptd-manager.ts +++ /dev/null @@ -1,335 +0,0 @@ -import { ChildProcess, spawn } from 'child_process'; -import { promises as fs, constants as fsConstants } from 'fs'; -import { isIP } from 'net'; -import path from 'path'; -import readline from 'readline'; -import { Readable, PassThrough } from 'stream'; -import { MongoshInternalError } from '@mongosh/errors'; -import { CliServiceProvider } from '@mongosh/service-provider-server'; -import type { MongoshBus } from '@mongosh/types'; -import { parseAnyLogEntry, LogEntry } from './log-entry'; -import { ShellHomeDirectory } from './config-directory'; - -/** - * Figure out the possible executable paths for the mongocryptd - * binary that we are supposed to use. - */ -export async function getMongocryptdPaths(): Promise { - const bindir = path.dirname(process.execPath); - const result = []; - for await (const mongocryptdCandidate of [ - // Location of mongocryptd-mongosh in the deb and rpm packages - path.resolve(bindir, '..', 'libexec', 'mongocryptd-mongosh'), - // Location of mongocryptd-mongosh in the zip and tgz packages - path.resolve(bindir, 'mongocryptd-mongosh'), - path.resolve(bindir, 'mongocryptd-mongosh.exe') - ]) { - try { - await fs.access(mongocryptdCandidate, fsConstants.X_OK); - result.push([ mongocryptdCandidate ]); - } catch { /* ignore error */ } - } - return [...result, ['mongocryptd']]; -} - -/** - * The relevant information regarding the state of a mongocryptd process. - */ -type MongocryptdState = { - /** The connection string for the current mongocryptd instance. */ - uri: string; - /** The process handle for the current mongocryptd instance. */ - proc: ChildProcess; - /** An interval to prevent the mongocryptd instance from going idle. */ - interval: NodeJS.Timeout; -}; - -/** - * A helper class to manage mongocryptd child processes that we may need to spawn. 
- */ -export class MongocryptdManager { - spawnPaths: string[][]; - bus: MongoshBus; - path: string; - state: MongocryptdState | null; - idleShutdownTimeoutSecs = 60; - - /** - * @param spawnPaths A list of executables to spawn - * @param shellHomeDirectory A place for storing mongosh-related files - * @param bus A message bus for sharing diagnostic events about mongocryptd lifetimes - */ - constructor(spawnPaths: string[][], shellHomeDirectory: ShellHomeDirectory, bus: MongoshBus) { - this.spawnPaths = spawnPaths; - this.path = shellHomeDirectory.localPath(`mongocryptd-${process.pid}-${(Math.random() * 100000) | 0}`); - this.bus = bus; - this.state = null; - } - - /** - * Start a mongocryptd process and return matching driver options for it. - */ - async start(): Promise<{ mongocryptdURI: string, mongocryptdBypassSpawn: true }> { - if (!this.state) { - [ this.state ] = await Promise.all([ - this._spawn(), - this._cleanupOldMongocryptdDirectories() - ]); - } - - return { - mongocryptdURI: this.state.uri, - mongocryptdBypassSpawn: true - }; - } - - /** - * Stop the managed mongocryptd process, if any. This is kept synchronous - * in order to be usable inside process.on('exit') listeners. - */ - close = (): this => { - process.removeListener('exit', this.close); - if (this.state) { - this.state.proc.kill(); - clearInterval(this.state.interval); - this.state = null; - } - return this; - }; - - /** - * Create an async iterator over the individual log lines in a mongo(crypt)d - * process's stdout, while also forwarding the log events to the bus. - * - * @param stdout Any Readable stream that follows the mongodb logv2 or logv1 formats. - * @param pid The process id, used for logging. - */ - async* createLogEntryIterator(stdout: Readable, pid: number): AsyncIterable { - for await (const line of readline.createInterface({ input: stdout })) { - if (!line.trim()) { - continue; - } - try { - const logEntry = parseAnyLogEntry(line); - this.bus.emit('mongosh:mongocryptd-log', { pid, logEntry }); - yield logEntry; - } catch (error: any) { - this.bus.emit('mongosh:mongocryptd-error', { pid, cause: 'parse', error }); - break; - } - } - } - - /** - * Create a mongocryptd child process. - * - * @param spawnPath The first arguments to pass on the command line. - */ - _spawnMongocryptdProcess(spawnPath: string[]): ChildProcess { - const [ executable, ...args ] = [ - ...spawnPath, - '--idleShutdownTimeoutSecs', String(this.idleShutdownTimeoutSecs), - '--pidfilepath', path.join(this.path, 'mongocryptd.pid'), - '--port', '0', - ...(process.platform !== 'win32' ? ['--unixSocketPrefix', this.path] : []) - ]; - const proc = spawn(executable, args, { - stdio: ['inherit', 'pipe', 'pipe'] - }); - - proc.on('exit', (code, signal) => { - const logEntry = { exit: { code, signal } }; - this.bus.emit('mongosh:mongocryptd-log', { pid: proc.pid, logEntry }); - }); - return proc; - } - - /** - * Try to spawn mongocryptd using the paths passed to the constructor, - * and parse the process's log to understand on what path/port it - * is listening on. 
- */ - async _spawn(): Promise { - if (this.spawnPaths.length === 0) { - throw new MongoshInternalError('No mongocryptd spawn path given'); - } - - await fs.mkdir(this.path, { recursive: true, mode: 0o700 }); - process.on('exit', this.close); - - let proc: ChildProcess | undefined = undefined; - let uri = ''; - let lastError: Error | undefined = undefined; - for (const spawnPath of this.spawnPaths) { - this.bus.emit('mongosh:mongocryptd-tryspawn', { spawnPath, path: this.path }); - try { - proc = this._spawnMongocryptdProcess(spawnPath); - } catch (error: any) { - // Spawn can fail both synchronously and asynchronously. - // We log the error either way and just try the next one. - lastError = error; - this.bus.emit('mongosh:mongocryptd-error', { cause: 'spawn', error }); - continue; - } - // eslint-disable-next-line no-loop-func - proc.on('error', (error) => { - lastError = error; - this.bus.emit('mongosh:mongocryptd-error', { cause: 'spawn', error }); - }); - let stderr = ''; - // eslint-disable-next-line chai-friendly/no-unused-expressions - proc.stderr?.setEncoding('utf8').on('data', chunk => { stderr += chunk; }); - - const { pid } = proc; - - // Get an object-mode Readable stream of parsed log events. - const logEntryStream = Readable.from(this.createLogEntryIterator(proc.stdout as Readable, pid)); - const { socket, port } = await filterLogStreamForSocketAndPort(logEntryStream); - if (!socket && port === -1) { - // This likely means that stdout ended before we could get a path/port - // from it, most likely because spawning itself failed. - proc.kill(); - this.bus.emit('mongosh:mongocryptd-error', { cause: 'nostdout', stderr }); - continue; - } - - // Keep the stream going even when not being consumed in order to get - // the log events on the bus. - logEntryStream.resume(); - - // No UNIX socket means we're on Windows, where we have to use networking. - uri = !socket ? `mongodb://localhost:${port}` : `mongodb://${encodeURIComponent(socket)}`; - break; - } - if (!proc || !uri) { - throw lastError ?? new MongoshInternalError('Could not successfully spawn mongocryptd'); - } - - const interval = setInterval(async() => { - // Use half the idle timeout of the process for regular keepalive pings. - let sp; - try { - sp = await CliServiceProvider.connect(uri, { - serverSelectionTimeoutMS: this.idleShutdownTimeoutSecs * 1000 - }, {}, this.bus); - await sp.runCommandWithCheck('admin', { isMaster: 1 }); - } catch (error: any) { - this.bus.emit('mongosh:mongocryptd-error', { cause: 'ping', error }); - } finally { - if (sp !== undefined) { - await sp.close(true); - } - } - }, this.idleShutdownTimeoutSecs * 1000 / 2); - interval.unref(); - proc.unref(); - - return { uri, proc, interval }; - } - - /** - * Run when starting a new mongocryptd process. Clean up old, unused - * directories that were created by previous operations like this. - */ - async _cleanupOldMongocryptdDirectories(): Promise { - try { - const toBeRemoved = []; - for await (const dirent of await fs.opendir(path.resolve(this.path, '..'))) { - // A directory with an empty mongocryptd.pid indicates that the - // mongocryptd process in question has terminated. 
- if (dirent.name.startsWith('mongocryptd-') && dirent.isDirectory()) { - let size = 0; - try { - size = (await fs.stat(path.join(dirent.name, 'mongocryptd.pid'))).size; - } catch (err: any) { - if (err?.code !== 'ENOENT') { - throw err; - } - } - if (size === 0) { - toBeRemoved.push(path.join(this.path, '..', dirent.name)); - } - } - } - for (const dir of toBeRemoved) { - if (path.resolve(dir) !== path.resolve(this.path)) { - await fs.rmdir(dir, { recursive: true }); - } - } - } catch (error: any) { - this.bus.emit('mongosh:mongocryptd-error', { cause: 'cleanup', error }); - } - } -} - -/** - * Look at a log entry to figure out whether we are listening on a - * UNIX domain socket. - * - * @param logEntry A parsed mongodb log line. - * @returns The domain socket in question, or an empty string if the log line did not match. - */ -function getSocketFromLogEntry(logEntry: LogEntry): string { - let match; - // Log message id 23015 has the format - // { t: , s: 'I', c: 'NETWORK', id: 23016, ctx: 'listener', msg: '...', attr: { address: '/tmp/q/mongocryptd.sock' } } - if (logEntry.id === 23015) { - if (!isIP(logEntry.attr.address)) { - return logEntry.attr.address; - } - } - // Or, 4.2-style: I NETWORK [listener] Listening on /tmp/mongocryptd.sock - if (logEntry.id === undefined && (match = logEntry.message.match(/^Listening on (?.+)$/i))) { - const { addr } = match.groups as any; - if (!isIP(addr)) { - return addr; - } - } - return ''; -} - -/** - * Look at a log entry to figure out whether we are listening on a - * TCP port. - * - * @param logEntry A parsed mongodb log line. - * @returns The port in question, or an -1 if the log line did not match. - */ -function getPortFromLogEntry(logEntry: LogEntry): number { - let match; - // Log message id 23016 has the format - // { t: , s: 'I', c: 'NETWORK', id: 23016, ctx: 'listener', msg: '...', attr: { port: 27020 } } - if (logEntry.id === 23016) { - return logEntry.attr.port; - } - // Or, 4.2-style: I NETWORK [listener] waiting for connections on port 27020 - if (logEntry.id === undefined && (match = logEntry.message.match(/^waiting for connections on port (?\d+)$/i))) { - return +(match.groups?.port ?? '0'); - } - return -1; -} - -/** - * Go through a stream of parsed log entry objects and return the port/path - * data once found. - * - * @input A mongodb logv2/logv1 stream. - * @returns The (UNIX domain socket and) port that the target process is listening on. 
- */ -async function filterLogStreamForSocketAndPort(input: Readable): Promise<{ port: number, socket: string }> { - let port = -1; - let socket = ''; - const inputDuplicate: AsyncIterable = input.pipe(new PassThrough({ objectMode: true })); - - for await (const logEntry of inputDuplicate) { - if (logEntry.component !== 'NETWORK' || logEntry.context !== 'listener') { - continue; // We are only interested in listening network events - } - socket ||= getSocketFromLogEntry(logEntry); - port = getPortFromLogEntry(logEntry); - if (port !== -1) { - break; - } - } - return { socket, port }; -} diff --git a/packages/cli-repl/src/mongosh-repl.ts b/packages/cli-repl/src/mongosh-repl.ts index ad0c978f93..470d8aebdc 100644 --- a/packages/cli-repl/src/mongosh-repl.ts +++ b/packages/cli-repl/src/mongosh-repl.ts @@ -37,7 +37,7 @@ export type MongoshIOProvider = Omit, 'validateCon getHistoryFilePath(): string; exit(code?: number): Promise; readFileUTF8(filename: string): Promise<{ contents: string, absolutePath: string }>; - startMongocryptd(): Promise; + getCSFLELibraryOptions(): Promise; bugReportErrorMessageInfo?(): string | undefined; }; @@ -855,12 +855,10 @@ class MongoshNodeRepl implements EvaluationListener { } /** - * Start a mongocryptd instance that is required for automatic FLE. - * - * @returns Information about how to connect to the started mongocryptd instance. + * Get the right CSFLE shared library loading options. */ - async startMongocryptd(): Promise { - return this.ioProvider.startMongocryptd(); + async getCSFLELibraryOptions(): Promise { + return this.ioProvider.getCSFLELibraryOptions(); } /** diff --git a/packages/cli-repl/src/run.ts b/packages/cli-repl/src/run.ts index 01eef78394..30309b95f7 100644 --- a/packages/cli-repl/src/run.ts +++ b/packages/cli-repl/src/run.ts @@ -1,5 +1,6 @@ -import { CliRepl, parseCliArgs, getMongocryptdPaths, runSmokeTests, USAGE, buildInfo } from './index'; +import { CliRepl, parseCliArgs, runSmokeTests, USAGE, buildInfo } from './index'; import { getStoragePaths, getGlobalConfigPaths } from './config-directory'; +import { getCSFLELibraryPaths } from './csfle-library-paths'; import { getTlsCertificateSelector } from './tls-certificate-selector'; import { redactURICredentials } from '@mongosh/history'; import { generateConnectionInfoFromCliArgs } from '@mongosh/arg-parser'; @@ -39,22 +40,21 @@ import stream from 'stream'; console.log(JSON.stringify(buildInfo(), null, ' ')); } else if (options.smokeTests) { const smokeTestServer = process.env.MONGOSH_SMOKE_TEST_SERVER; + const csfleLibraryOpts = options.csfleLibraryPath ? [ + `--csfleLibraryPath=${options.csfleLibraryPath}` + ] : []; if (process.execPath === process.argv[1]) { // This is the compiled binary. Use only the path to it. - await runSmokeTests(smokeTestServer, process.execPath); + await runSmokeTests(smokeTestServer, process.execPath, ...csfleLibraryOpts); } else { // This is not the compiled binary. Use node + this script. - await runSmokeTests(smokeTestServer, process.execPath, process.argv[1]); + await runSmokeTests(smokeTestServer, process.execPath, process.argv[1], ...csfleLibraryOpts); } } else { - let mongocryptdSpawnPaths = [['mongocryptd']]; if (process.execPath === process.argv[1]) { // Remove the built-in Node.js listener that prints e.g. deprecation // warnings in single-binary release mode. process.removeAllListeners('warning'); - // Look for mongocryptd in the locations where our packaging would - // have put it. 
- mongocryptdSpawnPaths = await getMongocryptdPaths(); } // This is for testing under coverage, see the the comment in the tests @@ -98,7 +98,7 @@ import stream from 'stream'; shellCliOptions: { ...options, }, - mongocryptdSpawnPaths, + getCSFLELibraryPaths, input: process.stdin, output: process.stdout, onExit: process.exit, diff --git a/packages/cli-repl/src/smoke-tests-fle.ts b/packages/cli-repl/src/smoke-tests-fle.ts index 6339e076b7..6881cc0a54 100644 --- a/packages/cli-repl/src/smoke-tests-fle.ts +++ b/packages/cli-repl/src/smoke-tests-fle.ts @@ -1,6 +1,6 @@ /** - * Test script that verifies that automatic encryption using mongocryptd - * works when using the Mongo() object to construct the encryption key and + * Test script that verifies that automatic encryption using the CSFLE shared + * library works when using the Mongo() object to construct the encryption key and * to create an auto-encryption-aware connection. */ @@ -12,9 +12,9 @@ const assert = function(value, message) { process.exit(1); } }; -// There is no mongocryptd binary for darwin-x64 or rhel80-s390x yet. -if ((os.platform() === 'darwin' && os.arch() === 'arm64') || - (os.platform() === 'linux' && os.arch() === 's390x' && fs.readFileSync('/etc/os-release', 'utf8').includes('VERSION_ID="8'))) { +if (process.platform === 'linux' && process.arch === 's390x') { + // There is no CSFLE shared library binary for the rhel72 s390x that we test on. + // We will address this in MONGOSH-862. print('Test skipped') process.exit(0); } diff --git a/packages/cli-repl/src/smoke-tests.spec.ts b/packages/cli-repl/src/smoke-tests.spec.ts index 605feca537..55081fd956 100644 --- a/packages/cli-repl/src/smoke-tests.spec.ts +++ b/packages/cli-repl/src/smoke-tests.spec.ts @@ -1,17 +1,21 @@ import { runSmokeTests } from './'; import path from 'path'; -import { startTestServer, useBinaryPath } from '../../../testing/integration-testing-hooks'; +import { startTestServer, downloadCurrentCsfleSharedLibrary } from '../../../testing/integration-testing-hooks'; describe('smoke tests', () => { const testServer = startTestServer('shared'); - useBinaryPath(testServer); // Get mongocryptd in the PATH for this test + let csfleLibrary: string; + + before(async() => { + csfleLibrary = await downloadCurrentCsfleSharedLibrary(); + }); it('self-test passes', async() => { // Use ts-node to run the .ts files directly so nyc can pick them up for // coverage. 
await runSmokeTests( await testServer.connectionString(), - process.execPath, '-r', 'ts-node/register', path.resolve(__dirname, 'run.ts') + process.execPath, '-r', 'ts-node/register', path.resolve(__dirname, 'run.ts'), '--csfleLibraryPath', csfleLibrary ); }); }); diff --git a/packages/cli-repl/test/e2e-fle.spec.ts b/packages/cli-repl/test/e2e-fle.spec.ts index c3144ba1ab..f0322540c1 100644 --- a/packages/cli-repl/test/e2e-fle.spec.ts +++ b/packages/cli-repl/test/e2e-fle.spec.ts @@ -2,7 +2,12 @@ import { expect } from 'chai'; import { MongoClient } from 'mongodb'; import { TestShell } from './test-shell'; import { eventually } from '../../../testing/eventually'; -import { startTestServer, useBinaryPath, skipIfServerVersion, skipIfCommunityServer } from '../../../testing/integration-testing-hooks'; +import { + startTestServer, + skipIfServerVersion, + skipIfCommunityServer, + downloadCurrentCsfleSharedLibrary +} from '../../../testing/integration-testing-hooks'; import { makeFakeHTTPServer, fakeAWSHandlers } from '../../../testing/fake-kms'; import { once } from 'events'; import { serialize } from 'v8'; @@ -13,17 +18,25 @@ describe('FLE tests', () => { const testServer = startTestServer('not-shared', '--replicaset', '--nodes', '1'); skipIfServerVersion(testServer, '< 4.2'); // FLE only available on 4.2+ skipIfCommunityServer(testServer); // FLE is enterprise-only - useBinaryPath(testServer); // Get mongocryptd in the PATH for this test let kmsServer: ReturnType; let dbname: string; + let csfleLibrary: string; + + before(async function() { + if (process.platform === 'linux' && process.arch === 's390x') { + return this.skip(); + // There is no CSFLE shared library binary for the rhel72 s390x that we test on. + // We will address this in MONGOSH-862. + } - before(async() => { kmsServer = makeFakeHTTPServer(fakeAWSHandlers); kmsServer.listen(0); await once(kmsServer, 'listening'); + csfleLibrary = await downloadCurrentCsfleSharedLibrary(); }); after(() => { - kmsServer.close(); + // eslint-disable-next-line chai-friendly/no-unused-expressions + kmsServer?.close(); }); beforeEach(() => { kmsServer.requests = []; @@ -50,6 +63,7 @@ describe('FLE tests', () => { async function makeTestShell(): Promise { return TestShell.start({ args: [ + `--csfleLibraryPath=${csfleLibrary}`, `--awsAccessKeyId=${accessKeyId}`, `--awsSecretAccessKey=${secretAccessKey}`, `--keyVaultNamespace=${dbname}.keyVault`, @@ -140,7 +154,7 @@ describe('FLE tests', () => { it('works when a schemaMap option has been passed', async() => { const shell = TestShell.start({ - args: ['--nodb'] + args: ['--nodb', `--csfleLibraryPath=${csfleLibrary}`] }); await shell.waitForPrompt(); await shell.executeLine('local = { key: BinData(0, "kh4Gv2N8qopZQMQYMEtww/AkPsIrXNmEMxTrs3tUoTQZbZu4msdRUaR8U5fXD7A7QXYHcEvuu4WctJLoT+NvvV3eeIg3MD+K8H9SR794m/safgRHdIfy6PD+rFpvmFbY") }'); @@ -169,7 +183,7 @@ describe('FLE tests', () => { it('skips encryption when a bypassQueryAnalysis option has been passed', async() => { const shell = TestShell.start({ - args: ['--nodb'] + args: ['--nodb', `--csfleLibraryPath=${csfleLibrary}`] }); const uri = JSON.stringify(await testServer.connectionString()); @@ -240,7 +254,7 @@ describe('FLE tests', () => { skipIfServerVersion(testServer, '< 6.0'); // FLE2 only available on 6.0+ it('drops fle2 collection with all helper collections when encryptedFields options are in listCollections', async() => { - const shell = TestShell.start({ args: ['--nodb'] }); + const shell = TestShell.start({ args: ['--nodb', 
`--csfleLibraryPath=${csfleLibrary}`] }); const uri = JSON.stringify(await testServer.connectionString()); await shell.waitForPrompt(); @@ -303,7 +317,7 @@ describe('FLE tests', () => { it('performs KeyVault data key management as expected', async() => { const shell = TestShell.start({ - args: [await testServer.connectionString()] + args: [await testServer.connectionString(), `--csfleLibraryPath=${csfleLibrary}`] }); await shell.waitForPrompt(); // Wrapper for executeLine that expects single-line output diff --git a/packages/cli-repl/test/fixtures/fake-mongocryptd/exit1.js b/packages/cli-repl/test/fixtures/fake-mongocryptd/exit1.js deleted file mode 100644 index fb011534f4..0000000000 --- a/packages/cli-repl/test/fixtures/fake-mongocryptd/exit1.js +++ /dev/null @@ -1,3 +0,0 @@ -/* eslint-disable */ -'use strict'; -process.exitCode = 1; diff --git a/packages/cli-repl/test/fixtures/fake-mongocryptd/weirdlog.js b/packages/cli-repl/test/fixtures/fake-mongocryptd/weirdlog.js deleted file mode 100644 index bbccfcdde9..0000000000 --- a/packages/cli-repl/test/fixtures/fake-mongocryptd/weirdlog.js +++ /dev/null @@ -1,4 +0,0 @@ -/* eslint-disable */ -'use strict'; -console.error('Diagnostic message!'); -console.log('Hello world!'); // Hard-to-parse log message. diff --git a/packages/cli-repl/test/fixtures/fake-mongocryptd/withnetworking.js b/packages/cli-repl/test/fixtures/fake-mongocryptd/withnetworking.js deleted file mode 100644 index 5232089870..0000000000 --- a/packages/cli-repl/test/fixtures/fake-mongocryptd/withnetworking.js +++ /dev/null @@ -1,16 +0,0 @@ -/* eslint-disable */ -'use strict'; -const net = require('net'); -const fs = require('fs'); -const pidfile = process.argv[process.argv.indexOf('--pidfilepath') + 1]; - -let connections = 0; -const server = net.createServer(socket => { - connections++; - fs.writeFileSync(pidfile, JSON.stringify({ pid: process.pid, connections })); - const proxied = net.connect(+process.env.MONGOSH_TEST_PROXY_TARGET_PORT); - socket.pipe(proxied).pipe(socket); -}); -server.listen(0, () => { - console.log(`{"c":"NETWORK","id":23016,"ctx":"listener","attr":{"port":${server.address().port}}}`); -}); diff --git a/packages/cli-repl/test/fixtures/fake-mongocryptd/working-4.2-nounix.js b/packages/cli-repl/test/fixtures/fake-mongocryptd/working-4.2-nounix.js deleted file mode 100644 index 21424ec029..0000000000 --- a/packages/cli-repl/test/fixtures/fake-mongocryptd/working-4.2-nounix.js +++ /dev/null @@ -1,7 +0,0 @@ -/* eslint-disable */ -'use strict'; -console.log(` -2021-03-29T19:35:35.244+0200 I CONTROL [initandlisten] options: {} -2021-03-29T18:48:10.769+0200 I NETWORK [listener] Listening on 127.0.0.1 -2021-03-29T18:48:10.769+0200 I NETWORK [listener] waiting for connections on port 27020 -`); diff --git a/packages/cli-repl/test/fixtures/fake-mongocryptd/working-4.2-withunix.js b/packages/cli-repl/test/fixtures/fake-mongocryptd/working-4.2-withunix.js deleted file mode 100644 index 24112969b9..0000000000 --- a/packages/cli-repl/test/fixtures/fake-mongocryptd/working-4.2-withunix.js +++ /dev/null @@ -1,8 +0,0 @@ -/* eslint-disable */ -'use strict'; -console.log(` -2021-03-29T19:35:35.244+0200 I CONTROL [initandlisten] options: {} -2021-03-29T18:48:10.769+0200 I NETWORK [listener] Listening on /tmp/mongocryptd.sock -2021-03-29T18:48:10.769+0200 I NETWORK [listener] Listening on 127.0.0.1 -2021-03-29T18:48:10.769+0200 I NETWORK [listener] waiting for connections on port 27020 -`); diff --git a/packages/cli-repl/test/fixtures/fake-mongocryptd/working-4.4-nounix.js 
b/packages/cli-repl/test/fixtures/fake-mongocryptd/working-4.4-nounix.js deleted file mode 100644 index d59595e296..0000000000 --- a/packages/cli-repl/test/fixtures/fake-mongocryptd/working-4.4-nounix.js +++ /dev/null @@ -1,7 +0,0 @@ -/* eslint-disable */ -'use strict'; -console.log(` -{"t":{"$date":"2021-03-29T19:34:57.800+02:00"},"s":"I", "c":"CONTROL", "id":21951, "ctx":"initandlisten","msg":"Options set by command line","attr":{"options":{}}} -{"t":{"$date":"2021-03-29T18:48:32.518+02:00"},"s":"I", "c":"NETWORK", "id":23015, "ctx":"listener","msg":"Listening on","attr":{"address":"127.0.0.1"}} -{"t":{"$date":"2021-03-29T18:48:32.518+02:00"},"s":"I", "c":"NETWORK", "id":23016, "ctx":"listener","msg":"Waiting for connections","attr":{"port":27020,"ssl":"off"}} -`); diff --git a/packages/cli-repl/test/fixtures/fake-mongocryptd/working-4.4-withunix.js b/packages/cli-repl/test/fixtures/fake-mongocryptd/working-4.4-withunix.js deleted file mode 100644 index 8be68c3ec7..0000000000 --- a/packages/cli-repl/test/fixtures/fake-mongocryptd/working-4.4-withunix.js +++ /dev/null @@ -1,8 +0,0 @@ -/* eslint-disable */ -'use strict'; -console.log(` -{"t":{"$date":"2021-03-29T19:34:57.800+02:00"},"s":"I", "c":"CONTROL", "id":21951, "ctx":"initandlisten","msg":"Options set by command line","attr":{"options":{}}} -{"t":{"$date":"2021-03-29T18:48:32.518+02:00"},"s":"I", "c":"NETWORK", "id":23015, "ctx":"listener","msg":"Listening on","attr":{"address":"/tmp/mongocryptd.sock"}} -{"t":{"$date":"2021-03-29T18:48:32.518+02:00"},"s":"I", "c":"NETWORK", "id":23015, "ctx":"listener","msg":"Listening on","attr":{"address":"127.0.0.1"}} -{"t":{"$date":"2021-03-29T18:48:32.518+02:00"},"s":"I", "c":"NETWORK", "id":23016, "ctx":"listener","msg":"Waiting for connections","attr":{"port":27020,"ssl":"off"}} -`); diff --git a/packages/cli-repl/test/fixtures/fake-mongocryptd/writepidfile.js b/packages/cli-repl/test/fixtures/fake-mongocryptd/writepidfile.js deleted file mode 100644 index a80bbddd2e..0000000000 --- a/packages/cli-repl/test/fixtures/fake-mongocryptd/writepidfile.js +++ /dev/null @@ -1,12 +0,0 @@ -/* eslint-disable */ -'use strict'; -const fs = require('fs'); -const pidfile = process.argv[process.argv.indexOf('--pidfilepath') + 1]; - -fs.writeFileSync(pidfile, JSON.stringify({ - pid: process.pid, - args: process.argv -})); -console.log('{"t":{"$date":"2021-03-29T18:48:32.518+02:00"},"s":"I","c":"NETWORK","id":23016,"ctx":"listener","msg":"Waiting for connections","attr":{"port":27020,"ssl":"off"}}'); - -setInterval(() => {}, 1000); diff --git a/packages/logging/src/setup-logger-and-telemetry.spec.ts b/packages/logging/src/setup-logger-and-telemetry.spec.ts index be7174f98f..a7168dd28f 100644 --- a/packages/logging/src/setup-logger-and-telemetry.spec.ts +++ b/packages/logging/src/setup-logger-and-telemetry.spec.ts @@ -68,9 +68,8 @@ describe('setupLoggerAndTelemetry', () => { bus.emit('mongosh:eval-cli-script'); bus.emit('mongosh:globalconfig-load', { filename: '/etc/mongosh.conf', found: true }); - bus.emit('mongosh:mongocryptd-tryspawn', { spawnPath: ['mongocryptd'], path: 'path' }); - bus.emit('mongosh:mongocryptd-error', { cause: 'something', error: new Error('mongocryptd error!'), stderr: 'stderr', pid: 12345 }); - bus.emit('mongosh:mongocryptd-log', { pid: 12345, logEntry: {} }); + bus.emit('mongosh:csfle-load-skip', { csflePath: 'path', reason: 'reason' }); + bus.emit('mongosh:csfle-load-found', { csflePath: 'path', expectedVersion: { versionStr: 'someversion' } }); bus.emit('mongosh-snippets:loaded', { 
installdir: '/' }); bus.emit('mongosh-snippets:npm-lookup', { existingVersion: 'v1.2.3' }); @@ -155,12 +154,10 @@ describe('setupLoggerAndTelemetry', () => { expect(logOutput[i++].msg).to.equal('Evaluating script passed on the command line'); expect(logOutput[i].msg).to.equal('Loading global configuration file'); expect(logOutput[i++].attr.filename).to.equal('/etc/mongosh.conf'); - expect(logOutput[i].msg).to.equal('Trying to spawn mongocryptd'); - expect(logOutput[i++].attr).to.deep.equal({ spawnPath: ['mongocryptd'], path: 'path' }); - expect(logOutput[i].msg).to.equal('Error running mongocryptd'); - expect(logOutput[i++].attr).to.deep.equal({ cause: 'something', error: 'mongocryptd error!', stderr: 'stderr', pid: 12345 }); - expect(logOutput[i].msg).to.equal('mongocryptd log message'); - expect(logOutput[i++].attr).to.deep.equal({ pid: 12345, logEntry: {} }); + expect(logOutput[i].msg).to.equal('Skipping shared library candidate'); + expect(logOutput[i++].attr).to.deep.equal({ csflePath: 'path', reason: 'reason' }); + expect(logOutput[i].msg).to.equal('Accepted shared library candidate'); + expect(logOutput[i++].attr).to.deep.equal({ csflePath: 'path', expectedVersion: 'someversion' }); expect(logOutput[i].msg).to.equal('Loaded snippets'); expect(logOutput[i++].attr).to.deep.equal({ installdir: '/' }); expect(logOutput[i].msg).to.equal('Performing npm lookup'); diff --git a/packages/logging/src/setup-logger-and-telemetry.ts b/packages/logging/src/setup-logger-and-telemetry.ts index 881d3b0dc6..27d87d6dde 100644 --- a/packages/logging/src/setup-logger-and-telemetry.ts +++ b/packages/logging/src/setup-logger-and-telemetry.ts @@ -13,9 +13,8 @@ import type { StartLoadingCliScriptsEvent, StartMongoshReplEvent, GlobalConfigFileLoadEvent, - MongocryptdTrySpawnEvent, - MongocryptdLogEvent, - MongocryptdErrorEvent, + CSFLELibrarySkipEvent, + CSFLELibraryFoundEvent, SnippetsCommandEvent, SnippetsErrorEvent, SnippetsFetchIndexErrorEvent, @@ -273,21 +272,17 @@ export function setupLoggerAndTelemetry( }); }); - bus.on('mongosh:mongocryptd-tryspawn', function(ev: MongocryptdTrySpawnEvent) { - log.info('MONGOCRYPTD', mongoLogId(1_000_000_016), 'mongocryptd', 'Trying to spawn mongocryptd', ev); + bus.on('mongosh:csfle-load-skip', function(ev: CSFLELibrarySkipEvent) { + log.info('CSFLE', mongoLogId(1_000_000_050), 'csfle', 'Skipping shared library candidate', ev); }); - bus.on('mongosh:mongocryptd-error', function(ev: MongocryptdErrorEvent) { - log.warn('MONGOCRYPTD', mongoLogId(1_000_000_017), 'mongocryptd', 'Error running mongocryptd', { - ...ev, - error: ev.error?.message + bus.on('mongosh:csfle-load-found', function(ev: CSFLELibraryFoundEvent) { + log.warn('CSFLE', mongoLogId(1_000_000_051), 'csfle', 'Accepted shared library candidate', { + csflePath: ev.csflePath, + expectedVersion: ev.expectedVersion.versionStr }); }); - bus.on('mongosh:mongocryptd-log', function(ev: MongocryptdLogEvent) { - log.info('MONGOCRYPTD', mongoLogId(1_000_000_018), 'mongocryptd', 'mongocryptd log message', ev); - }); - bus.on('mongosh-snippets:loaded', function(ev: SnippetsLoadedEvent) { log.info('MONGOSH-SNIPPETS', mongoLogId(1_000_000_019), 'snippets', 'Loaded snippets', ev); }); diff --git a/packages/node-runtime-worker-thread/src/child-process-evaluation-listener.ts b/packages/node-runtime-worker-thread/src/child-process-evaluation-listener.ts index b9eb373614..b13e178236 100644 --- a/packages/node-runtime-worker-thread/src/child-process-evaluation-listener.ts +++ 
b/packages/node-runtime-worker-thread/src/child-process-evaluation-listener.ts @@ -4,7 +4,7 @@ import type { WorkerRuntime } from './index'; import { RuntimeEvaluationListener } from '@mongosh/browser-runtime-core'; export class ChildProcessEvaluationListener { - exposedListener: Exposed>>; + exposedListener: Exposed>>; constructor(workerRuntime: WorkerRuntime, childProcess: ChildProcess) { this.exposedListener = exposeAll( diff --git a/packages/service-provider-core/package-lock.json b/packages/service-provider-core/package-lock.json index 40e9620046..7688cb7c45 100644 --- a/packages/service-provider-core/package-lock.json +++ b/packages/service-provider-core/package-lock.json @@ -17,7 +17,7 @@ "node": ">=12.4.0" }, "optionalDependencies": { - "mongodb-client-encryption": "^2.0.0" + "mongodb-client-encryption": "^2.2.0-alpha.0" } }, "node_modules/@types/node": { @@ -405,9 +405,9 @@ } }, "node_modules/mongodb-client-encryption": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mongodb-client-encryption/-/mongodb-client-encryption-2.0.0.tgz", - "integrity": "sha512-NMawTgf4h5E/yLqse5MUj7fjLYFnBnPcP0ohsDc8qaK3Wpi2A27sjkJmclm0LJ7X8duaZrpuQChTLk1M8wtqxQ==", + "version": "2.2.0-alpha.0", + "resolved": "https://registry.npmjs.org/mongodb-client-encryption/-/mongodb-client-encryption-2.2.0-alpha.0.tgz", + "integrity": "sha512-2JnWaYfEB3w84rJw3REIchyfgxMj7ZqWqkk0x9ZV3UWZ85sPcG4WMNyNm+ngys4W4omqB9PBxN4U9IqCset36Q==", "hasInstallScript": true, "optional": true, "dependencies": { @@ -1153,9 +1153,9 @@ } }, "mongodb-client-encryption": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mongodb-client-encryption/-/mongodb-client-encryption-2.0.0.tgz", - "integrity": "sha512-NMawTgf4h5E/yLqse5MUj7fjLYFnBnPcP0ohsDc8qaK3Wpi2A27sjkJmclm0LJ7X8duaZrpuQChTLk1M8wtqxQ==", + "version": "2.2.0-alpha.0", + "resolved": "https://registry.npmjs.org/mongodb-client-encryption/-/mongodb-client-encryption-2.2.0-alpha.0.tgz", + "integrity": "sha512-2JnWaYfEB3w84rJw3REIchyfgxMj7ZqWqkk0x9ZV3UWZ85sPcG4WMNyNm+ngys4W4omqB9PBxN4U9IqCset36Q==", "optional": true, "requires": { "bindings": "^1.5.0", diff --git a/packages/service-provider-core/package.json b/packages/service-provider-core/package.json index 1e1c1d80ba..f63abf8029 100644 --- a/packages/service-provider-core/package.json +++ b/packages/service-provider-core/package.json @@ -39,7 +39,7 @@ "mongodb-build-info": "^1.2.0" }, "optionalDependencies": { - "mongodb-client-encryption": "^2.0.0" + "mongodb-client-encryption": "^2.2.0-alpha.0" }, "dependency-check": { "entries": [ diff --git a/packages/service-provider-server/src/cli-service-provider.ts b/packages/service-provider-server/src/cli-service-provider.ts index c09a2269cd..e2e0cff8a9 100644 --- a/packages/service-provider-server/src/cli-service-provider.ts +++ b/packages/service-provider-server/src/cli-service-provider.ts @@ -213,6 +213,35 @@ class CliServiceProvider extends ServiceProviderCore implements ServiceProvider this.dbcache = new WeakMap(); try { this.fle = require('mongodb-client-encryption'); + + // Monkey-patch to work around missing NODE-4242 + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + const origExtension = this.fle.extension; + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + this.fle.extension = (mongodb) => { + const exports = origExtension(mongodb); + const OrigAutoEncrypter = exports.AutoEncrypter; + exports.AutoEncrypter = class AutoEncrypter extends OrigAutoEncrypter { + _bypassQueryAnalysis?: 
boolean; + + constructor(client: any, options: any) { + super(client, options); + if (options?.bypassQueryAnalysis) { + this._bypassQueryAnalysis = true; + } + } + + init(callback: any) { + if (this._bypassQueryAnalysis) { + return callback(); + } + super.init(callback); + } + }; + return exports; + }; } catch { /* not empty */ } } diff --git a/packages/shell-api/src/mongo.ts b/packages/shell-api/src/mongo.ts index b1ef02a358..1b3e14b54b 100644 --- a/packages/shell-api/src/mongo.ts +++ b/packages/shell-api/src/mongo.ts @@ -192,7 +192,7 @@ export default class Mongo extends ShellApiClass { } else if (driverOptions.autoEncryption) { driverOptions.autoEncryption.extraOptions = { ...driverOptions.autoEncryption.extraOptions, - ...await this._instanceState.evaluationListener?.startMongocryptd?.() + ...await this._instanceState.evaluationListener?.getCSFLELibraryOptions?.() }; } const parentProvider = this._instanceState.initialServiceProvider; diff --git a/packages/shell-api/src/shell-instance-state.ts b/packages/shell-api/src/shell-instance-state.ts index 7a2d7d2f13..588cd0876d 100644 --- a/packages/shell-api/src/shell-instance-state.ts +++ b/packages/shell-api/src/shell-instance-state.ts @@ -98,10 +98,10 @@ export interface EvaluationListener extends Partial Promise; + getCSFLELibraryOptions?: () => Promise; } /** diff --git a/packages/types/src/index.ts b/packages/types/src/index.ts index 81ea094c7c..2482d2b349 100644 --- a/packages/types/src/index.ts +++ b/packages/types/src/index.ts @@ -77,16 +77,15 @@ export interface GlobalConfigFileLoadEvent { found: boolean; } -export interface MongocryptdTrySpawnEvent { - spawnPath: string[]; - path: string; +export interface CSFLELibrarySkipEvent { + csflePath: string; + reason: string; + details?: any; } -export interface MongocryptdErrorEvent { - cause: string; - error?: Error; - stderr?: string; - pid?: number; +export interface CSFLELibraryFoundEvent { + csflePath: string; + expectedVersion: { versionStr: string }; } export interface MongocryptdLogEvent { @@ -262,17 +261,13 @@ export interface MongoshBusEventsMap extends ConnectEventMap { */ 'mongosh:eval-interrupted': () => void; /** - * Signals the start of trying to spawn a `mongocryptd` process. + * Signals that a potential CSFLE library search path was skipped. */ - 'mongosh:mongocryptd-tryspawn': (ev: MongocryptdTrySpawnEvent) => void; + 'mongosh:csfle-load-skip': (ev: CSFLELibrarySkipEvent) => void; /** - * Signals an error while interfacing with a `mongocryptd` process. + * Signals that a potential CSFLE library search path was accepted. */ - 'mongosh:mongocryptd-error': (ev: MongocryptdErrorEvent) => void; - /** - * Signals an event to be logged for a `mongocryptd` process. - */ - 'mongosh:mongocryptd-log': (ev: MongocryptdLogEvent) => void; + 'mongosh:csfle-load-found': (ev: CSFLELibraryFoundEvent) => void; /** * Signals that the CLI REPL's `close` method has completed. * _ONLY AVAILABLE FOR TESTING._ diff --git a/packaging/LICENSE-mongocryptd b/packaging/LICENSE-csfle similarity index 74% rename from packaging/LICENSE-mongocryptd rename to packaging/LICENSE-csfle index 8d34d456a9..5dba125ea2 100644 --- a/packaging/LICENSE-mongocryptd +++ b/packaging/LICENSE-csfle @@ -1,7 +1,6 @@ Customer Agreement By agreeing to an Order Form that references this Customer Agreement (this “Agreement”), or by downloading our Software for a free trial, you agree to this Agreement. 
If you represent an organization, you represent and warrant that you have the authority to agree to this Agreement on behalf of your organization. - 1. Definitions. The following terms have the following meanings: “Affiliate” means an organization that controls, is controlled by, or is under common control with, a party, where “control” means direct or indirect ownership of more than 50% of the voting interests of the organization. @@ -10,19 +9,17 @@ By agreeing to an Order Form that references this Customer Agreement (this “Ag “Consulting Services” means the consulting or professional services included in your Subscription. -“Customer,” “you” and “your” means the organization that agrees to an Order Form or downloads the Software for a free trial. +“Customer”, “you” and “your” means the organization that agrees to an Order Form or downloads the Software for a free trial. “Deliverable” means a work provided to you as a part of the Consulting Services, including any report. “Documentation” means the instructions, specifications and information regarding the Software available at https://docs.mongodb.com/. - -“MongoDB,” “we,” “our” and “us” means the MongoDB company that agrees to an Order Form. - + “Order Form” means an ordering document for Subscriptions signed by both parties that refers to this Agreement. -“Server” means a virtual machine or container that stores data up to the amount of RAM specified on an applicable Order Form. +“Server” means a virtual machine or container that stores data up to the amount of RAM specified on an applicable Order Form. -“Software” means the MongoDB Enterprise database software, MongoDB Ops Manager, MongoDB Charts, MongoDB Connector for Business Intelligence, and any other software included with a Subscription, including any generally available updates to such software, but excluding open source software components, each of which has its copyright notice and license included in the license file and Documentation. +“Software” means the MongoDB Enterprise database software, MongoDB Ops Manager, MongoDB Charts, MongoDB Connector for Business Intelligence, and any other software included with a Subscription, including any generally available updates to such software, but excluding (a) beta offerings and (b) open source software components, each of which has its copyright notice and license included in the license file and Documentation. “Subscription” means a subscription for our Software, Support, or Consulting Services set forth in an Order Form. @@ -30,7 +27,7 @@ By agreeing to an Order Form that references this Customer Agreement (this “Ag 2. Subscriptions. -(a) Generally. We will provide you with the Software, Support and Consulting Services included in the Subscription. We will provide you with Support in accordance with the applicable support policy available on our website, currently available at https://www.mongodb.com/support-policy. While we may modify our support policy from time to time, we will not modify it in a way that materially and adversely affects your Support. Your Affiliates may purchase Subscriptions directly from us by signing an Order Form and you may allow an Affiliate to use your Subscriptions as long as you are responsible for the Affiliate’s compliance with this Agreement. +(a) Generally. We will provide you with the Software, Support and Consulting Services included in the Subscription. 
We will provide you with Support in accordance with the applicable support policy available on our website, currently available at https://www.mongodb.com/support-policy. While we may modify our support policy from time to time, we will not modify it in a way that materially and adversely affects your Support. Your Affiliates may purchase Subscriptions directly from us or our Affiliates by signing an Order Form and you may allow an Affiliate to use your Subscriptions as long as you are responsible for the Affiliate’s compliance with this Agreement. (b) Free Evaluation and Development. MongoDB grants you a royalty-free, nontransferable and nonexclusive license to use and reproduce the Software in your internal environment for evaluation and development purposes. You will not use the Software for any other purpose, including testing, quality assurance or production purposes without purchasing an Enterprise Advanced Subscription. We provide the free evaluation and development license of our Software on an “AS-IS” basis without any warranty. @@ -42,11 +39,11 @@ By agreeing to an Order Form that references this Customer Agreement (this “Ag 5. Payment and Taxes. You will pay undisputed fees and reimburse any business expenses as set forth on and in accordance with an Order Form. Your payment for Subscriptions is non-refundable and you may not terminate or cancel an Order Form except as stated in this Agreement. Our fees exclude and you will pay applicable taxes and similar charges, including sales, usage, excise and value added taxes. Nothing in this Agreement requires either party to pay any income taxes or similar charges of the other party. If applicable law requires you to withhold any amount from your payment, you will provide us with copies of documents related to your withholding upon our request. -6. Confidentiality. This Agreement supersedes any applicable non-disclosure agreement between the parties with respect to your use of the Software. The receiving party will use the disclosing party’s Confidential Information only in connection with this Agreement and protect the disclosing party’s Confidential Information by using the same degree of care used to protect its own confidential information, but not less than a reasonable degree of care. The receiving party will limit disclosure of the disclosing party’s Confidential Information to its and its Affiliates’ directors, officers, employees and contractors bound to confidentiality obligations at least as protective as the confidentiality provisions in this Agreement and who have a need to know the Confidential Information. The receiving party will not disclose the disclosing party’s Confidential Information to a any other third party without the disclosing party's consent, except where required to comply with applicable law or a compulsory legal order or process, provided that the receiving party will, if legally permitted, promptly notify the disclosing party. Each party will return or destroy the other party’s Confidential Information upon written request from the other party. +6. Confidentiality. This Agreement supersedes any applicable non-disclosure agreement between the parties with respect to your Subscriptions. The receiving party will use the disclosing party’s Confidential Information only in connection with this Agreement and protect the disclosing party’s Confidential Information by using the same degree of care used to protect its own confidential information, but not less than a reasonable degree of care. 
The receiving party will limit disclosure of the disclosing party’s Confidential Information to its and its Affiliates’ directors, officers, employees and contractors bound to confidentiality obligations at least as protective as the confidentiality provisions in this Agreement and who have a need to know the Confidential Information. The receiving party will not disclose the disclosing party’s Confidential Information to any other third party without the disclosing party's consent, except where required to comply with applicable law or a compulsory legal order or process, provided that the receiving party will, if legally permitted, promptly notify the disclosing party. Each party will return or destroy the other party’s Confidential Information upon written request from the other party. 7. Intellectual Property. This Agreement does not transfer any right, title or interest in any intellectual property to any party, except as expressly set forth in this Agreement. You are not obligated to provide us with any suggestions or other feedback, but if you do, we may use and modify this feedback without any restriction or payment. -8. Warranties. MongoDB represents and warrants that: (a) the Software will perform substantially in accordance with the Documentation, and (b) it will perform Consulting Services and Support in a diligent and workmanlike manner consistent with industry standards. Your exclusive remedy for MongoDB’s material breach of warranty is to terminate any affected Subscription in accordance with Section 11 and receive a refund of any prepaid fees for unused Subscriptions. Except as set forth in this Section, we provide the Software, Consulting Services and Support on an “AS-IS” basis. To the fullest extent not prohibited by law, MongoDB disclaims and this Agreement excludes any implied or statutory warranty, including any warranty of title, non-infringement, merchantability or fitness for a particular purpose. +8. Warranties. MongoDB represents and warrants that: (a) the Software will perform substantially in accordance with the Documentation; and (b) it will perform Consulting Services and Support in a diligent and workmanlike manner consistent with industry standards. Your exclusive remedy for MongoDB’s material breach of warranty is to terminate any affected Subscription in accordance with Section 11 and receive a refund of any prepaid fees for unused Subscriptions. Except as set forth in this Section, we provide the Software, Consulting Services and Support on an “AS-IS” basis. To the fullest extent not prohibited by law, MongoDB disclaims and this Agreement excludes any implied or statutory warranty, including any warranty of title, non-infringement, merchantability or fitness for a particular purpose. 9. Limitation of Liability. @@ -70,4 +67,5 @@ By agreeing to an Order Form that references this Customer Agreement (this “Ag 11. Term and Termination. The term of this Agreement commences when you agree to an Order Form, or you download our Software for a free trial, and will remain in effect until terminated in accordance with this Agreement. Either party may terminate this Agreement for convenience immediately upon notice if all Order Forms under this Agreement have expired or been terminated. Neither party may terminate an Order Form for convenience. If a party fails to cure a material breach of this Agreement within 30 days after receipt of written notice of the breach, the other party may terminate this Agreement and any affected Order Form. 
Upon termination of an Order Form or this Agreement, you will remove the Software from all Servers covered by the terminated Subscriptions. Provisions intended by their nature to survive termination of this Agreement survive termination. During the term of this Agreement and one year following termination, we may inspect your records relating to your use of the Software or Consulting Services for the purposes of verifying compliance with this Agreement. -12. General. Notices under this Agreement will be in writing and effective on the delivery date. The parties will deliver notices by (a) email, in the case of MongoDB to legal@mongodb.com and, in the case of customer, to the email address set forth on Customer’s most recent Order Form and (b) personal delivery or courier to the address of the other party set forth on the Customer’s most recent Order Form. If you are located in North, Central or South America, New York law governs this Agreement, excluding any applicable conflict of laws rules or principles, and the parties agree to the exclusive jurisdiction of the courts in New York, New York. For customers located elsewhere, the law of England and Wales governs this Agreement, excluding any applicable conflict of laws rules or principles, and the parties agree to the exclusive jurisdiction of the courts in London, England. This Agreement does not create a partnership, agency relationship, or joint venture between the parties. The United Nations Convention for the International Sale of Goods does not apply to this Agreement. Unless you tell us otherwise in writing, we may refer to our relationship with you as a customer. Any assignment of this Agreement by you without our prior written consent will be null and void, except an assignment to an Affiliate or in connection with a merger or sale of all or substantially all of your assets or stock, provided that you may not assign this Agreement to a competitor of ours without our prior written consent. If any provision of this Agreement is unenforceable, that provision will be modified to render it enforceable to the extent possible to effect the parties’ intention and the remaining provisions will not be affected. The parties may amend this Agreement only by a written amendment signed by both parties. This Agreement incorporates any addenda or exhibits, and any Order Form, and comprises the parties’ entire agreement relating to the subject matter of this Agreement. Neither party has entered into this Agreement in reliance on any representations or warranties other than those expressly set forth in this Agreement or in an applicable Order Form. If any conflict exists between the provisions in this Agreement and any Order Form, the Order Form controls, and if any conflict exists between this Agreement and any addenda, exhibit or other agreement, this Agreement controls. A purchase order is for convenience only and any terms that govern the purchase order are of no effect. Customer’s purchase of any Subscription is not contingent on, and Customer has not relied on, the delivery of any future functionality, regardless of any communication about our products. Neither party will be liable for failures or delays in performance due to causes beyond its reasonable control. +12. General. Notices under this Agreement will be in writing and effective on the delivery date. 
The parties will deliver notices by (a) email, in the case of MongoDB to legal@mongodb.com and, in the case of Customer, to the email address set forth on your most recent Order Form and (b) personal delivery or courier to the address of the other party set forth on Customer’s most recent Order Form. If you are located in North, Central or South America, New York law governs this Agreement, excluding any applicable conflict of laws rules or principles, and the parties agree to the exclusive jurisdiction of the courts in New York, New York. For customers located elsewhere, the law of England and Wales governs this Agreement, excluding any applicable conflict of laws rules or principles, and the parties agree to the exclusive jurisdiction of the courts in London, England. This Agreement does not create a partnership, agency relationship, or joint venture between the parties. The United Nations Convention for the International Sale of Goods does not apply to this Agreement. Unless you tell us otherwise in writing, we may refer to our relationship with you as a customer. Any assignment of this Agreement by you without our prior written consent will be null and void, except an assignment to an Affiliate or in connection with a merger or sale of all or substantially all of your assets or stock, provided that you may not assign this Agreement to a competitor of ours without our prior written consent. If any provision of this Agreement is unenforceable, that provision will be modified to render it enforceable to the extent possible to effect the parties’ intention and the remaining provisions will not be affected. The parties may amend this Agreement only by a written amendment signed by both parties. This Agreement incorporates any addenda or exhibits, and any Order Form, and comprises the parties’ entire agreement relating to the subject matter of this Agreement. Neither party has entered into this Agreement in reliance on any representations or warranties other than those expressly set forth in this Agreement or in an applicable Order Form. If any conflict exists between the provisions in this Agreement and any Order Form, the Order Form controls, and if any conflict exists between this Agreement and any addenda, exhibit or other agreement, this Agreement controls. A purchase order is for convenience only and any terms that govern the purchase order are of no effect. Customer’s purchase of any Subscription is not contingent on, and Customer has not relied on, the delivery of any future functionality, regardless of any communication about our products. Neither party will be liable for failures or delays in performance due to causes beyond its reasonable control. + diff --git a/packaging/README b/packaging/README index 9185e6bfc5..d8192b64e6 100644 --- a/packaging/README +++ b/packaging/README @@ -1,12 +1,11 @@ This package contains `mongosh`, the mongo shell. -Additionally, it includes a helper binary, `mongocryptd` (under the name -`mongocryptd-mongosh`), as well as a manual page `mongosh.1.gz` -that can be viewed using the `man` command line utility, and their -respective licensing files. +Additionally, it includes a shared library, `mongosh_csfle_v1`, as well as a +manual page `mongosh.1.gz` that can be viewed using the `man` command line +utility, and their respective licensing files. Extract them to a suitable location. 
For licensing information: - mongosh: See LICENSE-mongosh -- mongocryptd: See LICENSE-mongocryptd +- mongosh_csfle_v1: See LICENSE-csfle diff --git a/packaging/msi-template/README.md b/packaging/msi-template/README.md index 9ebddd5cb6..0f53a0d089 100644 --- a/packaging/msi-template/README.md +++ b/packaging/msi-template/README.md @@ -16,4 +16,4 @@ Build by executing commands: - BuildFolder: Folder containing the binaries and license notices. Defaults to "..\\..\mongosh-_Version_-dev.0-win32" # Open Issues -1. mongosh.exe and mongocryptd-mongosh.exe should have a version numbers \ No newline at end of file +1. mongosh.exe and mongosh_csfle_v1.dll should have a version numbers diff --git a/scripts/docker/amazonlinux1-rpm.Dockerfile b/scripts/docker/amazonlinux1-rpm.Dockerfile index 6202a518b9..6dded8a70b 100644 --- a/scripts/docker/amazonlinux1-rpm.Dockerfile +++ b/scripts/docker/amazonlinux1-rpm.Dockerfile @@ -2,8 +2,9 @@ FROM amazonlinux:1 ARG artifact_url="" ADD ${artifact_url} /tmp +ADD node_modules /usr/share/mongodb-csfle-library-version/node_modules RUN yum repolist RUN yum install -y /tmp/*mongosh-*.x86_64.rpm RUN /usr/bin/mongosh --version -RUN /usr/libexec/mongocryptd-mongosh --version +RUN env MONGOSH_RUN_NODE_SCRIPT=1 mongosh /usr/share/mongodb-csfle-library-version/node_modules/.bin/mongodb-csfle-library-version /usr/lib64/mongosh_csfle_v1.so | grep -q ^mongo_csfle_v1- ENTRYPOINT [ "mongosh" ] diff --git a/scripts/docker/amazonlinux2-rpm.Dockerfile b/scripts/docker/amazonlinux2-rpm.Dockerfile index a3ebcec60c..4492a922b0 100644 --- a/scripts/docker/amazonlinux2-rpm.Dockerfile +++ b/scripts/docker/amazonlinux2-rpm.Dockerfile @@ -2,8 +2,9 @@ FROM amazonlinux:2 ARG artifact_url="" ADD ${artifact_url} /tmp +ADD node_modules /usr/share/mongodb-csfle-library-version/node_modules RUN yum repolist RUN yum install -y /tmp/*mongosh-*.x86_64.rpm RUN /usr/bin/mongosh --version -RUN /usr/libexec/mongocryptd-mongosh --version +RUN env MONGOSH_RUN_NODE_SCRIPT=1 mongosh /usr/share/mongodb-csfle-library-version/node_modules/.bin/mongodb-csfle-library-version /usr/lib64/mongosh_csfle_v1.so | grep -q ^mongo_csfle_v1- ENTRYPOINT [ "mongosh" ] diff --git a/scripts/docker/build.sh b/scripts/docker/build.sh index 916f6ba539..2481e4a27e 100755 --- a/scripts/docker/build.sh +++ b/scripts/docker/build.sh @@ -3,6 +3,9 @@ set -e cd "$(dirname "$0")" +# Used for verifying that we actually have a working csfle shared library +[ -x node_modules/mongodb-csfle-library-version ] || npm install + if [ x"$ARTIFACT_URL" = x"" ]; then SHA=`git rev-parse origin/main` VERSION=`git show ${SHA}:../../lerna.json | grep version | cut -d ":" -f 2 | cut -d '"' -f 2` diff --git a/scripts/docker/centos7-rpm.Dockerfile b/scripts/docker/centos7-rpm.Dockerfile index 6ff848311e..8b8e9886b4 100644 --- a/scripts/docker/centos7-rpm.Dockerfile +++ b/scripts/docker/centos7-rpm.Dockerfile @@ -2,8 +2,9 @@ FROM centos:7 ARG artifact_url="" ADD ${artifact_url} /tmp +ADD node_modules /usr/share/mongodb-csfle-library-version/node_modules RUN yum repolist RUN yum install -y /tmp/*mongosh-*.x86_64.rpm RUN /usr/bin/mongosh --version -RUN /usr/libexec/mongocryptd-mongosh --version +RUN env MONGOSH_RUN_NODE_SCRIPT=1 mongosh /usr/share/mongodb-csfle-library-version/node_modules/.bin/mongodb-csfle-library-version /usr/lib64/mongosh_csfle_v1.so | grep -q ^mongo_csfle_v1- ENTRYPOINT [ "mongosh" ] diff --git a/scripts/docker/debian10-deb.Dockerfile b/scripts/docker/debian10-deb.Dockerfile index 10b30984e7..01baf9ea86 100644 --- 
a/scripts/docker/debian10-deb.Dockerfile +++ b/scripts/docker/debian10-deb.Dockerfile @@ -2,10 +2,11 @@ FROM debian:10 ARG artifact_url="" ADD ${artifact_url} /tmp +ADD node_modules /usr/share/mongodb-csfle-library-version/node_modules RUN apt-get update RUN apt-get install -y man-db RUN apt-get install -y /tmp/*mongosh_*_amd64.deb RUN /usr/bin/mongosh --version -RUN /usr/libexec/mongocryptd-mongosh --version +RUN env MONGOSH_RUN_NODE_SCRIPT=1 mongosh /usr/share/mongodb-csfle-library-version/node_modules/.bin/mongodb-csfle-library-version /usr/lib/mongosh_csfle_v1.so | grep -q ^mongo_csfle_v1- RUN man mongosh | grep -q tlsAllowInvalidCertificates ENTRYPOINT [ "mongosh" ] diff --git a/scripts/docker/debian11-deb.Dockerfile b/scripts/docker/debian11-deb.Dockerfile index dfd73e0bb9..66018932eb 100644 --- a/scripts/docker/debian11-deb.Dockerfile +++ b/scripts/docker/debian11-deb.Dockerfile @@ -2,10 +2,11 @@ FROM debian:11 ARG artifact_url="" ADD ${artifact_url} /tmp +ADD node_modules /usr/share/mongodb-csfle-library-version/node_modules RUN apt-get update RUN apt-get install -y man-db RUN apt-get install -y /tmp/*mongosh_*_amd64.deb RUN /usr/bin/mongosh --version -RUN /usr/libexec/mongocryptd-mongosh --version +RUN env MONGOSH_RUN_NODE_SCRIPT=1 mongosh /usr/share/mongodb-csfle-library-version/node_modules/.bin/mongodb-csfle-library-version /usr/lib/mongosh_csfle_v1.so | grep -q ^mongo_csfle_v1- RUN man mongosh | grep -q tlsAllowInvalidCertificates ENTRYPOINT [ "mongosh" ] diff --git a/scripts/docker/debian9-deb.Dockerfile b/scripts/docker/debian9-deb.Dockerfile index 7023f5b1b0..ba15bcb404 100644 --- a/scripts/docker/debian9-deb.Dockerfile +++ b/scripts/docker/debian9-deb.Dockerfile @@ -2,10 +2,11 @@ FROM debian:9 ARG artifact_url="" ADD ${artifact_url} /tmp +ADD node_modules /usr/share/mongodb-csfle-library-version/node_modules RUN apt-get update RUN apt-get install -y man-db RUN apt-get install -y /tmp/*mongosh_*_amd64.deb RUN /usr/bin/mongosh --version -RUN /usr/libexec/mongocryptd-mongosh --version +RUN env MONGOSH_RUN_NODE_SCRIPT=1 mongosh /usr/share/mongodb-csfle-library-version/node_modules/.bin/mongodb-csfle-library-version /usr/lib/mongosh_csfle_v1.so | grep -q ^mongo_csfle_v1- RUN man mongosh | grep -q tlsAllowInvalidCertificates ENTRYPOINT [ "mongosh" ] diff --git a/scripts/docker/fedora34-rpm.Dockerfile b/scripts/docker/fedora34-rpm.Dockerfile index fdaabaf2ba..61c230e9fe 100644 --- a/scripts/docker/fedora34-rpm.Dockerfile +++ b/scripts/docker/fedora34-rpm.Dockerfile @@ -2,9 +2,10 @@ FROM fedora:34 ARG artifact_url="" ADD ${artifact_url} /tmp +ADD node_modules /usr/share/mongodb-csfle-library-version/node_modules RUN yum repolist RUN yum install -y man RUN yum install -y /tmp/*mongosh-*.x86_64.rpm RUN /usr/bin/mongosh --version -RUN /usr/libexec/mongocryptd-mongosh --version +RUN env MONGOSH_RUN_NODE_SCRIPT=1 mongosh /usr/share/mongodb-csfle-library-version/node_modules/.bin/mongodb-csfle-library-version /usr/lib64/mongosh_csfle_v1.so | grep -q ^mongo_csfle_v1- ENTRYPOINT [ "mongosh" ] diff --git a/scripts/docker/package-lock.json b/scripts/docker/package-lock.json new file mode 100644 index 0000000000..a9413991d1 --- /dev/null +++ b/scripts/docker/package-lock.json @@ -0,0 +1,79 @@ +{ + "name": "@mongosh/docker-build-scripts", + "version": "0.0.0-dev.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "name": "@mongosh/docker-build-scripts", + "version": "0.0.0-dev.0", + "license": "Apache-2.0", + "dependencies": { + "mongodb-csfle-library-version": "^1.0.2" 
+ }, + "engines": { + "node": ">=12.4.0" + } + }, + "node_modules/bindings": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", + "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", + "dependencies": { + "file-uri-to-path": "1.0.0" + } + }, + "node_modules/file-uri-to-path": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", + "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==" + }, + "node_modules/mongodb-csfle-library-version": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/mongodb-csfle-library-version/-/mongodb-csfle-library-version-1.0.2.tgz", + "integrity": "sha512-DzO4BDGh8nQUEjr7HcB9w1K1CZlfWQRA1Rkq1ROk8aJoaaEK2m++cyVHVUNzHGrYu7X1r5yqHlGxfPw5bSEU0w==", + "hasInstallScript": true, + "dependencies": { + "bindings": "^1.5.0", + "node-addon-api": "^4.3.0" + }, + "bin": { + "mongodb-csfle-library-version": "bin/mongodb-csfle-library-version.js" + } + }, + "node_modules/node-addon-api": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-4.3.0.tgz", + "integrity": "sha512-73sE9+3UaLYYFmDsFZnqCInzPyh3MqIwZO9cw58yIqAZhONrrabrYyYe3TuIqtIiOuTXVhsGau8hcrhhwSsDIQ==" + } + }, + "dependencies": { + "bindings": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", + "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", + "requires": { + "file-uri-to-path": "1.0.0" + } + }, + "file-uri-to-path": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", + "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==" + }, + "mongodb-csfle-library-version": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/mongodb-csfle-library-version/-/mongodb-csfle-library-version-1.0.2.tgz", + "integrity": "sha512-DzO4BDGh8nQUEjr7HcB9w1K1CZlfWQRA1Rkq1ROk8aJoaaEK2m++cyVHVUNzHGrYu7X1r5yqHlGxfPw5bSEU0w==", + "requires": { + "bindings": "^1.5.0", + "node-addon-api": "^4.3.0" + } + }, + "node-addon-api": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-4.3.0.tgz", + "integrity": "sha512-73sE9+3UaLYYFmDsFZnqCInzPyh3MqIwZO9cw58yIqAZhONrrabrYyYe3TuIqtIiOuTXVhsGau8hcrhhwSsDIQ==" + } + } +} diff --git a/scripts/docker/package.json b/scripts/docker/package.json new file mode 100644 index 0000000000..3a17cca486 --- /dev/null +++ b/scripts/docker/package.json @@ -0,0 +1,26 @@ +{ + "name": "@mongosh/docker-build-scripts", + "version": "0.0.0-dev.0", + "private": true, + "description": "MongoDB Shell Build Docker Images", + "config": { + "unsafe-perm": true + }, + "license": "Apache-2.0", + "publishConfig": { + "access": "public" + }, + "scripts": { + "test": "exit 0", + "test-ci": "node ../../scripts/run-if-package-requested.js npm test" + }, + "mongosh": { + "variants": [] + }, + "engines": { + "node": ">=12.4.0" + }, + "dependencies": { + "mongodb-csfle-library-version": "^1.0.2" + } +} diff --git a/scripts/docker/rocky8-rpm.Dockerfile b/scripts/docker/rocky8-rpm.Dockerfile index addfa6f51a..9c9f51e2ed 100644 --- a/scripts/docker/rocky8-rpm.Dockerfile +++ b/scripts/docker/rocky8-rpm.Dockerfile @@ -2,10 +2,11 @@ FROM rockylinux:8 ARG artifact_url="" ADD ${artifact_url} 
diff --git a/scripts/docker/rocky8-rpm.Dockerfile b/scripts/docker/rocky8-rpm.Dockerfile
index addfa6f51a..9c9f51e2ed 100644
--- a/scripts/docker/rocky8-rpm.Dockerfile
+++ b/scripts/docker/rocky8-rpm.Dockerfile
@@ -2,10 +2,11 @@
 FROM rockylinux:8
 ARG artifact_url=""
 ADD ${artifact_url} /tmp
+ADD node_modules /usr/share/mongodb-csfle-library-version/node_modules
 RUN yum repolist
 RUN yum install -y man
 RUN yum install -y /tmp/*mongosh-*.x86_64.rpm
 RUN /usr/bin/mongosh --version
-RUN /usr/libexec/mongocryptd-mongosh --version
+RUN env MONGOSH_RUN_NODE_SCRIPT=1 mongosh /usr/share/mongodb-csfle-library-version/node_modules/.bin/mongodb-csfle-library-version /usr/lib64/mongosh_csfle_v1.so | grep -q ^mongo_csfle_v1-
 RUN man mongosh | grep -q tlsAllowInvalidCertificates
 ENTRYPOINT [ "mongosh" ]
diff --git a/scripts/docker/suse12-rpm.Dockerfile b/scripts/docker/suse12-rpm.Dockerfile
index 005bfc091d..04c8ad1cdf 100644
--- a/scripts/docker/suse12-rpm.Dockerfile
+++ b/scripts/docker/suse12-rpm.Dockerfile
@@ -2,9 +2,10 @@
 FROM registry.suse.com/suse/sles12sp4
 ARG artifact_url=""
 ADD ${artifact_url} /tmp
+ADD node_modules /usr/share/mongodb-csfle-library-version/node_modules
 RUN zypper --no-gpg-checks --non-interactive addrepo https://download.opensuse.org/repositories/openSUSE:Leap:15.1:Update/standard/openSUSE:Leap:15.1:Update.repo
 RUN zypper --no-gpg-checks --non-interactive refresh
 RUN zypper --no-gpg-checks --non-interactive install /tmp/*mongosh-*.x86_64.rpm
 RUN /usr/bin/mongosh --version
-RUN /usr/libexec/mongocryptd-mongosh --version
+RUN env MONGOSH_RUN_NODE_SCRIPT=1 mongosh /usr/share/mongodb-csfle-library-version/node_modules/.bin/mongodb-csfle-library-version /usr/lib64/mongosh_csfle_v1.so | grep -q ^mongo_csfle_v1-
 ENTRYPOINT [ "mongosh" ]
diff --git a/scripts/docker/suse15-rpm.Dockerfile b/scripts/docker/suse15-rpm.Dockerfile
index 9c142401cf..bab3d72487 100644
--- a/scripts/docker/suse15-rpm.Dockerfile
+++ b/scripts/docker/suse15-rpm.Dockerfile
@@ -2,11 +2,12 @@
 FROM registry.suse.com/suse/sle15
 ARG artifact_url=""
 ADD ${artifact_url} /tmp
+ADD node_modules /usr/share/mongodb-csfle-library-version/node_modules
 RUN zypper --no-gpg-checks --non-interactive addrepo https://download.opensuse.org/repositories/openSUSE:Leap:15.1:Update/standard/openSUSE:Leap:15.1:Update.repo
 RUN zypper --no-gpg-checks --non-interactive refresh
 RUN zypper --no-gpg-checks --non-interactive install man
 RUN zypper --no-gpg-checks --non-interactive install /tmp/*mongosh-*.x86_64.rpm
 RUN /usr/bin/mongosh --version
-RUN /usr/libexec/mongocryptd-mongosh --version
+RUN env MONGOSH_RUN_NODE_SCRIPT=1 mongosh /usr/share/mongodb-csfle-library-version/node_modules/.bin/mongodb-csfle-library-version /usr/lib64/mongosh_csfle_v1.so | grep -q ^mongo_csfle_v1-
 RUN man mongosh | grep -q tlsAllowInvalidCertificates
 ENTRYPOINT [ "mongosh" ]
diff --git a/scripts/docker/ubuntu18.04-deb.Dockerfile b/scripts/docker/ubuntu18.04-deb.Dockerfile
index c7bda1f2df..efc196e2d5 100644
--- a/scripts/docker/ubuntu18.04-deb.Dockerfile
+++ b/scripts/docker/ubuntu18.04-deb.Dockerfile
@@ -2,8 +2,9 @@
 FROM ubuntu:18.04
 ARG artifact_url=""
 ADD ${artifact_url} /tmp
+ADD node_modules /usr/share/mongodb-csfle-library-version/node_modules
 RUN apt-get update
 RUN apt-get install -y /tmp/*mongosh_*_amd64.deb
 RUN /usr/bin/mongosh --version
-RUN /usr/libexec/mongocryptd-mongosh --version
+RUN env MONGOSH_RUN_NODE_SCRIPT=1 mongosh /usr/share/mongodb-csfle-library-version/node_modules/.bin/mongodb-csfle-library-version /usr/lib/mongosh_csfle_v1.so | grep -q ^mongo_csfle_v1-
 ENTRYPOINT [ "mongosh" ]
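Note that the deb-based images check /usr/lib/mongosh_csfle_v1.so while the rpm-based images check /usr/lib64/mongosh_csfle_v1.so, matching where the respective packages are expected to place the library. If the grep in a smoke test fails, listing the installed package's files is a quick way to confirm the actual path; the package name below is an assumption and may differ:

    # Sketch only: locate the CSFLE shared library shipped by the installed mongosh package.
    dpkg -L mongodb-mongosh | grep csfle    # deb-based images (assumed package name)
    rpm -ql mongodb-mongosh | grep csfle    # rpm-based images (assumed package name)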
diff --git a/scripts/docker/ubuntu20.04-deb.Dockerfile b/scripts/docker/ubuntu20.04-deb.Dockerfile
index dcd01eca7c..10f98236cf 100644
--- a/scripts/docker/ubuntu20.04-deb.Dockerfile
+++ b/scripts/docker/ubuntu20.04-deb.Dockerfile
@@ -2,11 +2,12 @@
 FROM ubuntu:20.04
 ARG artifact_url=""
 ADD ${artifact_url} /tmp
+ADD node_modules /usr/share/mongodb-csfle-library-version/node_modules
 RUN apt-get update
 RUN yes | unminimize
 RUN apt-get install -y man-db
 RUN apt-get install -y /tmp/*mongosh_*_amd64.deb
 RUN /usr/bin/mongosh --version
-RUN /usr/libexec/mongocryptd-mongosh --version
+RUN env MONGOSH_RUN_NODE_SCRIPT=1 mongosh /usr/share/mongodb-csfle-library-version/node_modules/.bin/mongodb-csfle-library-version /usr/lib/mongosh_csfle_v1.so | grep -q ^mongo_csfle_v1-
 RUN man mongosh | grep -q tlsAllowInvalidCertificates
 ENTRYPOINT [ "mongosh" ]
diff --git a/scripts/no-mongocryptd.sh b/scripts/no-mongocryptd.sh
deleted file mode 100755
index 84d5753d37..0000000000
--- a/scripts/no-mongocryptd.sh
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/bin/sh
-echo Sorry, mongocryptd is not available for this platform yet.
diff --git a/testing/integration-testing-hooks.ts b/testing/integration-testing-hooks.ts
index 544ec4bc1a..ecf4f87d15 100644
--- a/testing/integration-testing-hooks.ts
+++ b/testing/integration-testing-hooks.ts
@@ -10,6 +10,7 @@ import { URL } from 'url';
 import { promisify } from 'util';
 import which from 'which';
 import { downloadMongoDb } from '../packages/build/src/download-mongodb';
+import { downloadCsfleLibrary } from '../packages/build/src/packaging/download-csfle-library';

 const execFile = promisify(child_process.execFile);

@@ -389,6 +390,13 @@ export async function ensureMongodAvailable(mongodVersion = process.env.MONGOSH_
   }
 }

+export async function downloadCurrentCsfleSharedLibrary(): Promise<string> {
+  if (process.platform === 'linux') {
+    return await downloadCsfleLibrary(`linux-${process.arch.replace('ppc64', 'ppc64le')}` as any);
+  }
+  return downloadCsfleLibrary('host');
+}
+
 /**
  * Starts a local server unless the `MONGOSH_TEST_SERVER_URL`
  * environment variable is set.
@@ -511,29 +519,6 @@ export function skipIfApiStrict(): void {
   });
 }

-/**
- * Add the server tarball's bin/ directrory to the PATH for this section.
- * This enables using e.g. mongocryptd if available.
- *
- * describe('...', () => {
- *   useBinaryPath(testServer)
- * });
- */
-export function useBinaryPath(server: MongodSetup): void {
-  let pathBefore: string;
-  before(async() => {
-    await server.start();
-    pathBefore = process.env.PATH ?? '';
-    const extraPath = server.bindir;
-    if (extraPath !== null) {
-      process.env.PATH += path.delimiter + extraPath;
-    }
-  });
-  after(() => {
-    process.env.PATH = pathBefore;
-  });
-}
-
 /**
  * Skip tests in the suite if the test server version
  * (configured as environment variable or the currently installed one)