Skip to content

Commit

Permalink
feat(compartment-mapper)!: Pre-load for archive integrity checks
Browse files Browse the repository at this point in the history
*BREAKING CHANGE:* Previously, `loadArchive` and `parseArchive`, when given a `computeSha512`, would accept just about any archive. Hash integrity checks for any used module occurred only after a request to import them. With this new version, all archives must use every file they contain and must pass hash integrity checks during the load or parse phase.  Consequently, if an archive requires any built-in modules ("exits"), these must be mentioned with the `modules` option to `loadArchive` or `parseArchive`, as an object whose keys are the names of the expected modules.

Refs: #3859
  • Loading branch information
kriskowal committed Feb 25, 2022
1 parent 735ff94 commit 3c28ddc
Show file tree
Hide file tree
Showing 5 changed files with 127 additions and 20 deletions.
8 changes: 8 additions & 0 deletions packages/compartment-mapper/NEWS.md
Expand Up @@ -2,6 +2,14 @@ User-visible changes to the compartment mapper:

# 0.6.7 (2022-02-21)

- *BREAKING:* The `loadArchive` and `parseArchive` functions, when given a
`computeSha512`, now check the integrity of every module in the archive, and
forbid the presence of any unused files in the archive.
So, these functions now require a `modules` option if the archive will expect
any built-in modules. The `modules` option is an object with a key for every
built-in module the archive expects.
The load and parse functions ignore the corresponding values (even if they are
falsy!) but will accept the same type of object as the import function.
- The `parseArchive` function returns a promise for an archive. If provided a
`computeSha512`, regardless of whether provided `expectedSha512`, the archive
will have a `sha512` property computed from the parsed archive, for
Expand Down
108 changes: 93 additions & 15 deletions packages/compartment-mapper/src/import-archive.js
Expand Up @@ -3,7 +3,6 @@

/** @typedef {import('ses').ImportHook} ImportHook */
/** @typedef {import('./types.js').ParseFn} ParseFn */
/** @typedef {import('./types.js').ArchiveReader} ArchiveReader */
/** @typedef {import('./types.js').CompartmentDescriptor} CompartmentDescriptor */
/** @typedef {import('./types.js').Application} Application */
/** @typedef {import('./types.js').CompartmentMapDescriptor} CompartmentMapDescriptor */
Expand All @@ -15,7 +14,7 @@
/** @typedef {import('./types.js').LoadArchiveOptions} LoadArchiveOptions */
/** @typedef {import('./types.js').ExecuteOptions} ExecuteOptions */

import { readZip } from '@endo/zip';
import { ZipReader } from '@endo/zip';
import { link } from './link.js';
import { parsePreCjs } from './parse-pre-cjs.js';
import { parseJson } from './parse-json.js';
Expand All @@ -25,8 +24,9 @@ import { unpackReadPowers } from './powers.js';
import { join } from './node-module-specifier.js';
import { assertCompartmentMap } from './compartment-map.js';

// q as in quote for strings in error messages.
const q = JSON.stringify;
const DefaultCompartment = Compartment;

const { quote: q, details: d } = assert;

const textDecoder = new TextDecoder();

Expand All @@ -45,15 +45,15 @@ const parserForLanguage = {
*/

/**
* @param {ArchiveReader} archive
* @param {(path: string) => Uint8Array} get
* @param {Record<string, CompartmentDescriptor>} compartments
* @param {string} archiveLocation
* @param {HashFn} [computeSha512]
* @param {ComputeSourceLocationHook} [computeSourceLocation]
* @returns {ArchiveImportHookMaker}
*/
const makeArchiveImportHookMaker = (
archive,
get,
compartments,
archiveLocation,
computeSha512 = undefined,
Expand Down Expand Up @@ -84,7 +84,7 @@ const makeArchiveImportHookMaker = (
);
}
const moduleLocation = `${packageLocation}/${module.location}`;
const moduleBytes = await archive.read(moduleLocation);
const moduleBytes = get(moduleLocation);

if (computeSha512 !== undefined && module.sha512 !== undefined) {
const sha512 = computeSha512(moduleBytes);
Expand Down Expand Up @@ -127,12 +127,35 @@ const makeArchiveImportHookMaker = (
return makeImportHook;
};

const makeFeauxModuleExportsNamespace = Compartment => {
  // Produce a stand-in module-exports namespace: a throwaway compartment
  // whose sole module ('.') has no imports and a no-op initializer. This is
  // used where a real namespace object that passes the Compartment brand
  // check is required but no actual module instance exists.
  const hooks = {
    resolveHook: () => '.',
    importHook: () => ({ imports: [], execute() {} }),
  };
  // @ts-ignore Unclear at time of writing why Compartment type is not
  // constructible.
  const throwaway = new Compartment({}, {}, hooks);
  return throwaway.module('.');
};

/**
* @param {Uint8Array} archiveBytes
* @param {string} [archiveLocation]
* @param {Object} [options]
* @param {string} [options.expectedSha512]
* @param {HashFn} [options.computeSha512]
* @param {Record<string, unknown>} [options.modules]
* @param {Compartment} [options.Compartment]
* @param {ComputeSourceLocationHook} [options.computeSourceLocation]
* @returns {Promise<Application>}
*/
Expand All @@ -145,10 +168,28 @@ export const parseArchive = async (
computeSha512 = undefined,
expectedSha512 = undefined,
computeSourceLocation = undefined,
Compartment = DefaultCompartment,
modules = undefined,
} = options;

const archive = await readZip(archiveBytes, archiveLocation);
const compartmentMapBytes = await archive.read('compartment-map.json');
const archive = new ZipReader(archiveBytes, { name: archiveLocation });

// Track all modules that get loaded, all files that are used.
const unseen = new Set(archive.files.keys());
assert(
unseen.size >= 2,
`Archive failed sanity check: should contain at least a compartment map file and one module file.`,
);

/**
* @param {string} path
*/
const get = path => {
unseen.delete(path);
return archive.read(path);
};

const compartmentMapBytes = get('compartment-map.json');

let sha512;
if (computeSha512 !== undefined) {
Expand All @@ -174,6 +215,46 @@ export const parseArchive = async (
);
assertCompartmentMap(compartmentMap);

const {
compartments,
entry: { module: moduleSpecifier },
} = compartmentMap;

// Archive integrity checks: ensure every module is pre-loaded so its hash
// gets checked, and ensure that every file in the archive is used, and
// therefore checked.
if (computeSha512 !== undefined) {
const makeImportHook = makeArchiveImportHookMaker(
get,
compartments,
archiveLocation,
computeSha512,
computeSourceLocation,
);
// A weakness of the current Compartment design is that the `modules` map
// must be given a module namespace object that passes a brand check.
// We don't have module instances for the preload phase, so we supply fake
// namespaces.
const { compartment } = link(compartmentMap, {
makeImportHook,
parserForLanguage,
modules: Object.fromEntries(
Object.keys(modules || {}).map(specifier => {
return [specifier, makeFeauxModuleExportsNamespace(Compartment)];
}),
),
Compartment,
});

await compartment.load(moduleSpecifier);
assert(
unseen.size === 0,
d`Archive contains extraneous files: ${q([...unseen])} in ${q(
archiveLocation,
)}`,
);
}

/** @type {ExecuteFn} */
const execute = options => {
const {
Expand All @@ -184,12 +265,8 @@ export const parseArchive = async (
__shimTransforms__,
Compartment,
} = options || {};
const {
compartments,
entry: { module: moduleSpecifier },
} = compartmentMap;
const makeImportHook = makeArchiveImportHookMaker(
archive,
get,
compartments,
archiveLocation,
computeSha512,
Expand Down Expand Up @@ -223,12 +300,13 @@ export const loadArchive = async (
options = {},
) => {
const { read, computeSha512 } = unpackReadPowers(readPowers);
const { expectedSha512, computeSourceLocation } = options;
const { expectedSha512, computeSourceLocation, modules } = options;
const archiveBytes = await read(archiveLocation);
return parseArchive(archiveBytes, archiveLocation, {
computeSha512,
expectedSha512,
computeSourceLocation,
modules,
});
};

Expand Down
4 changes: 3 additions & 1 deletion packages/compartment-mapper/src/types.js
Expand Up @@ -200,6 +200,8 @@ export const moduleJSDocTypes = true;
/**
* @typedef {Object} LoadArchiveOptions
* @property {string} [expectedSha512]
* @property {Record<string, unknown>} [modules]
* @property {Compartment} [Compartment]
* @property {ComputeSourceLocationHook} [computeSourceLocation]
*/

Expand All @@ -210,7 +212,7 @@ export const moduleJSDocTypes = true;
* @property {Array<Transform>} [transforms]
* @property {Array<Transform>} [__shimTransforms__]
* @property {Record<string, Object>} [modules]
* @property {typeof Compartment.prototype.constructor} [Compartment]
* @property {Compartment} [Compartment]
*/

/**
Expand Down
23 changes: 20 additions & 3 deletions packages/compartment-mapper/test/scaffold.js
Expand Up @@ -94,7 +94,17 @@ export function scaffold(
modules,
dev: true,
});
const application = await parseArchive(archive);
const application = await parseArchive(archive, '<unknown>', {
modules: Object.fromEntries(
Object.keys(modules).map((specifier, index) => {
// Replacing the namespace with an arbitrary index ensures that the
// parse phase does not depend on the type or values of the exit module
// set.
return [specifier, index];
}),
),
Compartment,
});
const { namespace } = await application.import({
globals,
globalLexicals,
Expand All @@ -116,7 +126,10 @@ export function scaffold(
const prefixArchive = new Uint8Array(archive.length + 10);
prefixArchive.set(archive, 10);

const application = await parseArchive(prefixArchive);
const application = await parseArchive(prefixArchive, '<unknown>', {
modules,
Compartment,
});
const { namespace } = await application.import({
globals,
globalLexicals,
Expand Down Expand Up @@ -145,7 +158,10 @@ export function scaffold(
modules: { builtin: true },
dev: true,
});
const application = await loadArchive(fakeRead, 'app.agar');
const application = await loadArchive(fakeRead, 'app.agar', {
modules,
Compartment,
});
const { namespace } = await application.import({
globals,
globalLexicals,
Expand Down Expand Up @@ -203,6 +219,7 @@ export function scaffold(
'memory:app.agar',
{
modules,
Compartment,
dev: true,
computeSha512,
expectedSha512,
Expand Down
4 changes: 3 additions & 1 deletion packages/compartment-mapper/test/test-main.js
Expand Up @@ -83,7 +83,9 @@ test('no dev dependencies', async t => {

await t.throwsAsync(
async () => {
const application = await loadLocation(readPowers, fixture);
const application = await loadLocation(readPowers, fixture, {
modules,
});
await application.import({
globals,
globalLexicals,
Expand Down

0 comments on commit 3c28ddc

Please sign in to comment.