diff --git a/build/main.js b/build/main.js index 4ccccab..e54bf6e 100644 --- a/build/main.js +++ b/build/main.js @@ -426,18 +426,18 @@ var require_tunnel = __commonJS({ res.statusCode ); socket.destroy(); - var error2 = new Error("tunneling socket could not be established, statusCode=" + res.statusCode); - error2.code = "ECONNRESET"; - options.request.emit("error", error2); + var error3 = new Error("tunneling socket could not be established, statusCode=" + res.statusCode); + error3.code = "ECONNRESET"; + options.request.emit("error", error3); self.removeSocket(placeholder); return; } if (head.length > 0) { debug("got illegal response body from proxy"); socket.destroy(); - var error2 = new Error("got illegal response body from proxy"); - error2.code = "ECONNRESET"; - options.request.emit("error", error2); + var error3 = new Error("got illegal response body from proxy"); + error3.code = "ECONNRESET"; + options.request.emit("error", error3); self.removeSocket(placeholder); return; } @@ -452,9 +452,9 @@ var require_tunnel = __commonJS({ cause.message, cause.stack ); - var error2 = new Error("tunneling socket could not be established, cause=" + cause.message); - error2.code = "ECONNRESET"; - options.request.emit("error", error2); + var error3 = new Error("tunneling socket could not be established, cause=" + cause.message); + error3.code = "ECONNRESET"; + options.request.emit("error", error3); self.removeSocket(placeholder); } }; @@ -5582,7 +5582,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r throw new TypeError("Body is unusable"); } const promise = createDeferredPromise(); - const errorSteps = (error2) => promise.reject(error2); + const errorSteps = (error3) => promise.reject(error3); const successSteps = (data) => { try { promise.resolve(convertBytesToJSValue(data)); @@ -5868,16 +5868,16 @@ var require_request = __commonJS({ this.onError(err); } } - onError(error2) { + onError(error3) { this.onFinally(); if (channels.error.hasSubscribers) { - channels.error.publish({ request: this, error: error2 }); + channels.error.publish({ request: this, error: error3 }); } if (this.aborted) { return; } this.aborted = true; - return this[kHandler].onError(error2); + return this[kHandler].onError(error3); } onFinally() { if (this.errorHandler) { @@ -6740,8 +6740,8 @@ var require_RedirectHandler = __commonJS({ onUpgrade(statusCode, headers, socket) { this.handler.onUpgrade(statusCode, headers, socket); } - onError(error2) { - this.handler.onError(error2); + onError(error3) { + this.handler.onError(error3); } onHeaders(statusCode, headers, resume, statusText) { this.location = this.history.length >= this.maxRedirections || util.isDisturbed(this.opts.body) ? null : parseLocation(statusCode, headers); @@ -8882,7 +8882,7 @@ var require_pool = __commonJS({ this[kOptions] = { ...util.deepClone(options), connect, allowH2 }; this[kOptions].interceptors = options.interceptors ? 
{ ...options.interceptors } : void 0; this[kFactory] = factory; - this.on("connectionError", (origin2, targets, error2) => { + this.on("connectionError", (origin2, targets, error3) => { for (const target of targets) { const idx = this[kClients].indexOf(target); if (idx !== -1) { @@ -10491,13 +10491,13 @@ var require_mock_utils = __commonJS({ if (mockDispatch2.data.callback) { mockDispatch2.data = { ...mockDispatch2.data, ...mockDispatch2.data.callback(opts) }; } - const { data: { statusCode, data, headers, trailers, error: error2 }, delay, persist } = mockDispatch2; + const { data: { statusCode, data, headers, trailers, error: error3 }, delay, persist } = mockDispatch2; const { timesInvoked, times } = mockDispatch2; mockDispatch2.consumed = !persist && timesInvoked >= times; mockDispatch2.pending = timesInvoked < times; - if (error2 !== null) { + if (error3 !== null) { deleteMockDispatch(this[kDispatches], key); - handler.onError(error2); + handler.onError(error3); return true; } if (typeof delay === "number" && delay > 0) { @@ -10535,19 +10535,19 @@ var require_mock_utils = __commonJS({ if (agent.isMockActive) { try { mockDispatch.call(this, opts, handler); - } catch (error2) { - if (error2 instanceof MockNotMatchedError) { + } catch (error3) { + if (error3 instanceof MockNotMatchedError) { const netConnect = agent[kGetNetConnect](); if (netConnect === false) { - throw new MockNotMatchedError(`${error2.message}: subsequent request to origin ${origin} was not allowed (net.connect disabled)`); + throw new MockNotMatchedError(`${error3.message}: subsequent request to origin ${origin} was not allowed (net.connect disabled)`); } if (checkNetConnect(netConnect, origin)) { originalDispatch.call(this, opts, handler); } else { - throw new MockNotMatchedError(`${error2.message}: subsequent request to origin ${origin} was not allowed (net.connect is not enabled for this origin)`); + throw new MockNotMatchedError(`${error3.message}: subsequent request to origin ${origin} was not allowed (net.connect is not enabled for this origin)`); } } else { - throw error2; + throw error3; } } } else { @@ -10710,11 +10710,11 @@ var require_mock_interceptor = __commonJS({ /** * Mock an undici request with a defined error. 
*/ - replyWithError(error2) { - if (typeof error2 === "undefined") { + replyWithError(error3) { + if (typeof error3 === "undefined") { throw new InvalidArgumentError("error must be defined"); } - const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], { error: error2 }); + const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], { error: error3 }); return new MockScope(newMockDispatch); } /** @@ -13041,17 +13041,17 @@ var require_fetch = __commonJS({ this.emit("terminated", reason); } // https://fetch.spec.whatwg.org/#fetch-controller-abort - abort(error2) { + abort(error3) { if (this.state !== "ongoing") { return; } this.state = "aborted"; - if (!error2) { - error2 = new DOMException2("The operation was aborted.", "AbortError"); + if (!error3) { + error3 = new DOMException2("The operation was aborted.", "AbortError"); } - this.serializedAbortReason = error2; - this.connection?.destroy(error2); - this.emit("terminated", error2); + this.serializedAbortReason = error3; + this.connection?.destroy(error3); + this.emit("terminated", error3); } }; function fetch2(input, init = {}) { @@ -13155,13 +13155,13 @@ var require_fetch = __commonJS({ performance.markResourceTiming(timingInfo, originalURL.href, initiatorType, globalThis2, cacheState); } } - function abortFetch(p, request, responseObject, error2) { - if (!error2) { - error2 = new DOMException2("The operation was aborted.", "AbortError"); + function abortFetch(p, request, responseObject, error3) { + if (!error3) { + error3 = new DOMException2("The operation was aborted.", "AbortError"); } - p.reject(error2); + p.reject(error3); if (request.body != null && isReadable(request.body?.stream)) { - request.body.stream.cancel(error2).catch((err) => { + request.body.stream.cancel(error3).catch((err) => { if (err.code === "ERR_INVALID_STATE") { return; } @@ -13173,7 +13173,7 @@ var require_fetch = __commonJS({ } const response = responseObject[kState]; if (response.body != null && isReadable(response.body?.stream)) { - response.body.stream.cancel(error2).catch((err) => { + response.body.stream.cancel(error3).catch((err) => { if (err.code === "ERR_INVALID_STATE") { return; } @@ -13953,13 +13953,13 @@ var require_fetch = __commonJS({ fetchParams.controller.ended = true; this.body.push(null); }, - onError(error2) { + onError(error3) { if (this.abort) { fetchParams.controller.off("terminated", this.abort); } - this.body?.destroy(error2); - fetchParams.controller.terminate(error2); - reject(error2); + this.body?.destroy(error3); + fetchParams.controller.terminate(error3); + reject(error3); }, onUpgrade(status, headersList, socket) { if (status !== 101) { @@ -14425,8 +14425,8 @@ var require_util4 = __commonJS({ } fr[kResult] = result; fireAProgressEvent("load", fr); - } catch (error2) { - fr[kError] = error2; + } catch (error3) { + fr[kError] = error3; fireAProgressEvent("error", fr); } if (fr[kState] !== "loading") { @@ -14435,13 +14435,13 @@ var require_util4 = __commonJS({ }); break; } - } catch (error2) { + } catch (error3) { if (fr[kAborted]) { return; } queueMicrotask(() => { fr[kState] = "done"; - fr[kError] = error2; + fr[kError] = error3; fireAProgressEvent("error", fr); if (fr[kState] !== "loading") { fireAProgressEvent("loadend", fr); @@ -16441,11 +16441,11 @@ var require_connection = __commonJS({ }); } } - function onSocketError(error2) { + function onSocketError(error3) { const { ws } = this; ws[kReadyState] = states.CLOSING; if (channels.socketError.hasSubscribers) { - 
channels.socketError.publish(error2); + channels.socketError.publish(error3); } this.destroy(); } @@ -18077,12 +18077,12 @@ var require_oidc_utils = __commonJS({ var _a; return __awaiter(this, void 0, void 0, function* () { const httpclient = _OidcClient.createHttpClient(); - const res = yield httpclient.getJson(id_token_url).catch((error2) => { + const res = yield httpclient.getJson(id_token_url).catch((error3) => { throw new Error(`Failed to get ID Token. - Error Code : ${error2.statusCode} + Error Code : ${error3.statusCode} - Error Message: ${error2.message}`); + Error Message: ${error3.message}`); }); const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value; if (!id_token) { @@ -18103,8 +18103,8 @@ var require_oidc_utils = __commonJS({ const id_token = yield _OidcClient.getCall(id_token_url); (0, core_1.setSecret)(id_token); return id_token; - } catch (error2) { - throw new Error(`Error message: ${error2.message}`); + } catch (error3) { + throw new Error(`Error message: ${error3.message}`); } }); } @@ -19226,7 +19226,7 @@ var require_toolrunner = __commonJS({ this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); state.CheckComplete(); }); - state.on("done", (error2, exitCode) => { + state.on("done", (error3, exitCode) => { if (stdbuffer.length > 0) { this.emit("stdline", stdbuffer); } @@ -19234,8 +19234,8 @@ var require_toolrunner = __commonJS({ this.emit("errline", errbuffer); } cp.removeAllListeners(); - if (error2) { - reject(error2); + if (error3) { + reject(error3); } else { resolve(exitCode); } @@ -19330,14 +19330,14 @@ var require_toolrunner = __commonJS({ this.emit("debug", message); } _setResult() { - let error2; + let error3; if (this.processExited) { if (this.processError) { - error2 = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`); + error3 = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. 
Error: ${this.processError}`); } else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { - error2 = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); + error3 = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); } else if (this.processStderr && this.options.failOnStdErr) { - error2 = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); + error3 = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); } } if (this.timeout) { @@ -19345,7 +19345,7 @@ var require_toolrunner = __commonJS({ this.timeout = null; } this.done = true; - this.emit("done", error2, this.processExitCode); + this.emit("done", error3, this.processExitCode); } static HandleTimeout(state) { if (state.done) { @@ -19728,7 +19728,7 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); exports.setCommandEcho = setCommandEcho; function setFailed2(message) { process.exitCode = ExitCode.Failure; - error2(message); + error3(message); } exports.setFailed = setFailed2; function isDebug() { @@ -19739,10 +19739,10 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); (0, command_1.issueCommand)("debug", {}, message); } exports.debug = debug; - function error2(message, properties = {}) { + function error3(message, properties = {}) { (0, command_1.issueCommand)("error", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); } - exports.error = error2; + exports.error = error3; function warning(message, properties = {}) { (0, command_1.issueCommand)("warning", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); } @@ -20044,8 +20044,8 @@ var require_add = __commonJS({ } if (kind === "error") { hook = function(method, options) { - return Promise.resolve().then(method.bind(null, options)).catch(function(error2) { - return orig(error2, options); + return Promise.resolve().then(method.bind(null, options)).catch(function(error3) { + return orig(error3, options); }); }; } @@ -20777,7 +20777,7 @@ var require_dist_node5 = __commonJS({ } if (status >= 400) { const data = await getResponseData(response); - const error2 = new import_request_error.RequestError(toErrorMessage(data), status, { + const error3 = new import_request_error.RequestError(toErrorMessage(data), status, { response: { url, status, @@ -20786,7 +20786,7 @@ var require_dist_node5 = __commonJS({ }, request: requestOptions }); - throw error2; + throw error3; } return parseSuccessResponseBody ? 
await getResponseData(response) : response.body; }).then((data) => { @@ -20796,17 +20796,17 @@ var require_dist_node5 = __commonJS({ headers, data }; - }).catch((error2) => { - if (error2 instanceof import_request_error.RequestError) - throw error2; - else if (error2.name === "AbortError") - throw error2; - let message = error2.message; - if (error2.name === "TypeError" && "cause" in error2) { - if (error2.cause instanceof Error) { - message = error2.cause.message; - } else if (typeof error2.cause === "string") { - message = error2.cause; + }).catch((error3) => { + if (error3 instanceof import_request_error.RequestError) + throw error3; + else if (error3.name === "AbortError") + throw error3; + let message = error3.message; + if (error3.name === "TypeError" && "cause" in error3) { + if (error3.cause instanceof Error) { + message = error3.cause.message; + } else if (typeof error3.cause === "string") { + message = error3.cause; } } throw new import_request_error.RequestError(message, 500, { @@ -23478,9 +23478,9 @@ var require_dist_node10 = __commonJS({ /<([^<>]+)>;\s*rel="next"/ ) || [])[1]; return { value: normalizedResponse }; - } catch (error2) { - if (error2.status !== 409) - throw error2; + } catch (error3) { + if (error3.status !== 409) + throw error3; url = ""; return { value: { @@ -24581,14 +24581,20 @@ async function calculateTotalDependencySizeIncrease(newVersions, removedVersions try { const metadata = await fetchPackageMetadata(dep.name, dep.version); if (!metadata || metadata.dist?.unpackedSize === void 0) { - return null; + packageSizes.set(packageKey, null); + core2.info(`No unpacked size info for ${packageKey}, skipping`); + } else { + totalSize += metadata.dist.unpackedSize; + packageSizes.set(packageKey, metadata.dist.unpackedSize); + core2.info( + `Added ${metadata.dist.unpackedSize} bytes for ${packageKey}` + ); } - totalSize += metadata.dist.unpackedSize; - packageSizes.set(packageKey, metadata.dist.unpackedSize); processedPackages.add(packageKey); - core2.info(`Added ${metadata.dist.unpackedSize} bytes for ${packageKey}`); - } catch { - return null; + } catch (e) { + core2.error( + `Error fetching package metadata for dep ${packageKey}: ` + e.message + ); } } for (const dep of removedVersions) { @@ -24599,16 +24605,20 @@ async function calculateTotalDependencySizeIncrease(newVersions, removedVersions try { const metadata = await fetchPackageMetadata(dep.name, dep.version); if (!metadata || metadata.dist?.unpackedSize === void 0) { - return null; + packageSizes.set(packageKey, null); + core2.info(`No unpacked size info for ${packageKey}, skipping`); + } else { + totalSize -= metadata.dist.unpackedSize; + packageSizes.set(packageKey, -metadata.dist.unpackedSize); + core2.info( + `Subtracted ${metadata.dist.unpackedSize} bytes for ${packageKey}` + ); } - totalSize -= metadata.dist.unpackedSize; - packageSizes.set(packageKey, -metadata.dist.unpackedSize); processedPackages.add(packageKey); - core2.info( - `Subtracted ${metadata.dist.unpackedSize} bytes for ${packageKey}` + } catch (e) { + core2.error( + `Error fetching package metadata for dep ${packageKey}: ` + e.message ); - } catch { - return null; } } return { totalSize, packageSizes }; @@ -24980,7 +24990,9 @@ async function scanForDependencySize(messages, threshold, currentDeps, baseDeps, ); const shouldShow = threshold === -1 || sizeData !== null && sizeData.totalSize >= threshold; if (shouldShow && sizeData !== null) { - const packageRows = Array.from(sizeData.packageSizes.entries()).sort(([, a], [, b]) => b - 
a).map(([pkg, size]) => `| ${pkg} | ${formatBytes(size)} |`).join("\n"); + const packageRows = Array.from(sizeData.packageSizes.entries()).sort(([, a], [, b]) => (b ?? Infinity) - (a ?? Infinity)).map( + ([pkg, size]) => `| ${pkg} | ${size === null ? "_Unknown_" : formatBytes(size)} |` + ).join("\n"); let alert = ""; if (threshold !== -1 && sizeData.totalSize >= threshold) { alert = `> [!WARNING] @@ -25294,9 +25306,9 @@ ${messages.join("\n\n")}`; }); core7.info("Created new dependency diff comment"); } - } catch (error2) { - if (error2 instanceof Error) { - core7.setFailed(error2.message); + } catch (error3) { + if (error3 instanceof Error) { + core7.setFailed(error3.message); } else { core7.setFailed("An unknown error occurred."); } diff --git a/src/checks/dependency-size.ts b/src/checks/dependency-size.ts index 903483a..8dcec76 100644 --- a/src/checks/dependency-size.ts +++ b/src/checks/dependency-size.ts @@ -119,8 +119,11 @@ export async function scanForDependencySize( if (shouldShow && sizeData !== null) { const packageRows = Array.from(sizeData.packageSizes.entries()) - .sort(([, a], [, b]) => b - a) - .map(([pkg, size]) => `| ${pkg} | ${formatBytes(size)} |`) + .sort(([, a], [, b]) => (b ?? Infinity) - (a ?? Infinity)) + .map( + ([pkg, size]) => + `| ${pkg} | ${size === null ? '_Unknown_' : formatBytes(size)} |` + ) .join('\n'); let alert = ''; diff --git a/src/npm.ts b/src/npm.ts index 068cba6..1f975b0 100644 --- a/src/npm.ts +++ b/src/npm.ts @@ -132,10 +132,13 @@ export async function fetchPackageMetadata( export async function calculateTotalDependencySizeIncrease( newVersions: Array<{name: string; version: string}>, removedVersions: Array<{name: string; version: string}> -): Promise<{totalSize: number; packageSizes: Map<string, number>} | null> { +): Promise<{ + totalSize: number; + packageSizes: Map<string, number | null>; +} | null> { let totalSize = 0; const processedPackages = new Set(); - const packageSizes = new Map<string, number>(); + const packageSizes = new Map<string, number | null>(); for (const dep of newVersions) { const packageKey = `${dep.name}@${dep.version}`; @@ -148,16 +151,21 @@ export async function calculateTotalDependencySizeIncrease( const metadata = await fetchPackageMetadata(dep.name, dep.version); if (!metadata || metadata.dist?.unpackedSize === undefined) { - return null; + packageSizes.set(packageKey, null); + core.info(`No unpacked size info for ${packageKey}, skipping`); + } else { + totalSize += metadata.dist.unpackedSize; + packageSizes.set(packageKey, metadata.dist.unpackedSize); + core.info( + `Added ${metadata.dist.unpackedSize} bytes for ${packageKey}` + ); } - - totalSize += metadata.dist.unpackedSize; - packageSizes.set(packageKey, metadata.dist.unpackedSize); processedPackages.add(packageKey); - - core.info(`Added ${metadata.dist.unpackedSize} bytes for ${packageKey}`); - } catch { - return null; + } catch (e) { + core.error( + `Error fetching package metadata for dep ${packageKey}: ` + + (e as Error).message + ); } } @@ -172,18 +180,21 @@ export async function calculateTotalDependencySizeIncrease( const metadata = await fetchPackageMetadata(dep.name, dep.version); if (!metadata || metadata.dist?.unpackedSize === undefined) { - return null; + packageSizes.set(packageKey, null); + core.info(`No unpacked size info for ${packageKey}, skipping`); + } else { + totalSize -= metadata.dist.unpackedSize; + packageSizes.set(packageKey, -metadata.dist.unpackedSize); + core.info( + `Subtracted ${metadata.dist.unpackedSize} bytes for ${packageKey}` + ); } - - totalSize -= metadata.dist.unpackedSize; - packageSizes.set(packageKey,
-metadata.dist.unpackedSize); processedPackages.add(packageKey); - - core.info( - `Subtracted ${metadata.dist.unpackedSize} bytes for ${packageKey}` + } catch (e) { + core.error( + `Error fetching package metadata for dep ${packageKey}: ` + + (e as Error).message ); - } catch { - return null; } } diff --git a/test/npm_test.ts b/test/npm_test.ts index ee41dd3..99ee47d 100644 --- a/test/npm_test.ts +++ b/test/npm_test.ts @@ -19,7 +19,8 @@ import { getMinTrustLevel, getDependenciesFromPackageJson, type ProvenanceStatus, - type PackageMetadata + type PackageMetadata, + calculateTotalDependencySizeIncrease } from '../src/npm.js'; describe('fetchPackageMetadata', () => { @@ -72,6 +73,116 @@ describe('fetchPackageMetadata', () => { }); }); +const wrapMockMetadataResponse = (meta: unknown) => + new Response(JSON.stringify(meta), {status: 200}); + +describe('calculateTotalDependencySizeIncrease', () => { + let fetchMock: MockInstance; + let responseMap: Map<string, Response>; + + beforeEach(() => { + fetchMock = vi.spyOn(globalThis, 'fetch'); + responseMap = new Map(); + fetchMock.mockImplementation((url) => { + if (typeof url !== 'string') { + return Promise.resolve(new Response(null, {status: 404})); + } + return Promise.resolve( + responseMap.get(url) ?? new Response(null, {status: 404}) + ); + }); + }); + + afterEach(() => { + fetchMock.mockRestore(); + vi.clearAllMocks(); + }); + + it('returns 0 for empty version list', async () => { + const output = await calculateTotalDependencySizeIncrease([], []); + expect(output?.totalSize).toEqual(0); + expect(output?.packageSizes).toEqual(new Map()); + }); + + it('sums sizes of new-old versions correctly', async () => { + responseMap.set( + 'https://registry.npmjs.org/package-a/1.0.0', + wrapMockMetadataResponse({ + name: 'package-a', + version: '1.0.0', + dist: { + unpackedSize: 1500 + } + }) + ); + responseMap.set( + 'https://registry.npmjs.org/package-b/2.0.0', + wrapMockMetadataResponse({ + name: 'package-b', + version: '2.0.0', + dist: { + unpackedSize: 2500 + } + }) + ); + + const newVersions = [{name: 'package-a', version: '1.0.0'}]; + const removedVersions = [{name: 'package-b', version: '2.0.0'}]; + const output = await calculateTotalDependencySizeIncrease( + newVersions, + removedVersions + ); + expect(output?.totalSize).toEqual(-1000); + expect(output?.packageSizes).toEqual( + new Map([ + ['package-a@1.0.0', 1500], + ['package-b@2.0.0', -2500] + ]) + ); + }); + + it('handles missing unpackedSize gracefully', async () => { + responseMap.set( + 'https://registry.npmjs.org/package-a/1.0.0', + wrapMockMetadataResponse({ + name: 'package-a', + version: '1.0.0', + dist: { + unpackedSize: 1500 + } + }) + ); + responseMap.set( + 'https://registry.npmjs.org/package-c/1.0.0', + wrapMockMetadataResponse({ + name: 'package-c', + version: '1.0.0' + // No dist.unpackedSize + }) + ); + + const newVersions = [ + {name: 'package-a', version: '1.0.0'}, + {name: 'package-c', version: '1.0.0'} + ]; + const infoSpy = vi.mocked(core.info); + const output = await calculateTotalDependencySizeIncrease(newVersions, []); + expect(output?.totalSize).toEqual(1500); + expect(output?.packageSizes).toEqual( + new Map([ + ['package-a@1.0.0', 1500], + ['package-c@1.0.0', null] + ]) + ); + expect(infoSpy).toHaveBeenCalledWith( + 'Added 1500 bytes for package-a@1.0.0' + ); + expect(infoSpy).toHaveBeenCalledWith( + 'No unpacked size info for package-c@1.0.0, skipping' + ); + }); +}); + describe('getProvenance', () => { it('returns trusted-with-provenance for trusted publisher', () => { const
meta: PackageMetadata = {