diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index f85622fca..d833940e3 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -35,7 +35,7 @@ jobs:
     name: Run CI Pipeline
     uses: SocketDev/socket-registry/.github/workflows/ci.yml@1a96ced97aaa85d61543351b90d6f463b983c46c # main
     with:
-      test-setup-script: 'pnpm --filter @socketsecurity/cli run build'
+      test-setup-script: 'pnpm --filter @socketsecurity/cli... run build'
       lint-script: 'pnpm --filter @socketsecurity/cli run check'
       type-check-script: 'pnpm --filter @socketsecurity/cli run type'
       run-test: false # Tests run in separate sharded job below.
@@ -64,9 +64,8 @@ jobs:
         with:
           node-version: ${{ matrix.node-version }}

-      - name: Build CLI
-        working-directory: packages/cli
-        run: pnpm run build
+      - name: Build dependencies and CLI
+        run: pnpm --filter @socketsecurity/cli... run build

       - name: Run unit tests (shard ${{ matrix.shard }})
         working-directory: packages/cli
@@ -88,9 +87,8 @@ jobs:
         with:
          node-version: ${{ matrix.node-version }}

-      - name: Build CLI
-        working-directory: packages/cli
-        run: pnpm run build
+      - name: Build dependencies and CLI
+        run: pnpm --filter @socketsecurity/cli... run build

       - name: Generate cache keys for binary distributions
         id: cache-keys
@@ -220,9 +218,8 @@ jobs:
         with:
           node-version: ${{ matrix.node-version }}

-      - name: Build CLI
-        working-directory: packages/cli
-        run: pnpm run build
+      - name: Build dependencies and CLI
+        run: pnpm --filter @socketsecurity/cli... run build

       - name: Run e2e tests
         working-directory: packages/cli
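Note: the `...` suffix on the pnpm filter above is what makes the "Build dependencies and CLI" steps work without a working-directory. `--filter @socketsecurity/cli` selects only the CLI package, while `--filter @socketsecurity/cli...` selects the CLI plus everything it depends on in the workspace, run in dependency order — the same two commands that appear in the hunks:

    # Builds only packages/cli; workspace deps must already be built.
    pnpm --filter @socketsecurity/cli run build

    # Builds the workspace dependencies first, then the CLI.
    pnpm --filter @socketsecurity/cli... run build
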
- */ - -import path from 'node:path' -import { fileURLToPath } from 'node:url' - -import { build } from 'esbuild' -import semver from 'semver' - -import { unicodeTransformPlugin } from '@socketsecurity/build-infra/lib/esbuild-plugin-unicode-transform' - -import nodeVersionConfig from '../node-version.json' with { type: 'json' } -import socketPackageJson from '../../socket/package.json' with { type: 'json' } - -import { smolTransformPlugin } from './esbuild-plugin-smol-transform.mjs' - -const __dirname = path.dirname(fileURLToPath(import.meta.url)) -const rootPath = path.resolve(__dirname, '..') - -const config = { - bundle: true, - define: { - __MIN_NODE_VERSION__: JSON.stringify(nodeVersionConfig.versionSemver), - __SOCKET_CLI_VERSION__: JSON.stringify(socketPackageJson.version), - __SOCKET_CLI_VERSION_MAJOR__: JSON.stringify(semver.major(socketPackageJson.version)), - }, - entryPoints: [path.join(rootPath, 'src', 'bootstrap-smol.mts')], - external: [], - format: 'cjs', - metafile: true, - minifyWhitespace: true, - minifyIdentifiers: true, - minifySyntax: false, - outfile: path.join(rootPath, 'dist', 'bootstrap-smol.js'), - platform: 'node', - plugins: [unicodeTransformPlugin(), smolTransformPlugin()], - target: 'node24', - treeShaking: true, - write: false, // Plugin needs to transform output. -} - -// Run build if invoked directly. -if (fileURLToPath(import.meta.url) === process.argv[1]) { - build(config).catch(error => { - console.error('smol bootstrap build failed:', error) - process.exitCode = 1 - }) -} - -export default config diff --git a/packages/bootstrap/package.json b/packages/bootstrap/package.json index 1eeac88f9..043b7bf95 100644 --- a/packages/bootstrap/package.json +++ b/packages/bootstrap/package.json @@ -7,7 +7,6 @@ ".": "./dist/index.js", "./bootstrap-npm.js": "./dist/bootstrap-npm.js", "./bootstrap-sea.js": "./dist/bootstrap-sea.js", - "./bootstrap-smol.js": "./dist/bootstrap-smol.js", "./node-version.json": "./node-version.json" }, "scripts": { @@ -22,7 +21,7 @@ "@babel/types": "catalog:", "@socketsecurity/build-infra": "workspace:*", "@socketsecurity/cli": "workspace:*", - "@socketsecurity/lib": "workspace:*", + "@socketsecurity/lib-internal": "workspace:*", "del-cli": "catalog:", "esbuild": "catalog:", "magic-string": "catalog:", diff --git a/packages/bootstrap/src/bootstrap-smol.mts b/packages/bootstrap/src/bootstrap-smol.mts deleted file mode 100644 index 0f69a91f7..000000000 --- a/packages/bootstrap/src/bootstrap-smol.mts +++ /dev/null @@ -1,70 +0,0 @@ -/** - * Bootstrap for Socket CLI smol binary. - * - * This runs inside the smol Node.js binary via internal bootstrap. - * Uses Node.js internal/* requires (transformed by esbuild plugin). - * - * The smol binary loads this at startup via lib/internal/process/pre_execution.js. - */ - -// Load Intl polyfill FIRST for ICU-disabled builds (smol Node.js). -import '@socketsecurity/cli/src/polyfills/intl-stub/index.mts' - -import { findAndExecuteCli, getArgs, SOCKET_CLI_VERSION } from './shared/bootstrap-shared.mjs' - -/** - * Check if we should skip CLI bootstrap. - * Returns true for build tests or when showing version (no download needed). - */ -function shouldSkipCliBootstrap() { - // Skip if this is a build smoke test (binary verification during compilation). - // The CLI version doesn't exist on npm yet during build, so we can't download it. 
diff --git a/packages/bootstrap/.config/esbuild.smol.config.mjs b/packages/bootstrap/.config/esbuild.smol.config.mjs
deleted file mode 100644
index e07a58a74..000000000
--- a/packages/bootstrap/.config/esbuild.smol.config.mjs
+++ /dev/null
@@ -1,52 +0,0 @@
-/**
- * esbuild configuration for smol bootstrap.
- * Transforms node:* requires to internal/* for Node.js internal bootstrap context.
- */
-
-import path from 'node:path'
-import { fileURLToPath } from 'node:url'
-
-import { build } from 'esbuild'
-import semver from 'semver'
-
-import { unicodeTransformPlugin } from '@socketsecurity/build-infra/lib/esbuild-plugin-unicode-transform'
-
-import nodeVersionConfig from '../node-version.json' with { type: 'json' }
-import socketPackageJson from '../../socket/package.json' with { type: 'json' }
-
-import { smolTransformPlugin } from './esbuild-plugin-smol-transform.mjs'
-
-const __dirname = path.dirname(fileURLToPath(import.meta.url))
-const rootPath = path.resolve(__dirname, '..')
-
-const config = {
-  bundle: true,
-  define: {
-    __MIN_NODE_VERSION__: JSON.stringify(nodeVersionConfig.versionSemver),
-    __SOCKET_CLI_VERSION__: JSON.stringify(socketPackageJson.version),
-    __SOCKET_CLI_VERSION_MAJOR__: JSON.stringify(semver.major(socketPackageJson.version)),
-  },
-  entryPoints: [path.join(rootPath, 'src', 'bootstrap-smol.mts')],
-  external: [],
-  format: 'cjs',
-  metafile: true,
-  minifyWhitespace: true,
-  minifyIdentifiers: true,
-  minifySyntax: false,
-  outfile: path.join(rootPath, 'dist', 'bootstrap-smol.js'),
-  platform: 'node',
-  plugins: [unicodeTransformPlugin(), smolTransformPlugin()],
-  target: 'node24',
-  treeShaking: true,
-  write: false, // Plugin needs to transform output.
-}
-
-// Run build if invoked directly.
-if (fileURLToPath(import.meta.url) === process.argv[1]) {
-  build(config).catch(error => {
-    console.error('smol bootstrap build failed:', error)
-    process.exitCode = 1
-  })
-}
-
-export default config
diff --git a/packages/bootstrap/package.json b/packages/bootstrap/package.json
index 1eeac88f9..043b7bf95 100644
--- a/packages/bootstrap/package.json
+++ b/packages/bootstrap/package.json
@@ -7,7 +7,6 @@
     ".": "./dist/index.js",
     "./bootstrap-npm.js": "./dist/bootstrap-npm.js",
     "./bootstrap-sea.js": "./dist/bootstrap-sea.js",
-    "./bootstrap-smol.js": "./dist/bootstrap-smol.js",
     "./node-version.json": "./node-version.json"
   },
   "scripts": {
@@ -22,7 +21,7 @@
     "@babel/types": "catalog:",
     "@socketsecurity/build-infra": "workspace:*",
     "@socketsecurity/cli": "workspace:*",
-    "@socketsecurity/lib": "workspace:*",
+    "@socketsecurity/lib-internal": "workspace:*",
     "del-cli": "catalog:",
     "esbuild": "catalog:",
     "magic-string": "catalog:",
diff --git a/packages/bootstrap/src/bootstrap-smol.mts b/packages/bootstrap/src/bootstrap-smol.mts
deleted file mode 100644
index 0f69a91f7..000000000
--- a/packages/bootstrap/src/bootstrap-smol.mts
+++ /dev/null
@@ -1,70 +0,0 @@
-/**
- * Bootstrap for Socket CLI smol binary.
- *
- * This runs inside the smol Node.js binary via internal bootstrap.
- * Uses Node.js internal/* requires (transformed by esbuild plugin).
- *
- * The smol binary loads this at startup via lib/internal/process/pre_execution.js.
- */
-
-// Load Intl polyfill FIRST for ICU-disabled builds (smol Node.js).
-import '@socketsecurity/cli/src/polyfills/intl-stub/index.mts'
-
-import { findAndExecuteCli, getArgs, SOCKET_CLI_VERSION } from './shared/bootstrap-shared.mjs'
-
-/**
- * Check if we should skip CLI bootstrap.
- * Returns true for build tests or when showing version (no download needed).
- */
-function shouldSkipCliBootstrap() {
-  // Skip if this is a build smoke test (binary verification during compilation).
-  // The CLI version doesn't exist on npm yet during build, so we can't download it.
-  if (process.env.SOCKET_CLI_BUILD_TEST === '1' ||
-      process.env.SOCKET_CLI_BUILD_TEST === 'true') {
-    return true
-  }
-
-  // Skip if user just wants to see version (--version or -v).
-  // No need to download CLI just to show Node.js version.
-  const args = getArgs()
-  if (args.includes('--version') || args.includes('-v')) {
-    return true
-  }
-
-  return false
-}
-
-async function main() {
-  const args = getArgs()
-
-  // Check if user wants to see version.
-  if (args.includes('--version') || args.includes('-v')) {
-    console.log(SOCKET_CLI_VERSION)
-    return 0
-  }
-
-  // Skip bootstrap if we're in a build/test environment.
-  // During smol binary compilation, smoke tests verify Node.js works,
-  // but the CLI version doesn't exist on npm yet, so we can't download it.
-  if (shouldSkipCliBootstrap()) {
-    // Let Node.js continue with its normal execution.
-    return 0
-  }
-
-  return await findAndExecuteCli(args)
-}
-
-// Run the bootstrap.
-main()
-  .then((exitCode) => {
-    // Exit with the code returned by the CLI (or 0 if bootstrap was skipped).
-    if (exitCode !== 0) {
-      process.exit(exitCode)
-    }
-  })
-  .catch((e) => {
-    // Use process.stderr.write() directly to avoid console access during early bootstrap.
-    const errorMsg = e instanceof Error ? e.message : String(e)
-    process.stderr.write(`Bootstrap error: ${errorMsg}\n`)
-    process.exit(1)
-  })
diff --git a/packages/bootstrap/src/index.mts b/packages/bootstrap/src/index.mts
index 3e4ed681b..20a6ba53f 100644
--- a/packages/bootstrap/src/index.mts
+++ b/packages/bootstrap/src/index.mts
@@ -56,16 +56,6 @@ module.exports = {
     return loadBootstrap('bootstrap-sea.js')
   },

-  /**
-   * Load smol bootstrap (for smol Node.js binary - also available compressed).
-   * Note: This is also embedded in base64 in the Node.js binary via
-   * packages/node-smol-builder, but providing compressed version for consistency.
-   * @returns {any} Bootstrap exports
-   */
-  loadSmolBootstrap() {
-    return loadBootstrap('bootstrap-smol.js')
-  },
-
   // Re-export the loader function for custom usage.
   loadBootstrap,
 }
diff --git a/packages/build-infra/lib/build-helpers.mjs b/packages/build-infra/lib/build-helpers.mjs
index b1dd4fc2d..a55d8e68a 100644
--- a/packages/build-infra/lib/build-helpers.mjs
+++ b/packages/build-infra/lib/build-helpers.mjs
@@ -8,9 +8,9 @@
 import { promises as fs } from 'node:fs'
 import path from 'node:path'

-import binPkg from '@socketsecurity/lib/bin'
-import platformPkg from '@socketsecurity/lib/constants/platform'
-import spawnPkg from '@socketsecurity/lib/spawn'
+import binPkg from '@socketsecurity/lib-external/bin'
+import platformPkg from '@socketsecurity/lib-external/constants/platform'
+import spawnPkg from '@socketsecurity/lib-external/spawn'

 const { whichBinSync } = binPkg
 const { WIN32 } = platformPkg
diff --git a/packages/build-infra/lib/build-output.mjs b/packages/build-infra/lib/build-output.mjs
index d11269a29..3874fb6ca 100644
--- a/packages/build-infra/lib/build-output.mjs
+++ b/packages/build-infra/lib/build-output.mjs
@@ -4,7 +4,7 @@
  * Provides consistent, pretty logging for build processes.
  */

-import loggerPkg from '@socketsecurity/lib/logger'
+import loggerPkg from '@socketsecurity/lib-external/logger'

 const logger = loggerPkg.getDefaultLogger()
diff --git a/packages/build-infra/lib/cmake-builder.mjs b/packages/build-infra/lib/cmake-builder.mjs
index d10afed5b..bfc76047b 100644
--- a/packages/build-infra/lib/cmake-builder.mjs
+++ b/packages/build-infra/lib/cmake-builder.mjs
@@ -6,8 +6,8 @@
 import { cpus } from 'node:os'

-import platformPkg from '@socketsecurity/lib/constants/platform'
-import spawnPkg from '@socketsecurity/lib/spawn'
+import platformPkg from '@socketsecurity/lib-external/constants/platform'
+import spawnPkg from '@socketsecurity/lib-external/spawn'

 const { WIN32 } = platformPkg
 const { spawn } = spawnPkg
diff --git a/packages/build-infra/lib/emscripten-builder.mjs b/packages/build-infra/lib/emscripten-builder.mjs
index 121cae937..e4dd85c74 100644
--- a/packages/build-infra/lib/emscripten-builder.mjs
+++ b/packages/build-infra/lib/emscripten-builder.mjs
@@ -7,8 +7,8 @@
 import { cpus } from 'node:os'
 import path from 'node:path'

-import platformPkg from '@socketsecurity/lib/constants/platform'
-import spawnPkg from '@socketsecurity/lib/spawn'
+import platformPkg from '@socketsecurity/lib-external/constants/platform'
+import spawnPkg from '@socketsecurity/lib-external/spawn'

 const { WIN32 } = platformPkg
 const { spawn } = spawnPkg
diff --git a/packages/build-infra/lib/extraction-cache.mjs b/packages/build-infra/lib/extraction-cache.mjs
index 886529943..a205e0f46 100644
--- a/packages/build-infra/lib/extraction-cache.mjs
+++ b/packages/build-infra/lib/extraction-cache.mjs
@@ -11,7 +11,7 @@
 import { createHash } from 'node:crypto'
 import { existsSync, mkdirSync, readFileSync } from 'node:fs'
 import path from 'node:path'

-import loggerPkg from '@socketsecurity/lib/logger'
+import loggerPkg from '@socketsecurity/lib-external/logger'
 const { getDefaultLogger } = loggerPkg

 /**
diff --git a/packages/build-infra/lib/fetch-with-retry.mjs b/packages/build-infra/lib/fetch-with-retry.mjs
index 42623045c..e87ea4ab2 100644
--- a/packages/build-infra/lib/fetch-with-retry.mjs
+++ b/packages/build-infra/lib/fetch-with-retry.mjs
@@ -3,7 +3,7 @@
  * Automatically retries on network errors and 5xx server errors.
  */

-import loggerPkg from '@socketsecurity/lib/logger'
+import loggerPkg from '@socketsecurity/lib-external/logger'
 const { getDefaultLogger } = loggerPkg

 /**
diff --git a/packages/build-infra/lib/patch-validator.mjs b/packages/build-infra/lib/patch-validator.mjs
index 0196ecd4f..4ebb17f48 100644
--- a/packages/build-infra/lib/patch-validator.mjs
+++ b/packages/build-infra/lib/patch-validator.mjs
@@ -7,8 +7,8 @@
 import { promises as fs } from 'node:fs'
 import path from 'node:path'

-import platformPkg from '@socketsecurity/lib/constants/platform'
-import spawnPkg from '@socketsecurity/lib/spawn'
+import platformPkg from '@socketsecurity/lib-external/constants/platform'
+import spawnPkg from '@socketsecurity/lib-external/spawn'

 const { WIN32 } = platformPkg
 const { spawn } = spawnPkg
diff --git a/packages/build-infra/lib/preflight-checks.mjs b/packages/build-infra/lib/preflight-checks.mjs
index e737f550a..99193c408 100644
--- a/packages/build-infra/lib/preflight-checks.mjs
+++ b/packages/build-infra/lib/preflight-checks.mjs
@@ -3,7 +3,7 @@
  * Provides a DRY way to run common pre-build validation checks.
  */

-import loggerPkg from '@socketsecurity/lib/logger'
+import loggerPkg from '@socketsecurity/lib-external/logger'
 const { getDefaultLogger } = loggerPkg

 import { printError, printSuccess } from './build-output.mjs'
diff --git a/packages/build-infra/lib/rust-builder.mjs b/packages/build-infra/lib/rust-builder.mjs
index 7c0c575a8..33149598b 100644
--- a/packages/build-infra/lib/rust-builder.mjs
+++ b/packages/build-infra/lib/rust-builder.mjs
@@ -7,8 +7,8 @@
 import { cpus } from 'node:os'
 import path from 'node:path'

-import platformPkg from '@socketsecurity/lib/constants/platform'
-import spawnPkg from '@socketsecurity/lib/spawn'
+import platformPkg from '@socketsecurity/lib-external/constants/platform'
+import spawnPkg from '@socketsecurity/lib-external/spawn'

 const { WIN32 } = platformPkg
 const { spawn } = spawnPkg
diff --git a/packages/build-infra/lib/script-runner.mjs b/packages/build-infra/lib/script-runner.mjs
index 2803b6bbb..63da5b5dc 100644
--- a/packages/build-infra/lib/script-runner.mjs
+++ b/packages/build-infra/lib/script-runner.mjs
@@ -3,9 +3,9 @@
  * Provides DRY helpers for running pnpm scripts, commands, and sequences.
  */

-import platformPkg from '@socketsecurity/lib/constants/platform'
-import loggerPkg from '@socketsecurity/lib/logger'
-import spawnPkg from '@socketsecurity/lib/spawn'
+import platformPkg from '@socketsecurity/lib-external/constants/platform'
+import loggerPkg from '@socketsecurity/lib-external/logger'
+import spawnPkg from '@socketsecurity/lib-external/spawn'

 const { WIN32 } = platformPkg
 const { getDefaultLogger } = loggerPkg
diff --git a/packages/build-infra/lib/tool-installer.mjs b/packages/build-infra/lib/tool-installer.mjs
index 404a4be78..9e47b30b9 100644
--- a/packages/build-infra/lib/tool-installer.mjs
+++ b/packages/build-infra/lib/tool-installer.mjs
@@ -5,9 +5,9 @@
  * using platform-specific package managers (brew, apt, choco, etc.).
  */

-import binPkg from '@socketsecurity/lib/bin'
-import platformPkg from '@socketsecurity/lib/constants/platform'
-import spawnPkg from '@socketsecurity/lib/spawn'
+import binPkg from '@socketsecurity/lib-external/bin'
+import platformPkg from '@socketsecurity/lib-external/constants/platform'
+import spawnPkg from '@socketsecurity/lib-external/spawn'

 const { whichBinSync } = binPkg
 const { WIN32 } = platformPkg
diff --git a/packages/build-infra/package.json b/packages/build-infra/package.json
index 862c84659..73a06b856 100644
--- a/packages/build-infra/package.json
+++ b/packages/build-infra/package.json
@@ -25,7 +25,7 @@
   "dependencies": {
     "@babel/parser": "catalog:",
     "@babel/traverse": "catalog:",
-    "@socketsecurity/lib": "workspace:*",
+    "@socketsecurity/lib-internal": "workspace:*",
     "magic-string": "catalog:"
   }
 }
diff --git a/packages/cli/package.json b/packages/cli/package.json
index 6f0b8bad0..89de444e1 100644
--- a/packages/cli/package.json
+++ b/packages/cli/package.json
@@ -75,7 +75,7 @@
     "@coana-tech/cli": "catalog:",
     "@gitbeaker/rest": "catalog:",
     "@socketsecurity/build-infra": "workspace:*",
-    "@socketsecurity/lib": "workspace:*",
+    "@socketsecurity/lib-internal": "workspace:*",
     "@socketsecurity/sdk": "workspace:*",
     "ajv-dist": "catalog:",
     "compromise": "catalog:",
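The dependency declarations above pair with the import rewrites throughout the rest of this diff: code that runs at build time (packages/build-infra, packages/cli/scripts) now imports from `@socketsecurity/lib-external`, the published release aliased in the root package.json, while CLI runtime sources under packages/cli/src import from `@socketsecurity/lib-internal`, the workspace package. A minimal sketch of the two sides, assuming the same `/logger` subpath on each:

    // Build-time code: the published @socketsecurity/lib@3.2.8, via the npm: alias.
    import { getDefaultLogger } from '@socketsecurity/lib-external/logger'

    // Runtime code: the workspace copy, linked via workspace:*.
    import { getDefaultLogger as getWorkspaceLogger } from '@socketsecurity/lib-internal/logger'
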
diff --git a/packages/cli/scripts/build.mjs b/packages/cli/scripts/build.mjs
index 641668a77..9fafb5b8d 100644
--- a/packages/cli/scripts/build.mjs
+++ b/packages/cli/scripts/build.mjs
@@ -16,9 +16,9 @@
 import { promises as fs } from 'node:fs'
 import path from 'node:path'
 import { fileURLToPath } from 'node:url'

-import { WIN32 } from '@socketsecurity/lib/constants/platform'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
-import { spawn } from '@socketsecurity/lib/spawn'
+import { WIN32 } from '@socketsecurity/lib-external/constants/platform'
+import { getDefaultLogger } from '@socketsecurity/lib-external/logger'
+import { spawn } from '@socketsecurity/lib-external/spawn'

 const logger = getDefaultLogger()
diff --git a/packages/cli/scripts/check.mjs b/packages/cli/scripts/check.mjs
index e821d89d5..be121da94 100644
--- a/packages/cli/scripts/check.mjs
+++ b/packages/cli/scripts/check.mjs
@@ -3,11 +3,11 @@
  * Runs code quality checks: ESLint and TypeScript type checking.
  */

-import { parseArgs } from '@socketsecurity/lib/argv/parse'
-import { WIN32 } from '@socketsecurity/lib/constants/platform'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
-import { spawn } from '@socketsecurity/lib/spawn'
-import { printFooter, printHeader } from '@socketsecurity/lib/stdio/header'
+import { parseArgs } from '@socketsecurity/lib-external/argv/parse'
+import { WIN32 } from '@socketsecurity/lib-external/constants/platform'
+import { getDefaultLogger } from '@socketsecurity/lib-external/logger'
+import { spawn } from '@socketsecurity/lib-external/spawn'
+import { printFooter, printHeader } from '@socketsecurity/lib-external/stdio/header'

 const logger = getDefaultLogger()
diff --git a/packages/cli/scripts/claude.mjs b/packages/cli/scripts/claude.mjs
index 9f75b5293..c18694085 100644
--- a/packages/cli/scripts/claude.mjs
+++ b/packages/cli/scripts/claude.mjs
@@ -18,9 +18,9 @@
 import { fileURLToPath } from 'node:url'

 import colors from 'yoctocolors-cjs'

-import { parseArgs } from '@socketsecurity/lib/argv/parse'
-import { safeDelete } from '@socketsecurity/lib/fs'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { parseArgs } from '@socketsecurity/lib-external/argv/parse'
+import { safeDelete } from '@socketsecurity/lib-external/fs'
+import { getDefaultLogger } from '@socketsecurity/lib-external/logger'

 const logger = getDefaultLogger()
diff --git a/packages/cli/scripts/compress-cli.mjs b/packages/cli/scripts/compress-cli.mjs
index af9fae085..6d04be226 100644
--- a/packages/cli/scripts/compress-cli.mjs
+++ b/packages/cli/scripts/compress-cli.mjs
@@ -12,7 +12,7 @@
 import path from 'node:path'
 import { fileURLToPath } from 'node:url'
 import { brotliCompressSync } from 'node:zlib'

-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { getDefaultLogger } from '@socketsecurity/lib-external/logger'

 const __dirname = path.dirname(fileURLToPath(import.meta.url))
 const rootPath = path.join(__dirname, '..')
diff --git a/packages/cli/scripts/cover.mjs b/packages/cli/scripts/cover.mjs
index 66bbff6d6..fa215a0b2 100644
--- a/packages/cli/scripts/cover.mjs
+++ b/packages/cli/scripts/cover.mjs
@@ -14,11 +14,11 @@
  *   --summary  Show only coverage summary (hide detailed output)
  */

-import { isQuiet, isVerbose } from '@socketsecurity/lib/argv/flags'
-import { parseArgs } from '@socketsecurity/lib/argv/parse'
-import { WIN32 } from '@socketsecurity/lib/constants/platform'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
-import { spawn } from '@socketsecurity/lib/spawn'
+import { isQuiet, isVerbose } from '@socketsecurity/lib-external/argv/flags'
+import { parseArgs } from '@socketsecurity/lib-external/argv/parse'
+import { WIN32 } from '@socketsecurity/lib-external/constants/platform'
+import { getDefaultLogger } from '@socketsecurity/lib-external/logger'
+import { spawn } from '@socketsecurity/lib-external/spawn'

 const logger = getDefaultLogger()
diff --git a/packages/cli/scripts/e2e.mjs b/packages/cli/scripts/e2e.mjs
index 17b730c0e..023f870bc 100644
--- a/packages/cli/scripts/e2e.mjs
+++ b/packages/cli/scripts/e2e.mjs
@@ -15,9 +15,9 @@
 import { fileURLToPath } from 'node:url'

 import colors from 'yoctocolors-cjs'

-import { WIN32 } from '@socketsecurity/lib/constants/platform'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
-import { spawn } from '@socketsecurity/lib/spawn'
+import { WIN32 } from '@socketsecurity/lib-external/constants/platform'
+import { getDefaultLogger } from '@socketsecurity/lib-external/logger'
+import { spawn } from '@socketsecurity/lib-external/spawn'

 const __dirname = path.dirname(fileURLToPath(import.meta.url))
 const ROOT_DIR = path.resolve(__dirname, '..')
diff --git a/packages/cli/scripts/esbuild.config.mjs b/packages/cli/scripts/esbuild.config.mjs
index e7345e2e4..1284775f2 100644
--- a/packages/cli/scripts/esbuild.config.mjs
+++ b/packages/cli/scripts/esbuild.config.mjs
@@ -7,7 +7,7 @@
 import { brotliCompressSync } from 'node:zlib'

 import { build } from 'esbuild'

-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { getDefaultLogger } from '@socketsecurity/lib-external/logger'

 import config from './esbuild.cli.config.mjs'
diff --git a/packages/cli/scripts/extract-onnx-runtime.mjs b/packages/cli/scripts/extract-onnx-runtime.mjs
index c72881769..1b9f85562 100644
--- a/packages/cli/scripts/extract-onnx-runtime.mjs
+++ b/packages/cli/scripts/extract-onnx-runtime.mjs
@@ -15,7 +15,7 @@
   generateHashComment,
   shouldExtract,
 } from '@socketsecurity/build-infra/lib/extraction-cache'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { getDefaultLogger } from '@socketsecurity/lib-external/logger'

 const __dirname = path.dirname(fileURLToPath(import.meta.url))
 const rootPath = path.join(__dirname, '..')
diff --git a/packages/cli/scripts/extract-yoga-wasm.mjs b/packages/cli/scripts/extract-yoga-wasm.mjs
index 89d33c2b2..f15960cdc 100644
--- a/packages/cli/scripts/extract-yoga-wasm.mjs
+++ b/packages/cli/scripts/extract-yoga-wasm.mjs
@@ -15,7 +15,7 @@
   generateHashComment,
   shouldExtract,
 } from '@socketsecurity/build-infra/lib/extraction-cache'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { getDefaultLogger } from '@socketsecurity/lib-external/logger'

 const __dirname = path.dirname(fileURLToPath(import.meta.url))
 const rootPath = path.join(__dirname, '..')
diff --git a/packages/cli/scripts/fix.mjs b/packages/cli/scripts/fix.mjs
index 849730a2f..5032676bc 100644
--- a/packages/cli/scripts/fix.mjs
+++ b/packages/cli/scripts/fix.mjs
@@ -13,12 +13,12 @@
  *   --verbose  Show detailed output
  */

-import { isQuiet } from '@socketsecurity/lib/argv/flags'
-import { parseArgs } from '@socketsecurity/lib/argv/parse'
-import { WIN32 } from '@socketsecurity/lib/constants/platform'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
-import { spawn } from '@socketsecurity/lib/spawn'
-import { printHeader } from '@socketsecurity/lib/stdio/header'
+import { isQuiet } from '@socketsecurity/lib-external/argv/flags'
+import { parseArgs } from '@socketsecurity/lib-external/argv/parse'
+import { WIN32 } from '@socketsecurity/lib-external/constants/platform'
+import { getDefaultLogger } from '@socketsecurity/lib-external/logger'
+import { spawn } from '@socketsecurity/lib-external/spawn'
+import { printHeader } from '@socketsecurity/lib-external/stdio/header'

 const logger = getDefaultLogger()
diff --git a/packages/cli/scripts/integration.mjs b/packages/cli/scripts/integration.mjs
index 26ec920ae..2580ac26f 100644
--- a/packages/cli/scripts/integration.mjs
+++ b/packages/cli/scripts/integration.mjs
@@ -15,9 +15,9 @@
 import { fileURLToPath } from 'node:url'

 import colors from 'yoctocolors-cjs'

-import { WIN32 } from '@socketsecurity/lib/constants/platform'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
-import { spawn } from '@socketsecurity/lib/spawn'
+import { WIN32 } from '@socketsecurity/lib-external/constants/platform'
+import { getDefaultLogger } from '@socketsecurity/lib-external/logger'
+import { spawn } from '@socketsecurity/lib-external/spawn'

 const logger = getDefaultLogger()
 const __dirname = path.dirname(fileURLToPath(import.meta.url))
diff --git a/packages/cli/scripts/lint.mjs b/packages/cli/scripts/lint.mjs
index a26f7034e..60a18ca7d 100644
--- a/packages/cli/scripts/lint.mjs
+++ b/packages/cli/scripts/lint.mjs
@@ -6,13 +6,13 @@
 import { existsSync, readFileSync } from 'node:fs'
 import path from 'node:path'

-import { isQuiet } from '@socketsecurity/lib/argv/flags'
-import { parseArgs } from '@socketsecurity/lib/argv/parse'
-import { WIN32 } from '@socketsecurity/lib/constants/platform'
-import { getChangedFiles, getStagedFiles } from '@socketsecurity/lib/git'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
-import { spawn } from '@socketsecurity/lib/spawn'
-import { printHeader } from '@socketsecurity/lib/stdio/header'
+import { isQuiet } from '@socketsecurity/lib-external/argv/flags'
+import { parseArgs } from '@socketsecurity/lib-external/argv/parse'
+import { WIN32 } from '@socketsecurity/lib-external/constants/platform'
+import { getChangedFiles, getStagedFiles } from '@socketsecurity/lib-external/git'
+import { getDefaultLogger } from '@socketsecurity/lib-external/logger'
+import { spawn } from '@socketsecurity/lib-external/spawn'
+import { printHeader } from '@socketsecurity/lib-external/stdio/header'

 const logger = getDefaultLogger()
diff --git a/packages/cli/scripts/test-wrapper.mjs b/packages/cli/scripts/test-wrapper.mjs
index 0b8630075..34b12e8e0 100644
--- a/packages/cli/scripts/test-wrapper.mjs
+++ b/packages/cli/scripts/test-wrapper.mjs
@@ -13,8 +13,8 @@
 import path from 'node:path'

 import fastGlob from 'fast-glob'

-import { WIN32 } from '@socketsecurity/lib/constants/platform'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { WIN32 } from '@socketsecurity/lib-external/constants/platform'
+import { getDefaultLogger } from '@socketsecurity/lib-external/logger'

 import constants from './constants.mjs'
diff --git a/packages/cli/scripts/update.mjs b/packages/cli/scripts/update.mjs
index 5c83b797e..7cb68bc2a 100644
--- a/packages/cli/scripts/update.mjs
+++ b/packages/cli/scripts/update.mjs
@@ -12,14 +12,14 @@
  */

 import { runParallel } from '@socketsecurity/build-infra/lib/script-runner'
-import { isQuiet, isVerbose } from '@socketsecurity/lib/argv/flags'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { isQuiet, isVerbose } from '@socketsecurity/lib-external/argv/flags'
+import { getDefaultLogger } from '@socketsecurity/lib-external/logger'
 import {
   printError,
   printFooter,
   printHeader,
   printSuccess,
-} from '@socketsecurity/lib/stdio/header'
+} from '@socketsecurity/lib-external/stdio/header'

 async function main() {
   const quiet = isQuiet()
diff --git a/packages/cli/scripts/validate-tests.mjs b/packages/cli/scripts/validate-tests.mjs
index 91b23a52a..6a0eedb39 100644
--- a/packages/cli/scripts/validate-tests.mjs
+++ b/packages/cli/scripts/validate-tests.mjs
@@ -4,8 +4,8 @@
 import { existsSync, promises as fs } from 'node:fs'
 import path from 'node:path'
 import { fileURLToPath } from 'node:url'

-import { getDefaultLogger } from '@socketsecurity/lib/logger'
-import { pEach } from '@socketsecurity/lib/promises'
+import { getDefaultLogger } from '@socketsecurity/lib-external/logger'
+import { pEach } from '@socketsecurity/lib-external/promises'

 import constants from './constants.mjs'
diff --git a/packages/cli/scripts/verify-package.mjs b/packages/cli/scripts/verify-package.mjs
index e6d49ac21..18bb02519 100644
--- a/packages/cli/scripts/verify-package.mjs
+++ b/packages/cli/scripts/verify-package.mjs
@@ -5,7 +5,7 @@
 import { fileURLToPath } from 'node:url'

 import colors from 'yoctocolors-cjs'

-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { getDefaultLogger } from '@socketsecurity/lib-external/logger'

 const __filename = fileURLToPath(import.meta.url)
 const __dirname = path.dirname(__filename)
diff --git a/packages/cli/scripts/wasm.mjs b/packages/cli/scripts/wasm.mjs
index 4c23c4e51..ff8ce5b6b 100644
--- a/packages/cli/scripts/wasm.mjs
+++ b/packages/cli/scripts/wasm.mjs
@@ -21,8 +21,8 @@
 import { existsSync, promises as fs } from 'node:fs'
 import path from 'node:path'
 import { fileURLToPath } from 'node:url'

-import { getDefaultLogger } from '@socketsecurity/lib/logger'
-import { spawn } from '@socketsecurity/lib/spawn'
+import { getDefaultLogger } from '@socketsecurity/lib-external/logger'
+import { spawn } from '@socketsecurity/lib-external/spawn'

 const __dirname = path.dirname(fileURLToPath(import.meta.url))
 const rootPath = path.join(__dirname, '..')
diff --git a/packages/cli/src/cli-dispatch.mts b/packages/cli/src/cli-dispatch.mts
index 39263a7b6..6962526c0 100644
--- a/packages/cli/src/cli-dispatch.mts
+++ b/packages/cli/src/cli-dispatch.mts
@@ -18,7 +18,7 @@
 import path from 'node:path'

-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import { waitForBootstrapHandshake } from './utils/sea/boot.mjs'
diff --git a/packages/cli/src/commands/analytics/analytics-app-cli.mts b/packages/cli/src/commands/analytics/analytics-app-cli.mts
index f299fcd68..10213cb9f 100644
--- a/packages/cli/src/commands/analytics/analytics-app-cli.mts
+++ b/packages/cli/src/commands/analytics/analytics-app-cli.mts
@@ -6,7 +6,7 @@
 import { pathToFileURL } from 'node:url'

 import { render } from 'ink'
 import React from 'react'

-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 const logger = getDefaultLogger()
diff --git a/packages/cli/src/commands/analytics/cmd-analytics.mts b/packages/cli/src/commands/analytics/cmd-analytics.mts
index 9fc4eb3df..75db064e9 100644
--- a/packages/cli/src/commands/analytics/cmd-analytics.mts
+++ b/packages/cli/src/commands/analytics/cmd-analytics.mts
@@ -1,4 +1,4 @@
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import { handleAnalytics } from './handle-analytics.mts'
 import {
diff --git a/packages/cli/src/commands/analytics/output-analytics.mts b/packages/cli/src/commands/analytics/output-analytics.mts
index 60659f60d..aa7951cd6 100644
--- a/packages/cli/src/commands/analytics/output-analytics.mts
+++ b/packages/cli/src/commands/analytics/output-analytics.mts
@@ -1,6 +1,6 @@
 import fs from 'node:fs/promises'

-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import { debugFileOp } from '../../utils/debug.mts'
 import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts'
diff --git a/packages/cli/src/commands/audit-log/audit-log-app-cli.mts b/packages/cli/src/commands/audit-log/audit-log-app-cli.mts
index 557bf676d..92c8ee4d4 100644
--- a/packages/cli/src/commands/audit-log/audit-log-app-cli.mts
+++ b/packages/cli/src/commands/audit-log/audit-log-app-cli.mts
@@ -6,7 +6,7 @@
 import { pathToFileURL } from 'node:url'

 import { render } from 'ink'
 import React from 'react'

-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 const logger = getDefaultLogger()
diff --git a/packages/cli/src/commands/audit-log/cmd-audit-log.mts b/packages/cli/src/commands/audit-log/cmd-audit-log.mts
index a265e385d..18cc88c61 100644
--- a/packages/cli/src/commands/audit-log/cmd-audit-log.mts
+++ b/packages/cli/src/commands/audit-log/cmd-audit-log.mts
@@ -1,4 +1,4 @@
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import { handleAuditLog } from './handle-audit-log.mts'
 import {
diff --git a/packages/cli/src/commands/audit-log/output-audit-log.mts b/packages/cli/src/commands/audit-log/output-audit-log.mts
index ec239fce5..b4597757f 100644
--- a/packages/cli/src/commands/audit-log/output-audit-log.mts
+++ b/packages/cli/src/commands/audit-log/output-audit-log.mts
@@ -1,5 +1,5 @@
-import { debug, debugDir } from '@socketsecurity/lib/debug'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { debug, debugDir } from '@socketsecurity/lib-internal/debug'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import {
   FLAG_JSON,
diff --git a/packages/cli/src/commands/ci/fetch-default-org-slug.mts b/packages/cli/src/commands/ci/fetch-default-org-slug.mts
index 2a2336ae8..45e2e5a7e 100644
--- a/packages/cli/src/commands/ci/fetch-default-org-slug.mts
+++ b/packages/cli/src/commands/ci/fetch-default-org-slug.mts
@@ -1,4 +1,4 @@
-import { debug } from '@socketsecurity/lib/debug'
+import { debug } from '@socketsecurity/lib-internal/debug'

 import ENV from '../../constants/env.mts'
 import { getConfigValueOrUndef } from '../../utils/config.mts'
diff --git a/packages/cli/src/commands/ci/handle-ci.mts b/packages/cli/src/commands/ci/handle-ci.mts
index a441b18ec..0811d8db1 100644
--- a/packages/cli/src/commands/ci/handle-ci.mts
+++ b/packages/cli/src/commands/ci/handle-ci.mts
@@ -1,5 +1,5 @@
-import { debug, debugDir } from '@socketsecurity/lib/debug'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { debug, debugDir } from '@socketsecurity/lib-internal/debug'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import { getDefaultOrgSlug } from './fetch-default-org-slug.mts'
 import { REPORT_LEVEL_ERROR } from '../../constants/reporting.mts'
diff --git a/packages/cli/src/commands/console/handle-console.mts b/packages/cli/src/commands/console/handle-console.mts
index 06592a745..6d5d040e3 100644
--- a/packages/cli/src/commands/console/handle-console.mts
+++ b/packages/cli/src/commands/console/handle-console.mts
@@ -2,7 +2,7 @@
 import { render } from 'ink'
 import { createElement } from 'react'
 import colors from 'yoctocolors-cjs'

-import { spawn } from '@socketsecurity/lib/spawn'
+import { spawn } from '@socketsecurity/lib-internal/spawn'

 import {
   createFileDiff,
diff --git a/packages/cli/src/commands/fix/cmd-fix.mts b/packages/cli/src/commands/fix/cmd-fix.mts
index 65fc3a7bd..68828160d 100644
--- a/packages/cli/src/commands/fix/cmd-fix.mts
+++ b/packages/cli/src/commands/fix/cmd-fix.mts
@@ -2,8 +2,8 @@
 import path from 'node:path'

 import terminalLink from 'terminal-link'

-import { arrayUnique, joinOr } from '@socketsecurity/lib/arrays'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { arrayUnique, joinOr } from '@socketsecurity/lib-internal/arrays'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import { handleFix } from './handle-fix.mts'
 import { DRY_RUN_NOT_SAVING, FLAG_ID } from '../../constants/cli.mts'
diff --git a/packages/cli/src/commands/fix/coana-fix.mts b/packages/cli/src/commands/fix/coana-fix.mts
index a4b2afae4..f3956e652 100644
--- a/packages/cli/src/commands/fix/coana-fix.mts
+++ b/packages/cli/src/commands/fix/coana-fix.mts
@@ -2,11 +2,11 @@
 import { promises as fs } from 'node:fs'
 import os from 'node:os'
 import path from 'node:path'

-import { joinAnd } from '@socketsecurity/lib/arrays'
-import { debug, debugDir } from '@socketsecurity/lib/debug'
-import { readJsonSync } from '@socketsecurity/lib/fs'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
-import { pluralize } from '@socketsecurity/lib/words'
+import { joinAnd } from '@socketsecurity/lib-internal/arrays'
+import { debug, debugDir } from '@socketsecurity/lib-internal/debug'
+import { readJsonSync } from '@socketsecurity/lib-internal/fs'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'
+import { pluralize } from '@socketsecurity/lib-internal/words'

 import {
   checkCiEnvVars,
diff --git a/packages/cli/src/commands/fix/env-helpers.mts b/packages/cli/src/commands/fix/env-helpers.mts
index 74450a198..4a2efb52b 100644
--- a/packages/cli/src/commands/fix/env-helpers.mts
+++ b/packages/cli/src/commands/fix/env-helpers.mts
@@ -1,6 +1,6 @@
-import { joinAnd } from '@socketsecurity/lib/arrays'
-import { debug, isDebug } from '@socketsecurity/lib/debug'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { joinAnd } from '@socketsecurity/lib-internal/arrays'
+import { debug, isDebug } from '@socketsecurity/lib-internal/debug'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import { getSocketFixPrs } from './pull-request.mts'
 import ENV from '../../constants/env.mts'
diff --git a/packages/cli/src/commands/fix/ghsa-tracker.mts b/packages/cli/src/commands/fix/ghsa-tracker.mts
index 2911edb33..f82b31f2e 100644
--- a/packages/cli/src/commands/fix/ghsa-tracker.mts
+++ b/packages/cli/src/commands/fix/ghsa-tracker.mts
@@ -1,7 +1,7 @@
 import path from 'node:path'

-import { debug, debugDir } from '@socketsecurity/lib/debug'
-import { readJson, safeMkdir, writeJson } from '@socketsecurity/lib/fs'
+import { debug, debugDir } from '@socketsecurity/lib-internal/debug'
+import { readJson, safeMkdir, writeJson } from '@socketsecurity/lib-internal/fs'

 import { getSocketFixBranchName } from './git.mts'
diff --git a/packages/cli/src/commands/fix/git.mts b/packages/cli/src/commands/fix/git.mts
index d188dd5f4..ab08ae5e2 100644
--- a/packages/cli/src/commands/fix/git.mts
+++ b/packages/cli/src/commands/fix/git.mts
@@ -1,4 +1,4 @@
-import { joinAnd } from '@socketsecurity/lib/arrays'
+import { joinAnd } from '@socketsecurity/lib-internal/arrays'

 import { SOCKET_WEBSITE_URL } from '../../constants/socket.mts'
diff --git a/packages/cli/src/commands/fix/handle-fix.mts b/packages/cli/src/commands/fix/handle-fix.mts
index 115fb3a5d..ae2f79875 100644
--- a/packages/cli/src/commands/fix/handle-fix.mts
+++ b/packages/cli/src/commands/fix/handle-fix.mts
@@ -1,6 +1,6 @@
-import { joinAnd } from '@socketsecurity/lib/arrays'
-import { debug, debugDir } from '@socketsecurity/lib/debug'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { joinAnd } from '@socketsecurity/lib-internal/arrays'
+import { debug, debugDir } from '@socketsecurity/lib-internal/debug'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import { coanaFix } from './coana-fix.mts'
 import { outputFixResult } from './output-fix-result.mts'
@@ -9,7 +9,7 @@
 import { convertPurlToGhsas } from '../../utils/purl/to-ghsa.mts'

 import type { FixConfig } from './types.mts'
 import type { OutputKind } from '../../types.mts'
-import type { Remap } from '@socketsecurity/lib/objects'
+import type { Remap } from '@socketsecurity/lib-internal/objects'

 const logger = getDefaultLogger()

 const GHSA_FORMAT_REGEXP = /^GHSA-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}$/
diff --git a/packages/cli/src/commands/fix/output-fix-result.mts b/packages/cli/src/commands/fix/output-fix-result.mts
index 63fc1a698..7fbde3e22 100644
--- a/packages/cli/src/commands/fix/output-fix-result.mts
+++ b/packages/cli/src/commands/fix/output-fix-result.mts
@@ -1,4 +1,4 @@
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts'
 import { mdError, mdHeader } from '../../utils/output/markdown.mts'
diff --git a/packages/cli/src/commands/fix/pr-lifecycle-logger.mts b/packages/cli/src/commands/fix/pr-lifecycle-logger.mts
index 5c2bdba03..fd9b83f96 100644
--- a/packages/cli/src/commands/fix/pr-lifecycle-logger.mts
+++ b/packages/cli/src/commands/fix/pr-lifecycle-logger.mts
@@ -1,6 +1,6 @@
 import colors from 'yoctocolors-cjs'

-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 const logger = getDefaultLogger()

 export type PrLifecycleEvent =
diff --git a/packages/cli/src/commands/fix/pull-request.mts b/packages/cli/src/commands/fix/pull-request.mts
index 779aadf39..1bab2a7ad 100644
--- a/packages/cli/src/commands/fix/pull-request.mts
+++ b/packages/cli/src/commands/fix/pull-request.mts
@@ -1,6 +1,6 @@
-import { UNKNOWN_VALUE } from '@socketsecurity/lib/constants/core'
-import { debug, debugDir } from '@socketsecurity/lib/debug'
-import { isNonEmptyString } from '@socketsecurity/lib/strings'
+import { UNKNOWN_VALUE } from '@socketsecurity/lib-internal/constants/core'
+import { debug, debugDir } from '@socketsecurity/lib-internal/debug'
+import { isNonEmptyString } from '@socketsecurity/lib-internal/strings'

 import {
   getSocketFixBranchPattern,
@@ -26,7 +26,7 @@
 import { createPrProvider } from '../../utils/git/provider-factory.mts'

 import type { OctokitResponse } from '@octokit/types'
-import type { JsonContent } from '@socketsecurity/lib/fs'
+import type { JsonContent } from '@socketsecurity/lib-internal/fs'

 export type OpenSocketFixPrOptions = {
   baseBranch?: string | undefined
diff --git a/packages/cli/src/commands/fix/types.mts b/packages/cli/src/commands/fix/types.mts
index de125eba0..02fdde02b 100644
--- a/packages/cli/src/commands/fix/types.mts
+++ b/packages/cli/src/commands/fix/types.mts
@@ -1,6 +1,6 @@
 import type { OutputKind } from '../../types.mts'
 import type { RangeStyle } from '../../utils/semver.mts'
-import type { Spinner } from '@socketsecurity/lib/spinner'
+import type { Spinner } from '@socketsecurity/lib-internal/spinner'

 export type FixConfig = {
   applyFixes: boolean
diff --git a/packages/cli/src/commands/json/output-cmd-json.mts b/packages/cli/src/commands/json/output-cmd-json.mts
index 09a8c7e87..d12f2d8aa 100644
--- a/packages/cli/src/commands/json/output-cmd-json.mts
+++ b/packages/cli/src/commands/json/output-cmd-json.mts
@@ -1,8 +1,8 @@
 import { existsSync } from 'node:fs'
 import path from 'node:path'

-import { safeReadFileSync, safeStatsSync } from '@socketsecurity/lib/fs'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { safeReadFileSync, safeStatsSync } from '@socketsecurity/lib-internal/fs'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import { REDACTED } from '../../constants/cli.mts'
 import ENV from '../../constants/env.mts'
diff --git a/packages/cli/src/commands/login/attempt-login.mts b/packages/cli/src/commands/login/attempt-login.mts
index be38b4fee..11f3c5ab4 100644
--- a/packages/cli/src/commands/login/attempt-login.mts
+++ b/packages/cli/src/commands/login/attempt-login.mts
@@ -1,7 +1,7 @@
-import { joinAnd } from '@socketsecurity/lib/arrays'
-import { SOCKET_PUBLIC_API_TOKEN } from '@socketsecurity/lib/constants/socket'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
-import { confirm, password, select } from '@socketsecurity/lib/stdio/prompts'
+import { joinAnd } from '@socketsecurity/lib-internal/arrays'
+import { SOCKET_PUBLIC_API_TOKEN } from '@socketsecurity/lib-internal/constants/socket'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'
+import { confirm, password, select } from '@socketsecurity/lib-internal/stdio/prompts'

 import { applyLogin } from './apply-login.mts'
 import {
@@ -22,7 +22,7 @@
 import { socketDocsLink } from '../../utils/terminal/link.mts'
 import { setupTabCompletion } from '../install/setup-tab-completion.mts'
 import { fetchOrganization } from '../organization/fetch-organization-list.mts'

-import type { Choice } from '@socketsecurity/lib/stdio/prompts'
+import type { Choice } from '@socketsecurity/lib-internal/stdio/prompts'

 const logger = getDefaultLogger()

 type OrgChoice = Choice
diff --git a/packages/cli/src/commands/login/cmd-login.mts b/packages/cli/src/commands/login/cmd-login.mts
index c5552a1ff..5c1071724 100644
--- a/packages/cli/src/commands/login/cmd-login.mts
+++ b/packages/cli/src/commands/login/cmd-login.mts
@@ -1,5 +1,5 @@
 import isInteractive from '@socketregistry/is-interactive/index.cjs'

-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import { attemptLogin } from './attempt-login.mts'
 import { DRY_RUN_BAILING_NOW } from '../../constants/cli.mts'
diff --git a/packages/cli/src/commands/logout/attempt-logout.mts b/packages/cli/src/commands/logout/attempt-logout.mts
index 109782907..55a99f3c5 100644
--- a/packages/cli/src/commands/logout/attempt-logout.mts
+++ b/packages/cli/src/commands/logout/attempt-logout.mts
@@ -1,4 +1,4 @@
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import { applyLogout } from './apply-logout.mts'
 import { isConfigFromFlag } from '../../utils/config.mts'
diff --git a/packages/cli/src/commands/logout/cmd-logout.mts b/packages/cli/src/commands/logout/cmd-logout.mts
index d3e122275..84b5856c3 100644
--- a/packages/cli/src/commands/logout/cmd-logout.mts
+++ b/packages/cli/src/commands/logout/cmd-logout.mts
@@ -1,4 +1,4 @@
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import { attemptLogout } from './attempt-logout.mts'
 import { DRY_RUN_BAILING_NOW } from '../../constants/cli.mts'
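Every hunk from here through the remaining command modules is the same mechanical specifier rename, `@socketsecurity/lib/X` to `@socketsecurity/lib-internal/X`. A rename of this shape is easy to script; a hypothetical one-off codemod (the glob pattern and this whole script are assumptions for illustration, not part of this change) might look like:

    // Hypothetical one-off codemod for the specifier rename; not part of this diff.
    import { promises as fs } from 'node:fs'

    import fastGlob from 'fast-glob'

    const files = await fastGlob(['packages/cli/src/**/*.mts'])
    for (const file of files) {
      const src = await fs.readFile(file, 'utf8')
      // Rewrite only the package prefix; subpaths like /logger are untouched.
      const out = src.replaceAll("'@socketsecurity/lib/", "'@socketsecurity/lib-internal/")
      if (out !== src) {
        await fs.writeFile(file, out)
      }
    }
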
diff --git a/packages/cli/src/commands/manifest/cmd-manifest-auto.mts b/packages/cli/src/commands/manifest/cmd-manifest-auto.mts
index c1ef9b9e6..487040613 100644
--- a/packages/cli/src/commands/manifest/cmd-manifest-auto.mts
+++ b/packages/cli/src/commands/manifest/cmd-manifest-auto.mts
@@ -1,7 +1,7 @@
 import path from 'node:path'

-import { debugDir } from '@socketsecurity/lib/debug'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { debugDir } from '@socketsecurity/lib-internal/debug'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 const logger = getDefaultLogger()
diff --git a/packages/cli/src/commands/manifest/cmd-manifest-cdxgen.mts b/packages/cli/src/commands/manifest/cmd-manifest-cdxgen.mts
index eb6a45e0b..ad01b8762 100644
--- a/packages/cli/src/commands/manifest/cmd-manifest-cdxgen.mts
+++ b/packages/cli/src/commands/manifest/cmd-manifest-cdxgen.mts
@@ -1,10 +1,10 @@
 import terminalLink from 'terminal-link'
 import yargsParse from 'yargs-parser'

-import { joinAnd } from '@socketsecurity/lib/arrays'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
-import { isPath } from '@socketsecurity/lib/path'
-import { pluralize } from '@socketsecurity/lib/words'
+import { joinAnd } from '@socketsecurity/lib-internal/arrays'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'
+import { isPath } from '@socketsecurity/lib-internal/path'
+import { pluralize } from '@socketsecurity/lib-internal/words'

 import { runCdxgen } from './run-cdxgen.mts'
 import { DRY_RUN_BAILING_NOW, FLAG_HELP } from '../../constants/cli.mjs'
diff --git a/packages/cli/src/commands/manifest/cmd-manifest-conda.mts b/packages/cli/src/commands/manifest/cmd-manifest-conda.mts
index 1d0526b0d..b09c38e0f 100644
--- a/packages/cli/src/commands/manifest/cmd-manifest-conda.mts
+++ b/packages/cli/src/commands/manifest/cmd-manifest-conda.mts
@@ -1,6 +1,6 @@
 import path from 'node:path'

-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import { handleManifestConda } from './handle-manifest-conda.mts'
 import {
diff --git a/packages/cli/src/commands/manifest/cmd-manifest-gradle.mts b/packages/cli/src/commands/manifest/cmd-manifest-gradle.mts
index 99be468a1..880952e25 100644
--- a/packages/cli/src/commands/manifest/cmd-manifest-gradle.mts
+++ b/packages/cli/src/commands/manifest/cmd-manifest-gradle.mts
@@ -1,7 +1,7 @@
 import path from 'node:path'

-import { debug } from '@socketsecurity/lib/debug'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { debug } from '@socketsecurity/lib-internal/debug'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import { convertGradleToMaven } from './convert-gradle-to-maven.mts'
 import { outputManifest } from './output-manifest.mts'
diff --git a/packages/cli/src/commands/manifest/cmd-manifest-kotlin.mts b/packages/cli/src/commands/manifest/cmd-manifest-kotlin.mts
index fd53fe6d1..065b9a507 100644
--- a/packages/cli/src/commands/manifest/cmd-manifest-kotlin.mts
+++ b/packages/cli/src/commands/manifest/cmd-manifest-kotlin.mts
@@ -1,7 +1,7 @@
 import path from 'node:path'

-import { debug } from '@socketsecurity/lib/debug'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { debug } from '@socketsecurity/lib-internal/debug'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import { convertGradleToMaven } from './convert-gradle-to-maven.mts'
 import { outputManifest } from './output-manifest.mts'
diff --git a/packages/cli/src/commands/manifest/cmd-manifest-scala.mts b/packages/cli/src/commands/manifest/cmd-manifest-scala.mts
index 536298fcf..356bd6c23 100644
--- a/packages/cli/src/commands/manifest/cmd-manifest-scala.mts
+++ b/packages/cli/src/commands/manifest/cmd-manifest-scala.mts
@@ -1,7 +1,7 @@
 import path from 'node:path'

-import { debug } from '@socketsecurity/lib/debug'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { debug } from '@socketsecurity/lib-internal/debug'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import { convertSbtToMaven } from './convert-sbt-to-maven.mts'
 import { outputManifest } from './output-manifest.mts'
diff --git a/packages/cli/src/commands/manifest/cmd-manifest-setup.mts b/packages/cli/src/commands/manifest/cmd-manifest-setup.mts
index f81f44215..4b6d7ac3f 100644
--- a/packages/cli/src/commands/manifest/cmd-manifest-setup.mts
+++ b/packages/cli/src/commands/manifest/cmd-manifest-setup.mts
@@ -1,6 +1,6 @@
 import path from 'node:path'

-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import { handleManifestSetup } from './handle-manifest-setup.mts'
 import { DRY_RUN_BAILING_NOW } from '../../constants/cli.mjs'
diff --git a/packages/cli/src/commands/manifest/convert-conda-to-requirements.mts b/packages/cli/src/commands/manifest/convert-conda-to-requirements.mts
index b6bdae11d..0735f6cc1 100644
--- a/packages/cli/src/commands/manifest/convert-conda-to-requirements.mts
+++ b/packages/cli/src/commands/manifest/convert-conda-to-requirements.mts
@@ -1,8 +1,8 @@
 import { existsSync, readFileSync } from 'node:fs'
 import path from 'node:path'

-import { getDefaultLogger } from '@socketsecurity/lib/logger'
-import { stripAnsi } from '@socketsecurity/lib/strings'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'
+import { stripAnsi } from '@socketsecurity/lib-internal/strings'

 import type { CResult } from '../../types.mts'

 const logger = getDefaultLogger()
diff --git a/packages/cli/src/commands/manifest/convert-gradle-to-maven.mts b/packages/cli/src/commands/manifest/convert-gradle-to-maven.mts
index f728e1526..14d67e4d8 100644
--- a/packages/cli/src/commands/manifest/convert-gradle-to-maven.mts
+++ b/packages/cli/src/commands/manifest/convert-gradle-to-maven.mts
@@ -1,9 +1,9 @@
 import fs from 'node:fs'
 import path from 'node:path'

-import { getSpinner } from '@socketsecurity/lib/constants/process'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
-import { spawn } from '@socketsecurity/lib/spawn'
+import { getSpinner } from '@socketsecurity/lib-internal/constants/process'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'
+import { spawn } from '@socketsecurity/lib-internal/spawn'

 import { distPath } from '../../constants/paths.mjs'
diff --git a/packages/cli/src/commands/manifest/convert-sbt-to-maven.mts b/packages/cli/src/commands/manifest/convert-sbt-to-maven.mts
index e887786fb..216ef9fd8 100644
--- a/packages/cli/src/commands/manifest/convert-sbt-to-maven.mts
+++ b/packages/cli/src/commands/manifest/convert-sbt-to-maven.mts
@@ -1,7 +1,7 @@
-import { getSpinner } from '@socketsecurity/lib/constants/process'
-import { safeReadFile } from '@socketsecurity/lib/fs'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
-import { spawn } from '@socketsecurity/lib/spawn'
+import { getSpinner } from '@socketsecurity/lib-internal/constants/process'
+import { safeReadFile } from '@socketsecurity/lib-internal/fs'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'
+import { spawn } from '@socketsecurity/lib-internal/spawn'

 import type { ManifestResult } from './output-manifest.mts'
 import type { CResult, OutputKind } from '../../types.mts'
diff --git a/packages/cli/src/commands/manifest/detect-manifest-actions.mts b/packages/cli/src/commands/manifest/detect-manifest-actions.mts
index 6096b267d..aa5fa3c6a 100644
--- a/packages/cli/src/commands/manifest/detect-manifest-actions.mts
+++ b/packages/cli/src/commands/manifest/detect-manifest-actions.mts
@@ -4,7 +4,7 @@
 import { existsSync } from 'node:fs'
 import path from 'node:path'

-import { debugLog } from '@socketsecurity/lib/debug'
+import { debugLog } from '@socketsecurity/lib-internal/debug'

 import { ENVIRONMENT_YAML, ENVIRONMENT_YML } from '../../constants/paths.mjs'
 import { SOCKET_JSON } from '../../constants/socket.mts'
diff --git a/packages/cli/src/commands/manifest/generate_auto_manifest.mts b/packages/cli/src/commands/manifest/generate_auto_manifest.mts
index 5f32be127..d94c7619d 100644
--- a/packages/cli/src/commands/manifest/generate_auto_manifest.mts
+++ b/packages/cli/src/commands/manifest/generate_auto_manifest.mts
@@ -1,6 +1,6 @@
 import path from 'node:path'

-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import { convertGradleToMaven } from './convert-gradle-to-maven.mts'
 import { convertSbtToMaven } from './convert-sbt-to-maven.mts'
diff --git a/packages/cli/src/commands/manifest/output-manifest-setup.mts b/packages/cli/src/commands/manifest/output-manifest-setup.mts
index 6e72ff273..d2b947037 100644
--- a/packages/cli/src/commands/manifest/output-manifest-setup.mts
+++ b/packages/cli/src/commands/manifest/output-manifest-setup.mts
@@ -1,4 +1,4 @@
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts'
diff --git a/packages/cli/src/commands/manifest/output-manifest.mts b/packages/cli/src/commands/manifest/output-manifest.mts
index d555a3607..a6af4001e 100644
--- a/packages/cli/src/commands/manifest/output-manifest.mts
+++ b/packages/cli/src/commands/manifest/output-manifest.mts
@@ -1,6 +1,6 @@
 import fs from 'node:fs'

-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts'
 import { mdHeader } from '../../utils/output/markdown.mts'
diff --git a/packages/cli/src/commands/manifest/output-requirements.mts b/packages/cli/src/commands/manifest/output-requirements.mts
index c29d5d8b3..33007e404 100644
--- a/packages/cli/src/commands/manifest/output-requirements.mts
+++ b/packages/cli/src/commands/manifest/output-requirements.mts
@@ -1,6 +1,6 @@
 import fs from 'node:fs'

-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import { REQUIREMENTS_TXT } from '../../constants/paths.mjs'
 import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts'
diff --git a/packages/cli/src/commands/manifest/run-cdxgen.mts b/packages/cli/src/commands/manifest/run-cdxgen.mts
index 92c827966..e2008db4d 100644
--- a/packages/cli/src/commands/manifest/run-cdxgen.mts
+++ b/packages/cli/src/commands/manifest/run-cdxgen.mts
@@ -3,10 +3,10 @@
 import path from 'node:path'

 import colors from 'yoctocolors-cjs'

-import { NPM, PNPM, YARN } from '@socketsecurity/lib/constants/agents'
-import { SOCKET_PUBLIC_API_TOKEN } from '@socketsecurity/lib/constants/socket'
-import { safeDeleteSync } from '@socketsecurity/lib/fs'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { NPM, PNPM, YARN } from '@socketsecurity/lib-internal/constants/agents'
+import { SOCKET_PUBLIC_API_TOKEN } from '@socketsecurity/lib-internal/constants/socket'
+import { safeDeleteSync } from '@socketsecurity/lib-internal/fs'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import { FLAG_HELP } from '../../constants/cli.mjs'
 import {
diff --git a/packages/cli/src/commands/manifest/setup-manifest-config.mts b/packages/cli/src/commands/manifest/setup-manifest-config.mts
index 614520bee..6ec050d71 100644
--- a/packages/cli/src/commands/manifest/setup-manifest-config.mts
+++ b/packages/cli/src/commands/manifest/setup-manifest-config.mts
@@ -1,9 +1,9 @@
 import fs from 'node:fs'
 import path from 'node:path'

-import { debugDir } from '@socketsecurity/lib/debug'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
-import { input, select } from '@socketsecurity/lib/stdio/prompts'
+import { debugDir } from '@socketsecurity/lib-internal/debug'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'
+import { input, select } from '@socketsecurity/lib-internal/stdio/prompts'

 import { detectManifestActions } from './detect-manifest-actions.mts'
 import { REQUIREMENTS_TXT } from '../../constants/paths.mjs'
diff --git a/packages/cli/src/commands/npm/cmd-npm.mts b/packages/cli/src/commands/npm/cmd-npm.mts
index e916e5b8a..19e8aad52 100644
--- a/packages/cli/src/commands/npm/cmd-npm.mts
+++ b/packages/cli/src/commands/npm/cmd-npm.mts
@@ -1,7 +1,7 @@
 import { createRequire } from 'node:module'

-import { NPM } from '@socketsecurity/lib/constants/agents'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { NPM } from '@socketsecurity/lib-internal/constants/agents'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import {
   DRY_RUN_BAILING_NOW,
diff --git a/packages/cli/src/commands/npx/cmd-npx.mts b/packages/cli/src/commands/npx/cmd-npx.mts
index 05d4c3666..d426674fc 100644
--- a/packages/cli/src/commands/npx/cmd-npx.mts
+++ b/packages/cli/src/commands/npx/cmd-npx.mts
@@ -1,7 +1,7 @@
 import { createRequire } from 'node:module'

-import { NPX } from '@socketsecurity/lib/constants/agents'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { NPX } from '@socketsecurity/lib-internal/constants/agents'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import {
   DRY_RUN_BAILING_NOW,
diff --git a/packages/cli/src/commands/oops/cmd-oops.mts b/packages/cli/src/commands/oops/cmd-oops.mts
index d0aa2e693..4e1248794 100644
--- a/packages/cli/src/commands/oops/cmd-oops.mts
+++ b/packages/cli/src/commands/oops/cmd-oops.mts
@@ -1,4 +1,4 @@
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
+import { getDefaultLogger } from '@socketsecurity/lib-internal/logger'

 import { DRY_RUN_BAILING_NOW } from '../../constants/cli.mts'
 import { commonFlags, outputFlags } from '../../flags.mts'
diff --git a/packages/cli/src/commands/package/cmd-package-score.mts b/packages/cli/src/commands/package/cmd-package-score.mts
53eb796c4..1841cc502 100644 --- a/packages/cli/src/commands/package/cmd-package-score.mts +++ b/packages/cli/src/commands/package/cmd-package-score.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { handlePurlDeepScore } from './handle-purl-deep-score.mts' import { parsePackageSpecifiers } from './parse-package-specifiers.mts' diff --git a/packages/cli/src/commands/package/cmd-package-shallow.mts b/packages/cli/src/commands/package/cmd-package-shallow.mts index bb9fdbab0..e5f08bf9d 100644 --- a/packages/cli/src/commands/package/cmd-package-shallow.mts +++ b/packages/cli/src/commands/package/cmd-package-shallow.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { handlePurlsShallowScore } from './handle-purls-shallow-score.mts' import { parsePackageSpecifiers } from './parse-package-specifiers.mts' diff --git a/packages/cli/src/commands/package/fetch-purl-deep-score.mts b/packages/cli/src/commands/package/fetch-purl-deep-score.mts index 9e06267dc..b59af0aa8 100644 --- a/packages/cli/src/commands/package/fetch-purl-deep-score.mts +++ b/packages/cli/src/commands/package/fetch-purl-deep-score.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { queryApiSafeJson } from '../../utils/socket/api.mjs' diff --git a/packages/cli/src/commands/package/fetch-purls-shallow-score.mts b/packages/cli/src/commands/package/fetch-purls-shallow-score.mts index f50abdd0e..42d7aabf4 100644 --- a/packages/cli/src/commands/package/fetch-purls-shallow-score.mts +++ b/packages/cli/src/commands/package/fetch-purls-shallow-score.mts @@ -1,5 +1,5 @@ -import { joinAnd } from '@socketsecurity/lib/arrays' -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { joinAnd } from '@socketsecurity/lib-internal/arrays' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { handleApiCall } from '../../utils/socket/api.mjs' import { setupSdk } from '../../utils/socket/sdk.mjs' diff --git a/packages/cli/src/commands/package/handle-purl-deep-score.mts b/packages/cli/src/commands/package/handle-purl-deep-score.mts index 3804df93b..0850c0270 100644 --- a/packages/cli/src/commands/package/handle-purl-deep-score.mts +++ b/packages/cli/src/commands/package/handle-purl-deep-score.mts @@ -1,4 +1,4 @@ -import { debug, debugDir } from '@socketsecurity/lib/debug' +import { debug, debugDir } from '@socketsecurity/lib-internal/debug' import { fetchPurlDeepScore } from './fetch-purl-deep-score.mts' import { outputPurlsDeepScore } from './output-purls-deep-score.mts' diff --git a/packages/cli/src/commands/package/handle-purls-shallow-score.mts b/packages/cli/src/commands/package/handle-purls-shallow-score.mts index 3aab031e1..3aac15847 100644 --- a/packages/cli/src/commands/package/handle-purls-shallow-score.mts +++ b/packages/cli/src/commands/package/handle-purls-shallow-score.mts @@ -1,4 +1,4 @@ -import { debug, debugDir } from '@socketsecurity/lib/debug' +import { debug, debugDir } from '@socketsecurity/lib-internal/debug' import { fetchPurlsShallowScore } from './fetch-purls-shallow-score.mts' import { outputPurlsShallowScore } from './output-purls-shallow-score.mts' diff --git a/packages/cli/src/commands/package/output-purls-deep-score.mts 
b/packages/cli/src/commands/package/output-purls-deep-score.mts index d57a3e1a3..199fc5237 100644 --- a/packages/cli/src/commands/package/output-purls-deep-score.mts +++ b/packages/cli/src/commands/package/output-purls-deep-score.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts' import { mdTable } from '../../utils/output/markdown.mts' diff --git a/packages/cli/src/commands/package/output-purls-shallow-score.mts b/packages/cli/src/commands/package/output-purls-shallow-score.mts index a9e7e7da6..38ffb93f0 100644 --- a/packages/cli/src/commands/package/output-purls-shallow-score.mts +++ b/packages/cli/src/commands/package/output-purls-shallow-score.mts @@ -1,8 +1,8 @@ import colors from 'yoctocolors-cjs' -import { joinAnd } from '@socketsecurity/lib/arrays' -import { debug } from '@socketsecurity/lib/debug' -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { joinAnd } from '@socketsecurity/lib-internal/arrays' +import { debug } from '@socketsecurity/lib-internal/debug' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts' import { serializeResultJson } from '../../utils/output/result-json.mjs' diff --git a/packages/cli/src/commands/patch/cmd-patch-apply.mts b/packages/cli/src/commands/patch/cmd-patch-apply.mts index 0407f2ae2..32ad6cb33 100644 --- a/packages/cli/src/commands/patch/cmd-patch-apply.mts +++ b/packages/cli/src/commands/patch/cmd-patch-apply.mts @@ -1,12 +1,12 @@ import { existsSync } from 'node:fs' import path from 'node:path' -import { arrayUnique } from '@socketsecurity/lib/arrays' +import { arrayUnique } from '@socketsecurity/lib-internal/arrays' import { DOT_SOCKET_DIR, MANIFEST_JSON, -} from '@socketsecurity/lib/constants/paths' -import { getSpinner } from '@socketsecurity/lib/constants/process' +} from '@socketsecurity/lib-internal/constants/paths' +import { getSpinner } from '@socketsecurity/lib-internal/constants/process' import { handlePatchApply } from './handle-patch-apply.mts' import { commonFlags, outputFlags } from '../../flags.mts' diff --git a/packages/cli/src/commands/patch/cmd-patch-cleanup.mts b/packages/cli/src/commands/patch/cmd-patch-cleanup.mts index 8857d4818..872576eba 100644 --- a/packages/cli/src/commands/patch/cmd-patch-cleanup.mts +++ b/packages/cli/src/commands/patch/cmd-patch-cleanup.mts @@ -4,8 +4,8 @@ import path from 'node:path' import { DOT_SOCKET_DIR, MANIFEST_JSON, -} from '@socketsecurity/lib/constants/paths' -import { getSpinner } from '@socketsecurity/lib/constants/process' +} from '@socketsecurity/lib-internal/constants/paths' +import { getSpinner } from '@socketsecurity/lib-internal/constants/process' import { handlePatchCleanup } from './handle-patch-cleanup.mts' import { commonFlags, outputFlags } from '../../flags.mts' diff --git a/packages/cli/src/commands/patch/cmd-patch-discover.mts b/packages/cli/src/commands/patch/cmd-patch-discover.mts index 83a731fce..575600bdb 100644 --- a/packages/cli/src/commands/patch/cmd-patch-discover.mts +++ b/packages/cli/src/commands/patch/cmd-patch-discover.mts @@ -1,7 +1,7 @@ import { existsSync } from 'node:fs' import path from 'node:path' -import { getSpinner } from '@socketsecurity/lib/constants/process' +import { getSpinner } from '@socketsecurity/lib-internal/constants/process' import { 
handlePatchDiscover } from './handle-patch-discover.mts' import { commonFlags, outputFlags } from '../../flags.mts' diff --git a/packages/cli/src/commands/patch/cmd-patch-download.mts b/packages/cli/src/commands/patch/cmd-patch-download.mts index b886a1d7b..d2c908ede 100644 --- a/packages/cli/src/commands/patch/cmd-patch-download.mts +++ b/packages/cli/src/commands/patch/cmd-patch-download.mts @@ -1,8 +1,8 @@ import { existsSync } from 'node:fs' import path from 'node:path' -import { DOT_SOCKET_DIR } from '@socketsecurity/lib/constants/paths' -import { getSpinner } from '@socketsecurity/lib/constants/process' +import { DOT_SOCKET_DIR } from '@socketsecurity/lib-internal/constants/paths' +import { getSpinner } from '@socketsecurity/lib-internal/constants/process' import { handlePatchDownload } from './handle-patch-download.mts' import { commonFlags, outputFlags } from '../../flags.mts' diff --git a/packages/cli/src/commands/patch/cmd-patch-get.mts b/packages/cli/src/commands/patch/cmd-patch-get.mts index af190a571..e4d72849d 100644 --- a/packages/cli/src/commands/patch/cmd-patch-get.mts +++ b/packages/cli/src/commands/patch/cmd-patch-get.mts @@ -4,8 +4,8 @@ import path from 'node:path' import { DOT_SOCKET_DIR, MANIFEST_JSON, -} from '@socketsecurity/lib/constants/paths' -import { getSpinner } from '@socketsecurity/lib/constants/process' +} from '@socketsecurity/lib-internal/constants/paths' +import { getSpinner } from '@socketsecurity/lib-internal/constants/process' import { handlePatchGet } from './handle-patch-get.mts' import { commonFlags, outputFlags } from '../../flags.mts' diff --git a/packages/cli/src/commands/patch/cmd-patch-info.mts b/packages/cli/src/commands/patch/cmd-patch-info.mts index 695365e27..76947f7e0 100644 --- a/packages/cli/src/commands/patch/cmd-patch-info.mts +++ b/packages/cli/src/commands/patch/cmd-patch-info.mts @@ -4,8 +4,8 @@ import path from 'node:path' import { DOT_SOCKET_DIR, MANIFEST_JSON, -} from '@socketsecurity/lib/constants/paths' -import { getSpinner } from '@socketsecurity/lib/constants/process' +} from '@socketsecurity/lib-internal/constants/paths' +import { getSpinner } from '@socketsecurity/lib-internal/constants/process' import { handlePatchInfo } from './handle-patch-info.mts' import { commonFlags, outputFlags } from '../../flags.mts' diff --git a/packages/cli/src/commands/patch/cmd-patch-list.mts b/packages/cli/src/commands/patch/cmd-patch-list.mts index 7db4c50e3..38bea9a23 100644 --- a/packages/cli/src/commands/patch/cmd-patch-list.mts +++ b/packages/cli/src/commands/patch/cmd-patch-list.mts @@ -4,8 +4,8 @@ import path from 'node:path' import { DOT_SOCKET_DIR, MANIFEST_JSON, -} from '@socketsecurity/lib/constants/paths' -import { getSpinner } from '@socketsecurity/lib/constants/process' +} from '@socketsecurity/lib-internal/constants/paths' +import { getSpinner } from '@socketsecurity/lib-internal/constants/process' import { handlePatchList } from './handle-patch-list.mts' import { commonFlags, outputFlags } from '../../flags.mts' diff --git a/packages/cli/src/commands/patch/cmd-patch-rm.mts b/packages/cli/src/commands/patch/cmd-patch-rm.mts index 3656f6bb7..63471ae5d 100644 --- a/packages/cli/src/commands/patch/cmd-patch-rm.mts +++ b/packages/cli/src/commands/patch/cmd-patch-rm.mts @@ -4,8 +4,8 @@ import path from 'node:path' import { DOT_SOCKET_DIR, MANIFEST_JSON, -} from '@socketsecurity/lib/constants/paths' -import { getSpinner } from '@socketsecurity/lib/constants/process' +} from '@socketsecurity/lib-internal/constants/paths' +import { 
getSpinner } from '@socketsecurity/lib-internal/constants/process' import { handlePatchRm } from './handle-patch-rm.mts' import { commonFlags, outputFlags } from '../../flags.mts' diff --git a/packages/cli/src/commands/patch/cmd-patch-status.mts b/packages/cli/src/commands/patch/cmd-patch-status.mts index d907690f0..9e7616657 100644 --- a/packages/cli/src/commands/patch/cmd-patch-status.mts +++ b/packages/cli/src/commands/patch/cmd-patch-status.mts @@ -4,8 +4,8 @@ import path from 'node:path' import { DOT_SOCKET_DIR, MANIFEST_JSON, -} from '@socketsecurity/lib/constants/paths' -import { getSpinner } from '@socketsecurity/lib/constants/process' +} from '@socketsecurity/lib-internal/constants/paths' +import { getSpinner } from '@socketsecurity/lib-internal/constants/process' import { handlePatchStatus } from './handle-patch-status.mts' import { commonFlags, outputFlags } from '../../flags.mts' diff --git a/packages/cli/src/commands/patch/handle-patch-apply.mts b/packages/cli/src/commands/patch/handle-patch-apply.mts index 19b412508..6173fa12f 100644 --- a/packages/cli/src/commands/patch/handle-patch-apply.mts +++ b/packages/cli/src/commands/patch/handle-patch-apply.mts @@ -4,21 +4,21 @@ import path from 'node:path' import fastGlob from 'fast-glob' -import { joinAnd } from '@socketsecurity/lib/arrays' -import { NPM } from '@socketsecurity/lib/constants/agents' -import { UTF8 } from '@socketsecurity/lib/constants/encoding' +import { joinAnd } from '@socketsecurity/lib-internal/arrays' +import { NPM } from '@socketsecurity/lib-internal/constants/agents' +import { UTF8 } from '@socketsecurity/lib-internal/constants/encoding' import { DOT_SOCKET_DIR, MANIFEST_JSON, NODE_MODULES, -} from '@socketsecurity/lib/constants/paths' -import { debugDirNs } from '@socketsecurity/lib/debug' -import { readDirNames } from '@socketsecurity/lib/fs' -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import { readPackageJson } from '@socketsecurity/lib/packages' -import { normalizePath } from '@socketsecurity/lib/path' -import { isNonEmptyString } from '@socketsecurity/lib/strings' -import { pluralize } from '@socketsecurity/lib/words' +} from '@socketsecurity/lib-internal/constants/paths' +import { debugDirNs } from '@socketsecurity/lib-internal/debug' +import { readDirNames } from '@socketsecurity/lib-internal/fs' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' +import { readPackageJson } from '@socketsecurity/lib-internal/packages' +import { normalizePath } from '@socketsecurity/lib-internal/path' +import { isNonEmptyString } from '@socketsecurity/lib-internal/strings' +import { pluralize } from '@socketsecurity/lib-internal/words' import { PatchManifestSchema } from './manifest-schema.mts' import { outputPatchResult } from './output-patch-result.mts' @@ -31,7 +31,7 @@ import { getPurlObject, normalizePurl } from '../../utils/purl/parse.mjs' import type { PatchRecord } from './manifest-schema.mts' import type { CResult, OutputKind } from '../../types.mts' import type { PackageURL } from '@socketregistry/packageurl-js' -import type { Spinner } from '@socketsecurity/lib/spinner' +import type { Spinner } from '@socketsecurity/lib-internal/spinner' const logger = getDefaultLogger() type PatchEntry = { diff --git a/packages/cli/src/commands/patch/handle-patch-cleanup.mts b/packages/cli/src/commands/patch/handle-patch-cleanup.mts index 0c9dcc0d0..551d1cc79 100644 --- a/packages/cli/src/commands/patch/handle-patch-cleanup.mts +++ 
b/packages/cli/src/commands/patch/handle-patch-cleanup.mts @@ -1,14 +1,14 @@ import { promises as fs } from 'node:fs' import path from 'node:path' -import { UTF8 } from '@socketsecurity/lib/constants/encoding' +import { UTF8 } from '@socketsecurity/lib-internal/constants/encoding' import { DOT_SOCKET_DIR, MANIFEST_JSON, -} from '@socketsecurity/lib/constants/paths' -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import { normalizePath } from '@socketsecurity/lib/path' -import { pluralize } from '@socketsecurity/lib/words' +} from '@socketsecurity/lib-internal/constants/paths' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' +import { normalizePath } from '@socketsecurity/lib-internal/path' +import { pluralize } from '@socketsecurity/lib-internal/words' import { PatchManifestSchema } from './manifest-schema.mts' import { outputPatchCleanupResult } from './output-patch-cleanup-result.mts' @@ -19,7 +19,7 @@ import { } from '../../utils/manifest/patch-backup.mts' import type { OutputKind } from '../../types.mts' -import type { Spinner } from '@socketsecurity/lib/spinner' +import type { Spinner } from '@socketsecurity/lib-internal/spinner' const logger = getDefaultLogger() export interface PatchCleanupData { diff --git a/packages/cli/src/commands/patch/handle-patch-discover.mts b/packages/cli/src/commands/patch/handle-patch-discover.mts index 13f552c8d..218d6bd07 100644 --- a/packages/cli/src/commands/patch/handle-patch-discover.mts +++ b/packages/cli/src/commands/patch/handle-patch-discover.mts @@ -1,5 +1,5 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import { pluralize } from '@socketsecurity/lib/words' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' +import { pluralize } from '@socketsecurity/lib-internal/words' import { handlePatchDownload } from './handle-patch-download.mts' import { outputPatchDiscoverResult } from './output-patch-discover-result.mts' @@ -11,7 +11,7 @@ import { fetchCreateOrgFullScan } from '../scan/fetch-create-org-full-scan.mts' import { fetchSupportedScanFileNames } from '../scan/fetch-supported-scan-file-names.mts' import type { OutputKind } from '../../types.mts' -import type { Spinner } from '@socketsecurity/lib/spinner' +import type { Spinner } from '@socketsecurity/lib-internal/spinner' import type { SocketSdk } from '@socketsecurity/sdk' const logger = getDefaultLogger() diff --git a/packages/cli/src/commands/patch/handle-patch-download.mts b/packages/cli/src/commands/patch/handle-patch-download.mts index d012ceab0..8014f7def 100644 --- a/packages/cli/src/commands/patch/handle-patch-download.mts +++ b/packages/cli/src/commands/patch/handle-patch-download.mts @@ -34,9 +34,9 @@ import crypto from 'node:crypto' // @ts-expect-error - No type declarations available. 
import ssri from 'ssri' -import * as cacache from '@socketsecurity/lib/cacache' -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import { pluralize } from '@socketsecurity/lib/words' +import * as cacache from '@socketsecurity/lib-internal/cacache' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' +import { pluralize } from '@socketsecurity/lib-internal/words' import { outputPatchDownloadResult } from './output-patch-download-result.mts' import ENV from '../../constants/env.mts' @@ -45,7 +45,7 @@ import { setupSdk } from '../../utils/socket/sdk.mts' import type { OutputKind } from '../../types.mts' import type { PatchRecord } from '../../utils/manifest/patches.mts' -import type { Spinner } from '@socketsecurity/lib/spinner' +import type { Spinner } from '@socketsecurity/lib-internal/spinner' import type { SocketSdk } from '@socketsecurity/sdk' const logger = getDefaultLogger() diff --git a/packages/cli/src/commands/patch/handle-patch-get.mts b/packages/cli/src/commands/patch/handle-patch-get.mts index 795684915..e97005895 100644 --- a/packages/cli/src/commands/patch/handle-patch-get.mts +++ b/packages/cli/src/commands/patch/handle-patch-get.mts @@ -1,13 +1,13 @@ import { existsSync, promises as fs } from 'node:fs' import path from 'node:path' -import { UTF8 } from '@socketsecurity/lib/constants/encoding' +import { UTF8 } from '@socketsecurity/lib-internal/constants/encoding' import { DOT_SOCKET_DIR, MANIFEST_JSON, -} from '@socketsecurity/lib/constants/paths' -import { safeMkdir } from '@socketsecurity/lib/fs' -import { normalizePath } from '@socketsecurity/lib/path' +} from '@socketsecurity/lib-internal/constants/paths' +import { safeMkdir } from '@socketsecurity/lib-internal/fs' +import { normalizePath } from '@socketsecurity/lib-internal/path' import { PatchManifestSchema } from './manifest-schema.mts' import { outputPatchGetResult } from './output-patch-get-result.mts' @@ -15,7 +15,7 @@ import { getErrorCause, InputError } from '../../utils/error/errors.mjs' import { normalizePurl } from '../../utils/purl/parse.mjs' import type { OutputKind } from '../../types.mts' -import type { Spinner } from '@socketsecurity/lib/spinner' +import type { Spinner } from '@socketsecurity/lib-internal/spinner' export interface PatchGetData { files: string[] diff --git a/packages/cli/src/commands/patch/handle-patch-info.mts b/packages/cli/src/commands/patch/handle-patch-info.mts index afa1a3d31..1e6d93ccd 100644 --- a/packages/cli/src/commands/patch/handle-patch-info.mts +++ b/packages/cli/src/commands/patch/handle-patch-info.mts @@ -1,13 +1,13 @@ import { promises as fs } from 'node:fs' import path from 'node:path' -import { UTF8 } from '@socketsecurity/lib/constants/encoding' +import { UTF8 } from '@socketsecurity/lib-internal/constants/encoding' import { DOT_SOCKET_DIR, MANIFEST_JSON, -} from '@socketsecurity/lib/constants/paths' -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import { normalizePath } from '@socketsecurity/lib/path' +} from '@socketsecurity/lib-internal/constants/paths' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' +import { normalizePath } from '@socketsecurity/lib-internal/path' import { PatchManifestSchema } from './manifest-schema.mts' import { outputPatchInfoResult } from './output-patch-info-result.mts' @@ -16,7 +16,7 @@ import { normalizePurl } from '../../utils/purl/parse.mjs' import type { PatchRecord } from './manifest-schema.mts' import type { OutputKind } from '../../types.mts' -import type { 
Spinner } from '@socketsecurity/lib/spinner' +import type { Spinner } from '@socketsecurity/lib-internal/spinner' export interface PatchInfoData { description: string | undefined diff --git a/packages/cli/src/commands/patch/handle-patch-list.mts b/packages/cli/src/commands/patch/handle-patch-list.mts index e40190e9b..c3918bf6b 100644 --- a/packages/cli/src/commands/patch/handle-patch-list.mts +++ b/packages/cli/src/commands/patch/handle-patch-list.mts @@ -1,15 +1,15 @@ import { promises as fs } from 'node:fs' import path from 'node:path' -import { UTF8 } from '@socketsecurity/lib/constants/encoding' +import { UTF8 } from '@socketsecurity/lib-internal/constants/encoding' import { DOT_SOCKET_DIR, MANIFEST_JSON, -} from '@socketsecurity/lib/constants/paths' -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import { normalizePath } from '@socketsecurity/lib/path' -import { select } from '@socketsecurity/lib/stdio/prompts' -import { pluralize } from '@socketsecurity/lib/words' +} from '@socketsecurity/lib-internal/constants/paths' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' +import { normalizePath } from '@socketsecurity/lib-internal/path' +import { select } from '@socketsecurity/lib-internal/stdio/prompts' +import { pluralize } from '@socketsecurity/lib-internal/words' import { handlePatchApply } from './handle-patch-apply.mts' import { PatchManifestSchema } from './manifest-schema.mts' @@ -18,7 +18,7 @@ import { getErrorCause } from '../../utils/error/errors.mjs' import { getPurlObject } from '../../utils/purl/parse.mjs' import type { OutputKind } from '../../types.mts' -import type { Spinner } from '@socketsecurity/lib/spinner' +import type { Spinner } from '@socketsecurity/lib-internal/spinner' const logger = getDefaultLogger() export interface PatchListEntry { diff --git a/packages/cli/src/commands/patch/handle-patch-rm.mts b/packages/cli/src/commands/patch/handle-patch-rm.mts index 220257eae..7b3da1d5a 100644 --- a/packages/cli/src/commands/patch/handle-patch-rm.mts +++ b/packages/cli/src/commands/patch/handle-patch-rm.mts @@ -1,14 +1,14 @@ import { promises as fs } from 'node:fs' import path from 'node:path' -import { UTF8 } from '@socketsecurity/lib/constants/encoding' +import { UTF8 } from '@socketsecurity/lib-internal/constants/encoding' import { DOT_SOCKET_DIR, MANIFEST_JSON, -} from '@socketsecurity/lib/constants/paths' -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import { normalizePath } from '@socketsecurity/lib/path' -import { pluralize } from '@socketsecurity/lib/words' +} from '@socketsecurity/lib-internal/constants/paths' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' +import { normalizePath } from '@socketsecurity/lib-internal/path' +import { pluralize } from '@socketsecurity/lib-internal/words' import { PatchManifestSchema } from './manifest-schema.mts' import { outputPatchRmResult } from './output-patch-rm-result.mts' @@ -22,7 +22,7 @@ import { removePatch } from '../../utils/manifest/patches.mts' import { normalizePurl } from '../../utils/purl/parse.mjs' import type { OutputKind } from '../../types.mts' -import type { Spinner } from '@socketsecurity/lib/spinner' +import type { Spinner } from '@socketsecurity/lib-internal/spinner' const logger = getDefaultLogger() export interface PatchRmData { diff --git a/packages/cli/src/commands/patch/handle-patch-status.mts b/packages/cli/src/commands/patch/handle-patch-status.mts index 99dd427dd..1dcb3eaca 100644 --- 
a/packages/cli/src/commands/patch/handle-patch-status.mts +++ b/packages/cli/src/commands/patch/handle-patch-status.mts @@ -2,15 +2,15 @@ import crypto from 'node:crypto' import { existsSync, promises as fs } from 'node:fs' import path from 'node:path' -import { UTF8 } from '@socketsecurity/lib/constants/encoding' +import { UTF8 } from '@socketsecurity/lib-internal/constants/encoding' import { DOT_SOCKET_DIR, MANIFEST_JSON, NODE_MODULES, -} from '@socketsecurity/lib/constants/paths' -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import { normalizePath } from '@socketsecurity/lib/path' -import { pluralize } from '@socketsecurity/lib/words' +} from '@socketsecurity/lib-internal/constants/paths' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' +import { normalizePath } from '@socketsecurity/lib-internal/path' +import { pluralize } from '@socketsecurity/lib-internal/words' import { PatchManifestSchema } from './manifest-schema.mts' import { outputPatchStatusResult } from './output-patch-status-result.mts' @@ -20,7 +20,7 @@ import { hasBackupForPatch } from '../../utils/manifest/patch-backup.mts' import type { PatchRecord } from './manifest-schema.mts' import type { OutputKind } from '../../types.mts' -import type { Spinner } from '@socketsecurity/lib/spinner' +import type { Spinner } from '@socketsecurity/lib-internal/spinner' const logger = getDefaultLogger() export interface PatchStatus { diff --git a/packages/cli/src/commands/patch/output-patch-cleanup-result.mts b/packages/cli/src/commands/patch/output-patch-cleanup-result.mts index de73fe771..afb07ddce 100644 --- a/packages/cli/src/commands/patch/output-patch-cleanup-result.mts +++ b/packages/cli/src/commands/patch/output-patch-cleanup-result.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts' import { serializeResultJson } from '../../utils/output/result-json.mjs' diff --git a/packages/cli/src/commands/patch/output-patch-discover-result.mts b/packages/cli/src/commands/patch/output-patch-discover-result.mts index 6b750e706..d43dffc0c 100644 --- a/packages/cli/src/commands/patch/output-patch-discover-result.mts +++ b/packages/cli/src/commands/patch/output-patch-discover-result.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { OUTPUT_JSON } from '../../constants/cli.mjs' import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts' diff --git a/packages/cli/src/commands/patch/output-patch-download-result.mts b/packages/cli/src/commands/patch/output-patch-download-result.mts index 024405681..00b27b3eb 100644 --- a/packages/cli/src/commands/patch/output-patch-download-result.mts +++ b/packages/cli/src/commands/patch/output-patch-download-result.mts @@ -11,8 +11,8 @@ * - JSON output for automation */ -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import { pluralize } from '@socketsecurity/lib/words' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' +import { pluralize } from '@socketsecurity/lib-internal/words' import { OUTPUT_JSON } from '../../constants/cli.mts' import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts' diff --git a/packages/cli/src/commands/patch/output-patch-get-result.mts 
b/packages/cli/src/commands/patch/output-patch-get-result.mts index 2736e8912..6e23e355c 100644 --- a/packages/cli/src/commands/patch/output-patch-get-result.mts +++ b/packages/cli/src/commands/patch/output-patch-get-result.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts' import { serializeResultJson } from '../../utils/output/result-json.mjs' diff --git a/packages/cli/src/commands/patch/output-patch-info-result.mts b/packages/cli/src/commands/patch/output-patch-info-result.mts index 52fa55423..4dd5103cf 100644 --- a/packages/cli/src/commands/patch/output-patch-info-result.mts +++ b/packages/cli/src/commands/patch/output-patch-info-result.mts @@ -1,5 +1,5 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import { pluralize } from '@socketsecurity/lib/words' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' +import { pluralize } from '@socketsecurity/lib-internal/words' import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts' import { mdHeader, mdKeyValue } from '../../utils/output/markdown.mts' diff --git a/packages/cli/src/commands/patch/output-patch-list-result.mts b/packages/cli/src/commands/patch/output-patch-list-result.mts index 18842e5c9..00dff0d5e 100644 --- a/packages/cli/src/commands/patch/output-patch-list-result.mts +++ b/packages/cli/src/commands/patch/output-patch-list-result.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts' import { mdHeader, mdKeyValue } from '../../utils/output/markdown.mts' diff --git a/packages/cli/src/commands/patch/output-patch-result.mts b/packages/cli/src/commands/patch/output-patch-result.mts index 92afa7681..b529ebc77 100644 --- a/packages/cli/src/commands/patch/output-patch-result.mts +++ b/packages/cli/src/commands/patch/output-patch-result.mts @@ -1,5 +1,5 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import { pluralize } from '@socketsecurity/lib/words' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' +import { pluralize } from '@socketsecurity/lib-internal/words' import { OUTPUT_JSON } from '../../constants/cli.mts' import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts' diff --git a/packages/cli/src/commands/patch/output-patch-rm-result.mts b/packages/cli/src/commands/patch/output-patch-rm-result.mts index 6ee67e194..37f10adef 100644 --- a/packages/cli/src/commands/patch/output-patch-rm-result.mts +++ b/packages/cli/src/commands/patch/output-patch-rm-result.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts' import { serializeResultJson } from '../../utils/output/result-json.mjs' diff --git a/packages/cli/src/commands/patch/output-patch-status-result.mts b/packages/cli/src/commands/patch/output-patch-status-result.mts index ed6e753aa..40e1a1979 100644 --- a/packages/cli/src/commands/patch/output-patch-status-result.mts +++ b/packages/cli/src/commands/patch/output-patch-status-result.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { 
getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts' import { mdHeader, mdKeyValue } from '../../utils/output/markdown.mts' diff --git a/packages/cli/src/commands/pip/cmd-pip.mts b/packages/cli/src/commands/pip/cmd-pip.mts index e182097a3..c08072a3c 100644 --- a/packages/cli/src/commands/pip/cmd-pip.mts +++ b/packages/cli/src/commands/pip/cmd-pip.mts @@ -24,8 +24,8 @@ * - Python CLI: src/utils/python/standalone.mts */ -import { WIN32 } from '@socketsecurity/lib/constants/platform' -import { spawn } from '@socketsecurity/lib/spawn' +import { WIN32 } from '@socketsecurity/lib-internal/constants/platform' +import { spawn } from '@socketsecurity/lib-internal/spawn' import { commonFlags } from '../../flags.mts' import { meowOrExit } from '../../utils/cli/with-subcommands.mjs' diff --git a/packages/cli/src/commands/pnpm/cmd-pnpm.mts b/packages/cli/src/commands/pnpm/cmd-pnpm.mts index ba1b00a25..586e5e200 100644 --- a/packages/cli/src/commands/pnpm/cmd-pnpm.mts +++ b/packages/cli/src/commands/pnpm/cmd-pnpm.mts @@ -1,7 +1,7 @@ import { createRequire } from 'node:module' -import { PNPM } from '@socketsecurity/lib/constants/agents' -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { PNPM } from '@socketsecurity/lib-internal/constants/agents' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { DRY_RUN_BAILING_NOW, diff --git a/packages/cli/src/commands/scan/cmd-scan-create.mts b/packages/cli/src/commands/scan/cmd-scan-create.mts index 67f38ed3c..9a0a4d7c8 100644 --- a/packages/cli/src/commands/scan/cmd-scan-create.mts +++ b/packages/cli/src/commands/scan/cmd-scan-create.mts @@ -1,8 +1,8 @@ import { existsSync, promises as fs } from 'node:fs' import path from 'node:path' -import { joinAnd } from '@socketsecurity/lib/arrays' -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { joinAnd } from '@socketsecurity/lib-internal/arrays' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { handleCreateNewScan } from './handle-create-new-scan.mts' import { outputCreateNewScan } from './output-create-new-scan.mts' diff --git a/packages/cli/src/commands/scan/cmd-scan-del.mts b/packages/cli/src/commands/scan/cmd-scan-del.mts index e4ee1e132..b236f260e 100644 --- a/packages/cli/src/commands/scan/cmd-scan-del.mts +++ b/packages/cli/src/commands/scan/cmd-scan-del.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { handleDeleteScan } from './handle-delete-scan.mts' import { DRY_RUN_BAILING_NOW } from '../../constants/cli.mts' diff --git a/packages/cli/src/commands/scan/cmd-scan-diff.mts b/packages/cli/src/commands/scan/cmd-scan-diff.mts index 0116acbf4..8bd30d486 100644 --- a/packages/cli/src/commands/scan/cmd-scan-diff.mts +++ b/packages/cli/src/commands/scan/cmd-scan-diff.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { handleDiffScan } from './handle-diff-scan.mts' import { diff --git a/packages/cli/src/commands/scan/cmd-scan-github.mts b/packages/cli/src/commands/scan/cmd-scan-github.mts index c2b29d62e..ee4f1b9a3 100644 --- a/packages/cli/src/commands/scan/cmd-scan-github.mts +++ b/packages/cli/src/commands/scan/cmd-scan-github.mts @@ -1,6 +1,6 @@ import path from 'node:path' -import { getDefaultLogger } from 
'@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { handleCreateGithubScan } from './handle-create-github-scan.mts' import { outputScanGithub } from './output-scan-github.mts' diff --git a/packages/cli/src/commands/scan/cmd-scan-list.mts b/packages/cli/src/commands/scan/cmd-scan-list.mts index e0c6c5ca7..079efbb18 100644 --- a/packages/cli/src/commands/scan/cmd-scan-list.mts +++ b/packages/cli/src/commands/scan/cmd-scan-list.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { handleListScans } from './handle-list-scans.mts' import { DRY_RUN_BAILING_NOW } from '../../constants/cli.mts' diff --git a/packages/cli/src/commands/scan/cmd-scan-metadata.mts b/packages/cli/src/commands/scan/cmd-scan-metadata.mts index 8d8050dfa..a71b9d7ff 100644 --- a/packages/cli/src/commands/scan/cmd-scan-metadata.mts +++ b/packages/cli/src/commands/scan/cmd-scan-metadata.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { handleOrgScanMetadata } from './handle-scan-metadata.mts' import { DRY_RUN_BAILING_NOW } from '../../constants/cli.mts' diff --git a/packages/cli/src/commands/scan/cmd-scan-reach.mts b/packages/cli/src/commands/scan/cmd-scan-reach.mts index 6d785072a..93296b9cd 100644 --- a/packages/cli/src/commands/scan/cmd-scan-reach.mts +++ b/packages/cli/src/commands/scan/cmd-scan-reach.mts @@ -1,8 +1,8 @@ import { existsSync, promises as fs } from 'node:fs' import path from 'node:path' -import { joinAnd } from '@socketsecurity/lib/arrays' -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { joinAnd } from '@socketsecurity/lib-internal/arrays' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { handleScanReach } from './handle-scan-reach.mts' import { reachabilityFlags } from './reachability-flags.mts' diff --git a/packages/cli/src/commands/scan/cmd-scan-report.mts b/packages/cli/src/commands/scan/cmd-scan-report.mts index 8fdf0e151..5f47bf329 100644 --- a/packages/cli/src/commands/scan/cmd-scan-report.mts +++ b/packages/cli/src/commands/scan/cmd-scan-report.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { handleScanReport } from './handle-scan-report.mts' import { DRY_RUN_BAILING_NOW, FOLD_SETTING_NONE } from '../../constants/cli.mts' diff --git a/packages/cli/src/commands/scan/cmd-scan-setup.mts b/packages/cli/src/commands/scan/cmd-scan-setup.mts index 7723079a9..3129ef6dc 100644 --- a/packages/cli/src/commands/scan/cmd-scan-setup.mts +++ b/packages/cli/src/commands/scan/cmd-scan-setup.mts @@ -1,6 +1,6 @@ import path from 'node:path' -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { handleScanConfig } from './handle-scan-config.mts' import { DRY_RUN_BAILING_NOW } from '../../constants/cli.mts' diff --git a/packages/cli/src/commands/scan/cmd-scan-view.mts b/packages/cli/src/commands/scan/cmd-scan-view.mts index 7e08aec94..6a96522c2 100644 --- a/packages/cli/src/commands/scan/cmd-scan-view.mts +++ b/packages/cli/src/commands/scan/cmd-scan-view.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from 
'@socketsecurity/lib-internal/logger' import { handleScanView } from './handle-scan-view.mts' import { streamScan } from './stream-scan.mts' diff --git a/packages/cli/src/commands/scan/create-scan-from-github.mts b/packages/cli/src/commands/scan/create-scan-from-github.mts index 63cfe55d2..7c732c017 100644 --- a/packages/cli/src/commands/scan/create-scan-from-github.mts +++ b/packages/cli/src/commands/scan/create-scan-from-github.mts @@ -8,10 +8,10 @@ import os from 'node:os' import path from 'node:path' import { pipeline } from 'node:stream/promises' -import { debug, debugDir } from '@socketsecurity/lib/debug' -import { safeMkdirSync } from '@socketsecurity/lib/fs' -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import { confirm, select } from '@socketsecurity/lib/stdio/prompts' +import { debug, debugDir } from '@socketsecurity/lib-internal/debug' +import { safeMkdirSync } from '@socketsecurity/lib-internal/fs' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' +import { confirm, select } from '@socketsecurity/lib-internal/stdio/prompts' import { fetchSupportedScanFileNames } from './fetch-supported-scan-file-names.mts' import { handleCreateNewScan } from './handle-create-new-scan.mts' diff --git a/packages/cli/src/commands/scan/fetch-create-org-full-scan.mts b/packages/cli/src/commands/scan/fetch-create-org-full-scan.mts index 0dfcca3e7..37b2de928 100644 --- a/packages/cli/src/commands/scan/fetch-create-org-full-scan.mts +++ b/packages/cli/src/commands/scan/fetch-create-org-full-scan.mts @@ -3,7 +3,7 @@ import { setupSdk } from '../../utils/socket/sdk.mjs' import type { CResult } from '../../types.mts' import type { SetupSdkOptions } from '../../utils/socket/sdk.mjs' -import type { Spinner } from '@socketsecurity/lib/spinner' +import type { Spinner } from '@socketsecurity/lib-internal/spinner' import type { SocketSdkSuccessResult } from '@socketsecurity/sdk' export type FetchCreateOrgFullScanConfigs = { diff --git a/packages/cli/src/commands/scan/fetch-diff-scan.mts b/packages/cli/src/commands/scan/fetch-diff-scan.mts index 3c069041e..e4635b745 100644 --- a/packages/cli/src/commands/scan/fetch-diff-scan.mts +++ b/packages/cli/src/commands/scan/fetch-diff-scan.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { queryApiSafeJson } from '../../utils/socket/api.mjs' diff --git a/packages/cli/src/commands/scan/fetch-report-data.mts b/packages/cli/src/commands/scan/fetch-report-data.mts index 173dafc52..6bd3e991c 100644 --- a/packages/cli/src/commands/scan/fetch-report-data.mts +++ b/packages/cli/src/commands/scan/fetch-report-data.mts @@ -1,6 +1,6 @@ -import { debug, debugDir } from '@socketsecurity/lib/debug' -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import { getDefaultSpinner } from '@socketsecurity/lib/spinner' +import { debug, debugDir } from '@socketsecurity/lib-internal/debug' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' +import { getDefaultSpinner } from '@socketsecurity/lib-internal/spinner' const logger = getDefaultLogger() const spinner = getDefaultSpinner() diff --git a/packages/cli/src/commands/scan/fetch-scan.mts b/packages/cli/src/commands/scan/fetch-scan.mts index b375b7783..08cba2d8b 100644 --- a/packages/cli/src/commands/scan/fetch-scan.mts +++ b/packages/cli/src/commands/scan/fetch-scan.mts @@ -1,4 +1,4 @@ -import { debug, debugDir } from '@socketsecurity/lib/debug' +import { 
debug, debugDir } from '@socketsecurity/lib-internal/debug' import { queryApiSafeText } from '../../utils/socket/api.mjs' diff --git a/packages/cli/src/commands/scan/fetch-supported-scan-file-names.mts b/packages/cli/src/commands/scan/fetch-supported-scan-file-names.mts index 5dfb6e58a..a084a726d 100644 --- a/packages/cli/src/commands/scan/fetch-supported-scan-file-names.mts +++ b/packages/cli/src/commands/scan/fetch-supported-scan-file-names.mts @@ -3,7 +3,7 @@ import { setupSdk } from '../../utils/socket/sdk.mjs' import type { CResult } from '../../types.mts' import type { SetupSdkOptions } from '../../utils/socket/sdk.mjs' -import type { Spinner } from '@socketsecurity/lib/spinner' +import type { Spinner } from '@socketsecurity/lib-internal/spinner' import type { SocketSdkSuccessResult } from '@socketsecurity/sdk' export type FetchSupportedScanFileNamesOptions = { diff --git a/packages/cli/src/commands/scan/generate-report.mts b/packages/cli/src/commands/scan/generate-report.mts index 73d2f95fa..94635ed13 100644 --- a/packages/cli/src/commands/scan/generate-report.mts +++ b/packages/cli/src/commands/scan/generate-report.mts @@ -1,4 +1,4 @@ -import { UNKNOWN_VALUE } from '@socketsecurity/lib/constants/core' +import { UNKNOWN_VALUE } from '@socketsecurity/lib-internal/constants/core' import { FOLD_SETTING_FILE, @@ -17,7 +17,7 @@ import { getSocketDevPackageOverviewUrlFromPurl } from '../../utils/socket/url.m import type { FOLD_SETTING, REPORT_LEVEL } from './types.mts' import type { CResult } from '../../types.mts' import type { SocketArtifact } from '../../utils/alert/artifact.mts' -import type { Spinner } from '@socketsecurity/lib/spinner' +import type { Spinner } from '@socketsecurity/lib-internal/spinner' import type { SocketSdkSuccessResult } from '@socketsecurity/sdk' type AlertKey = string diff --git a/packages/cli/src/commands/scan/handle-create-new-scan.mts b/packages/cli/src/commands/scan/handle-create-new-scan.mts index f7a68e522..cc8029dc6 100644 --- a/packages/cli/src/commands/scan/handle-create-new-scan.mts +++ b/packages/cli/src/commands/scan/handle-create-new-scan.mts @@ -1,9 +1,9 @@ import path from 'node:path' -import { getSpinner } from '@socketsecurity/lib/constants/process' -import { debug, debugDir } from '@socketsecurity/lib/debug' -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import { pluralize } from '@socketsecurity/lib/words' +import { getSpinner } from '@socketsecurity/lib-internal/constants/process' +import { debug, debugDir } from '@socketsecurity/lib-internal/debug' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' +import { pluralize } from '@socketsecurity/lib-internal/words' import { fetchCreateOrgFullScan } from './fetch-create-org-full-scan.mts' import { fetchSupportedScanFileNames } from './fetch-supported-scan-file-names.mts' @@ -23,7 +23,7 @@ import { generateAutoManifest } from '../manifest/generate_auto_manifest.mts' import type { ReachabilityOptions } from './perform-reachability-analysis.mts' import type { REPORT_LEVEL } from './types.mts' import type { OutputKind } from '../../types.mts' -import type { Remap } from '@socketsecurity/lib/objects' +import type { Remap } from '@socketsecurity/lib-internal/objects' const logger = getDefaultLogger() export type HandleCreateNewScanConfig = { diff --git a/packages/cli/src/commands/scan/handle-scan-reach.mts b/packages/cli/src/commands/scan/handle-scan-reach.mts index 092be05bf..a35b8cd60 100644 --- a/packages/cli/src/commands/scan/handle-scan-reach.mts +++ 
b/packages/cli/src/commands/scan/handle-scan-reach.mts @@ -1,6 +1,6 @@ -import { getSpinner } from '@socketsecurity/lib/constants/process' -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import { pluralize } from '@socketsecurity/lib/words' +import { getSpinner } from '@socketsecurity/lib-internal/constants/process' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' +import { pluralize } from '@socketsecurity/lib-internal/words' import { fetchSupportedScanFileNames } from './fetch-supported-scan-file-names.mts' import { outputScanReach } from './output-scan-reach.mts' diff --git a/packages/cli/src/commands/scan/output-create-new-scan.mts b/packages/cli/src/commands/scan/output-create-new-scan.mts index 00076773a..abe7cb59c 100644 --- a/packages/cli/src/commands/scan/output-create-new-scan.mts +++ b/packages/cli/src/commands/scan/output-create-new-scan.mts @@ -1,16 +1,16 @@ import open from 'open' import terminalLink from 'terminal-link' -import { getSpinner } from '@socketsecurity/lib/constants/process' -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import { confirm } from '@socketsecurity/lib/stdio/prompts' +import { getSpinner } from '@socketsecurity/lib-internal/constants/process' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' +import { confirm } from '@socketsecurity/lib-internal/stdio/prompts' import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts' import { mdHeader } from '../../utils/output/markdown.mts' import { serializeResultJson } from '../../utils/output/result-json.mjs' import type { CResult, OutputKind } from '../../types.mts' -import type { Spinner } from '@socketsecurity/lib/spinner' +import type { Spinner } from '@socketsecurity/lib-internal/spinner' import type { SocketSdkSuccessResult } from '@socketsecurity/sdk' const logger = getDefaultLogger() diff --git a/packages/cli/src/commands/scan/output-delete-scan.mts b/packages/cli/src/commands/scan/output-delete-scan.mts index 2e6a2831c..6aa477900 100644 --- a/packages/cli/src/commands/scan/output-delete-scan.mts +++ b/packages/cli/src/commands/scan/output-delete-scan.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts' import { serializeResultJson } from '../../utils/output/result-json.mjs' diff --git a/packages/cli/src/commands/scan/output-diff-scan.mts b/packages/cli/src/commands/scan/output-diff-scan.mts index 310b73b50..2611f6512 100644 --- a/packages/cli/src/commands/scan/output-diff-scan.mts +++ b/packages/cli/src/commands/scan/output-diff-scan.mts @@ -3,7 +3,7 @@ import util from 'node:util' import colors from 'yoctocolors-cjs' -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { SOCKET_WEBSITE_URL } from '../../constants/socket.mts' import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts' diff --git a/packages/cli/src/commands/scan/output-list-scans.mts b/packages/cli/src/commands/scan/output-list-scans.mts index 82a8674c2..4b44165a7 100644 --- a/packages/cli/src/commands/scan/output-list-scans.mts +++ b/packages/cli/src/commands/scan/output-list-scans.mts @@ -1,7 +1,7 @@ import chalkTable from 'chalk-table' import colors from 'yoctocolors-cjs' -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger 
} from '@socketsecurity/lib-internal/logger' import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts' import { serializeResultJson } from '../../utils/output/result-json.mjs' diff --git a/packages/cli/src/commands/scan/output-scan-config-result.mts b/packages/cli/src/commands/scan/output-scan-config-result.mts index 623c3e9fd..d28c3ab3e 100644 --- a/packages/cli/src/commands/scan/output-scan-config-result.mts +++ b/packages/cli/src/commands/scan/output-scan-config-result.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts' diff --git a/packages/cli/src/commands/scan/output-scan-github.mts b/packages/cli/src/commands/scan/output-scan-github.mts index d0c10cead..84c2c1b35 100644 --- a/packages/cli/src/commands/scan/output-scan-github.mts +++ b/packages/cli/src/commands/scan/output-scan-github.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts' import { serializeResultJson } from '../../utils/output/result-json.mjs' diff --git a/packages/cli/src/commands/scan/output-scan-metadata.mts b/packages/cli/src/commands/scan/output-scan-metadata.mts index f89a0708c..616cbfa03 100644 --- a/packages/cli/src/commands/scan/output-scan-metadata.mts +++ b/packages/cli/src/commands/scan/output-scan-metadata.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts' import { mdHeader, mdKeyValue } from '../../utils/output/markdown.mts' diff --git a/packages/cli/src/commands/scan/output-scan-reach.mts b/packages/cli/src/commands/scan/output-scan-reach.mts index 9a18f27ad..c8b39157c 100644 --- a/packages/cli/src/commands/scan/output-scan-reach.mts +++ b/packages/cli/src/commands/scan/output-scan-reach.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { DOT_SOCKET_DOT_FACTS_JSON } from '../../constants/paths.mts' import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts' diff --git a/packages/cli/src/commands/scan/output-scan-report.mts b/packages/cli/src/commands/scan/output-scan-report.mts index c43f93abc..30931738c 100644 --- a/packages/cli/src/commands/scan/output-scan-report.mts +++ b/packages/cli/src/commands/scan/output-scan-report.mts @@ -1,8 +1,8 @@ import fs from 'node:fs/promises' -import { joinAnd } from '@socketsecurity/lib/arrays' -import { getSpinner } from '@socketsecurity/lib/constants/process' -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { joinAnd } from '@socketsecurity/lib-internal/arrays' +import { getSpinner } from '@socketsecurity/lib-internal/constants/process' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { generateReport } from './generate-report.mts' import { diff --git a/packages/cli/src/commands/scan/output-scan-view.mts b/packages/cli/src/commands/scan/output-scan-view.mts index 1aff9b9a5..46e53b5be 100644 --- a/packages/cli/src/commands/scan/output-scan-view.mts +++ b/packages/cli/src/commands/scan/output-scan-view.mts @@ -1,6 +1,6 @@ import fs from 
'node:fs/promises' -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { SOCKET_WEBSITE_URL } from '../../constants/socket.mts' import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts' diff --git a/packages/cli/src/commands/scan/perform-reachability-analysis.mts b/packages/cli/src/commands/scan/perform-reachability-analysis.mts index 1bc730fe8..05f6f70ec 100644 --- a/packages/cli/src/commands/scan/perform-reachability-analysis.mts +++ b/packages/cli/src/commands/scan/perform-reachability-analysis.mts @@ -15,7 +15,7 @@ import { fetchOrganization } from '../organization/fetch-organization-list.mts' import type { CResult } from '../../types.mts' import type { PURL_Type } from '../../utils/ecosystem/types.mjs' -import type { Spinner } from '@socketsecurity/lib/spinner' +import type { Spinner } from '@socketsecurity/lib-internal/spinner' export type ReachabilityOptions = { reachAnalysisTimeout: number diff --git a/packages/cli/src/commands/scan/setup-scan-config.mts b/packages/cli/src/commands/scan/setup-scan-config.mts index 0393f6c01..1f2ff77d3 100644 --- a/packages/cli/src/commands/scan/setup-scan-config.mts +++ b/packages/cli/src/commands/scan/setup-scan-config.mts @@ -1,8 +1,8 @@ import fs from 'node:fs' import path from 'node:path' -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import { input, select } from '@socketsecurity/lib/stdio/prompts' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' +import { input, select } from '@socketsecurity/lib-internal/stdio/prompts' import ENV from '../../constants/env.mts' import { SOCKET_JSON } from '../../constants/paths.mts' diff --git a/packages/cli/src/commands/scan/stream-scan.mts b/packages/cli/src/commands/scan/stream-scan.mts index d1b14c876..a360ddd97 100644 --- a/packages/cli/src/commands/scan/stream-scan.mts +++ b/packages/cli/src/commands/scan/stream-scan.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { handleApiCall } from '../../utils/socket/api.mjs' import { setupSdk } from '../../utils/socket/sdk.mjs' diff --git a/packages/cli/src/commands/scan/suggest-org-slug.mts b/packages/cli/src/commands/scan/suggest-org-slug.mts index 55b09dea6..cd4ea089a 100644 --- a/packages/cli/src/commands/scan/suggest-org-slug.mts +++ b/packages/cli/src/commands/scan/suggest-org-slug.mts @@ -1,5 +1,5 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import { select } from '@socketsecurity/lib/stdio/prompts' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' +import { select } from '@socketsecurity/lib-internal/stdio/prompts' import { fetchOrganization } from '../organization/fetch-organization-list.mts' diff --git a/packages/cli/src/commands/scan/suggest-to-persist-orgslug.mts b/packages/cli/src/commands/scan/suggest-to-persist-orgslug.mts index 1b4fa77a7..8d2a4eaf8 100644 --- a/packages/cli/src/commands/scan/suggest-to-persist-orgslug.mts +++ b/packages/cli/src/commands/scan/suggest-to-persist-orgslug.mts @@ -1,5 +1,5 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import { select } from '@socketsecurity/lib/stdio/prompts' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' +import { select } from '@socketsecurity/lib-internal/stdio/prompts' import { getConfigValue, updateConfigValue } from '../../utils/config.mts' const 
logger = getDefaultLogger() diff --git a/packages/cli/src/commands/scan/suggest_branch_slug.mts b/packages/cli/src/commands/scan/suggest_branch_slug.mts index 63be95a89..3968d43d0 100644 --- a/packages/cli/src/commands/scan/suggest_branch_slug.mts +++ b/packages/cli/src/commands/scan/suggest_branch_slug.mts @@ -1,6 +1,6 @@ -import { spawn } from '@socketsecurity/lib/spawn' -import { select } from '@socketsecurity/lib/stdio/prompts' -import { stripAnsi } from '@socketsecurity/lib/strings' +import { spawn } from '@socketsecurity/lib-internal/spawn' +import { select } from '@socketsecurity/lib-internal/stdio/prompts' +import { stripAnsi } from '@socketsecurity/lib-internal/strings' export async function suggestBranchSlug( repoDefaultBranch: string | undefined, diff --git a/packages/cli/src/commands/scan/suggest_target.mts b/packages/cli/src/commands/scan/suggest_target.mts index 538b59b4d..adb96c35c 100644 --- a/packages/cli/src/commands/scan/suggest_target.mts +++ b/packages/cli/src/commands/scan/suggest_target.mts @@ -1,4 +1,4 @@ -import { select } from '@socketsecurity/lib/stdio/prompts' +import { select } from '@socketsecurity/lib-internal/stdio/prompts' export async function suggestTarget(): Promise { // We could prefill this with sub-dirs of the current diff --git a/packages/cli/src/commands/self-update/handle-self-update.mts b/packages/cli/src/commands/self-update/handle-self-update.mts index 5543c41dc..511d27922 100644 --- a/packages/cli/src/commands/self-update/handle-self-update.mts +++ b/packages/cli/src/commands/self-update/handle-self-update.mts @@ -11,10 +11,10 @@ import path from 'node:path' import colors from 'yoctocolors-cjs' -import { detectPackageManager } from '@socketsecurity/lib/env/package-manager' -import { safeMkdir } from '@socketsecurity/lib/fs' -import { getIpcStubPath } from '@socketsecurity/lib/ipc' -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { detectPackageManager } from '@socketsecurity/lib-internal/env/package-manager' +import { safeMkdir } from '@socketsecurity/lib-internal/fs' +import { getIpcStubPath } from '@socketsecurity/lib-internal/ipc' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { outputSelfUpdate } from './output-self-update.mts' import ENV from '../../constants/env.mts' diff --git a/packages/cli/src/commands/self-update/output-self-update.mts b/packages/cli/src/commands/self-update/output-self-update.mts index e6699075e..ed15af97c 100644 --- a/packages/cli/src/commands/self-update/output-self-update.mts +++ b/packages/cli/src/commands/self-update/output-self-update.mts @@ -4,7 +4,7 @@ import colors from 'yoctocolors-cjs' -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' const logger = getDefaultLogger() /** diff --git a/packages/cli/src/commands/threat-feed/cmd-threat-feed.mts b/packages/cli/src/commands/threat-feed/cmd-threat-feed.mts index 1352b6506..3960081fa 100644 --- a/packages/cli/src/commands/threat-feed/cmd-threat-feed.mts +++ b/packages/cli/src/commands/threat-feed/cmd-threat-feed.mts @@ -1,6 +1,6 @@ -import { joinAnd } from '@socketsecurity/lib/arrays' -import { NPM } from '@socketsecurity/lib/constants/agents' -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { joinAnd } from '@socketsecurity/lib-internal/arrays' +import { NPM } from '@socketsecurity/lib-internal/constants/agents' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { 
handleThreatFeed } from './handle-threat-feed.mts' import { DRY_RUN_BAILING_NOW } from '../../constants/cli.mts' diff --git a/packages/cli/src/commands/threat-feed/output-threat-feed.mts b/packages/cli/src/commands/threat-feed/output-threat-feed.mts index 5de45f5cc..c01ca63f2 100644 --- a/packages/cli/src/commands/threat-feed/output-threat-feed.mts +++ b/packages/cli/src/commands/threat-feed/output-threat-feed.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts' import { serializeResultJson } from '../../utils/output/result-json.mjs' diff --git a/packages/cli/src/commands/threat-feed/threat-feed-app-cli.mts b/packages/cli/src/commands/threat-feed/threat-feed-app-cli.mts index 4e1145c26..5a7b1409a 100644 --- a/packages/cli/src/commands/threat-feed/threat-feed-app-cli.mts +++ b/packages/cli/src/commands/threat-feed/threat-feed-app-cli.mts @@ -6,7 +6,7 @@ import { pathToFileURL } from 'node:url' import { render } from 'ink' import React from 'react' -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' const logger = getDefaultLogger() diff --git a/packages/cli/src/commands/uninstall/cmd-uninstall-completion.mts b/packages/cli/src/commands/uninstall/cmd-uninstall-completion.mts index a406c81ad..4933ebb56 100644 --- a/packages/cli/src/commands/uninstall/cmd-uninstall-completion.mts +++ b/packages/cli/src/commands/uninstall/cmd-uninstall-completion.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { handleUninstallCompletion } from './handle-uninstall-completion.mts' import { DRY_RUN_BAILING_NOW } from '../../constants/cli.mts' diff --git a/packages/cli/src/commands/uninstall/output-uninstall-completion.mts b/packages/cli/src/commands/uninstall/output-uninstall-completion.mts index 74151804b..32d1b5dbc 100644 --- a/packages/cli/src/commands/uninstall/output-uninstall-completion.mts +++ b/packages/cli/src/commands/uninstall/output-uninstall-completion.mts @@ -1,4 +1,4 @@ -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { failMsgWithBadge } from '../../utils/error/fail-msg-with-badge.mts' diff --git a/packages/cli/src/commands/yarn/cmd-yarn.mts b/packages/cli/src/commands/yarn/cmd-yarn.mts index 0c14f7eb0..baa207f7f 100644 --- a/packages/cli/src/commands/yarn/cmd-yarn.mts +++ b/packages/cli/src/commands/yarn/cmd-yarn.mts @@ -1,7 +1,7 @@ import { createRequire } from 'node:module' -import { YARN } from '@socketsecurity/lib/constants/agents' -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { YARN } from '@socketsecurity/lib-internal/constants/agents' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import { DRY_RUN_BAILING_NOW, diff --git a/packages/cli/src/npm-cli.mts b/packages/cli/src/npm-cli.mts index 7aa80341d..e6ba2f2d8 100644 --- a/packages/cli/src/npm-cli.mts +++ b/packages/cli/src/npm-cli.mts @@ -1,6 +1,6 @@ #!/usr/bin/env node -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import shadowNpmBin from './shadow/npm/bin.mts' diff --git a/packages/cli/src/npx-cli.mts b/packages/cli/src/npx-cli.mts index 
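Aside on the mechanics: every hunk in this stretch of the patch is the same one-token change, rewriting `@socketsecurity/lib/...` import specifiers to `@socketsecurity/lib-internal/...`. A rename of this breadth is normally applied by a codemod rather than by hand. A minimal sketch of such a script follows; the root directory and extension list are assumptions for illustration, not part of this patch:

```ts
// Hypothetical codemod: rewrite @socketsecurity/lib/* specifiers to
// @socketsecurity/lib-internal/*. Illustrative only.
import { readdirSync, readFileSync, statSync, writeFileSync } from 'node:fs'
import { join } from 'node:path'

const SOURCE_EXTS = new Set(['.mts', '.ts', '.mjs'])

function* walk(dir: string): Generator<string> {
  for (const entry of readdirSync(dir)) {
    const full = join(dir, entry)
    if (statSync(full).isDirectory()) {
      if (entry !== 'node_modules' && entry !== 'dist') yield* walk(full)
    } else if (SOURCE_EXTS.has(full.slice(full.lastIndexOf('.')))) {
      yield full
    }
  }
}

// Match the bare package or a subpath, but skip specifiers that already
// say lib-internal (or lib-external, per the root package.json alias).
const SPECIFIER = /(['"])@socketsecurity\/lib(?=[/'"])/g

for (const file of walk('packages/cli')) {
  const before = readFileSync(file, 'utf8')
  const after = before.replace(SPECIFIER, '$1@socketsecurity/lib-internal')
  if (after !== before) writeFileSync(file, after)
}
```

The lookahead is what keeps the rewrite idempotent: `@socketsecurity/lib-internal/logger` is not matched a second time because the character after `lib` is `-`, not `/` or a closing quote.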
127b9b5e6..d6429e901 100644 --- a/packages/cli/src/npx-cli.mts +++ b/packages/cli/src/npx-cli.mts @@ -1,6 +1,6 @@ #!/usr/bin/env node -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import shadowNpxBin from './shadow/npx/bin.mts' diff --git a/packages/cli/src/pnpm-cli.mts b/packages/cli/src/pnpm-cli.mts index 640da74e9..b4eb64e02 100644 --- a/packages/cli/src/pnpm-cli.mts +++ b/packages/cli/src/pnpm-cli.mts @@ -1,6 +1,6 @@ #!/usr/bin/env node -import { getDefaultLogger } from '@socketsecurity/lib/logger' +import { getDefaultLogger } from '@socketsecurity/lib-internal/logger' import shadowPnpmBin from './shadow/pnpm/bin.mts' diff --git a/packages/cli/test/unit/commands/ci/handle-ci.test.mts b/packages/cli/test/unit/commands/ci/handle-ci.test.mts index 999b53042..c8c5e7af9 100644 --- a/packages/cli/test/unit/commands/ci/handle-ci.test.mts +++ b/packages/cli/test/unit/commands/ci/handle-ci.test.mts @@ -73,14 +73,14 @@ const { const mockDebugLog = vi.hoisted(() => vi.fn()) const mockIsDebug = vi.hoisted(() => vi.fn()) -vi.mock('@socketsecurity/lib/debug', () => ({ +vi.mock('@socketsecurity/lib-internal/debug', () => ({ debug: mockDebug, debugDir: mockDebugDir, debugLog: mockDebugLog, isDebug: mockIsDebug, })) -vi.mock('@socketsecurity/lib/logger', () => ({ +vi.mock('@socketsecurity/lib-internal/logger', () => ({ getDefaultLogger: () => mockLogger, logger: mockLogger, })) diff --git a/packages/cli/test/unit/commands/fix/ghsa-tracker.test.mts b/packages/cli/test/unit/commands/fix/ghsa-tracker.test.mts index 8b64659ec..6d947b4b7 100644 --- a/packages/cli/test/unit/commands/fix/ghsa-tracker.test.mts +++ b/packages/cli/test/unit/commands/fix/ghsa-tracker.test.mts @@ -59,7 +59,7 @@ vi.mock('node:fs', async () => { } }) -vi.mock('@socketsecurity/lib/fs', () => ({ +vi.mock('@socketsecurity/lib-internal/fs', () => ({ readJson: mockReadJson, safeMkdir: mockSafeMkdir, writeJson: mockWriteJson, @@ -75,7 +75,7 @@ describe('ghsa-tracker', () => { describe('loadGhsaTracker', () => { it('loads existing tracker file', async () => { - const { readJson } = await import('@socketsecurity/lib/fs') + const { readJson } = await import('@socketsecurity/lib-internal/fs') const mockTracker: GhsaTracker = { version: 1, fixed: [ @@ -97,7 +97,7 @@ describe('ghsa-tracker', () => { }) it('creates new tracker when file does not exist', async () => { - const { readJson } = await import('@socketsecurity/lib/fs') + const { readJson } = await import('@socketsecurity/lib-internal/fs') mockReadJson.mockRejectedValue(new Error('ENOENT')) const result = await loadGhsaTracker(mockCwd) @@ -109,7 +109,7 @@ describe('ghsa-tracker', () => { }) it('handles null tracker data', async () => { - const { readJson } = await import('@socketsecurity/lib/fs') + const { readJson } = await import('@socketsecurity/lib-internal/fs') mockReadJson.mockResolvedValue(null) const result = await loadGhsaTracker(mockCwd) @@ -123,7 +123,7 @@ describe('ghsa-tracker', () => { describe('saveGhsaTracker', () => { it('saves tracker to file', async () => { - const { safeMkdir, writeJson } = await import('@socketsecurity/lib/fs') + const { safeMkdir, writeJson } = await import('@socketsecurity/lib-internal/fs') const tracker: GhsaTracker = { version: 1, fixed: [ @@ -149,7 +149,7 @@ describe('ghsa-tracker', () => { describe('markGhsaFixed', () => { it('adds new GHSA fix record', async () => { - const { readJson, writeJson } = await import('@socketsecurity/lib/fs') + const { 
readJson, writeJson } = await import('@socketsecurity/lib-internal/fs') const existingTracker: GhsaTracker = { version: 1, fixed: [], @@ -176,7 +176,7 @@ describe('ghsa-tracker', () => { }) it('replaces existing GHSA fix record', async () => { - const { readJson, writeJson } = await import('@socketsecurity/lib/fs') + const { readJson, writeJson } = await import('@socketsecurity/lib-internal/fs') const existingTracker: GhsaTracker = { version: 1, fixed: [ @@ -213,7 +213,7 @@ describe('ghsa-tracker', () => { }) it('sorts records by fixedAt descending', async () => { - const { readJson, writeJson } = await import('@socketsecurity/lib/fs') + const { readJson, writeJson } = await import('@socketsecurity/lib-internal/fs') const existingTracker: GhsaTracker = { version: 1, fixed: [ @@ -237,7 +237,7 @@ describe('ghsa-tracker', () => { }) it('handles errors gracefully', async () => { - const { readJson } = await import('@socketsecurity/lib/fs') + const { readJson } = await import('@socketsecurity/lib-internal/fs') mockReadJson.mockRejectedValue(new Error('Permission denied')) // Should not throw. @@ -249,7 +249,7 @@ describe('ghsa-tracker', () => { describe('isGhsaFixed', () => { it('returns true for fixed GHSA', async () => { - const { readJson } = await import('@socketsecurity/lib/fs') + const { readJson } = await import('@socketsecurity/lib-internal/fs') const tracker: GhsaTracker = { version: 1, fixed: [ @@ -270,7 +270,7 @@ describe('ghsa-tracker', () => { }) it('returns false for unfixed GHSA', async () => { - const { readJson } = await import('@socketsecurity/lib/fs') + const { readJson } = await import('@socketsecurity/lib-internal/fs') const tracker: GhsaTracker = { version: 1, fixed: [], @@ -284,7 +284,7 @@ describe('ghsa-tracker', () => { }) it('returns false on error', async () => { - const { readJson } = await import('@socketsecurity/lib/fs') + const { readJson } = await import('@socketsecurity/lib-internal/fs') mockReadJson.mockRejectedValue(new Error('Read error')) const result = await isGhsaFixed(mockCwd, 'GHSA-1234-5678-90ab') @@ -295,7 +295,7 @@ describe('ghsa-tracker', () => { describe('getFixedGhsas', () => { it('returns all fixed GHSA records', async () => { - const { readJson } = await import('@socketsecurity/lib/fs') + const { readJson } = await import('@socketsecurity/lib-internal/fs') const tracker: GhsaTracker = { version: 1, fixed: [ @@ -323,7 +323,7 @@ describe('ghsa-tracker', () => { }) it('returns empty array on error', async () => { - const { readJson } = await import('@socketsecurity/lib/fs') + const { readJson } = await import('@socketsecurity/lib-internal/fs') mockReadJson.mockRejectedValue(new Error('Read error')) const result = await getFixedGhsas(mockCwd) diff --git a/packages/cli/test/unit/commands/fix/handle-fix.test.mts b/packages/cli/test/unit/commands/fix/handle-fix.test.mts index 0af0748f1..d91de2fe7 100644 --- a/packages/cli/test/unit/commands/fix/handle-fix.test.mts +++ b/packages/cli/test/unit/commands/fix/handle-fix.test.mts @@ -36,7 +36,7 @@ const mockJoinAnd = vi.hoisted(() => vi.fn(arr => arr.join(' and '))) const mockCoanaFix = vi.hoisted(() => vi.fn()) const mockOutputFixResult = vi.hoisted(() => vi.fn()) -vi.mock('@socketsecurity/lib/arrays', () => ({ +vi.mock('@socketsecurity/lib-internal/arrays', () => ({ joinAnd: mockJoinAnd, })) @@ -52,7 +52,7 @@ const mockLogger = vi.hoisted(() => ({ const mockConvertCveToGhsa = vi.hoisted(() => vi.fn()) const mockConvertPurlToGhsas = vi.hoisted(() => vi.fn()) -vi.mock('@socketsecurity/lib/logger', () => ({ 
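The test-file hunks here are worth a note beyond the rename itself: they all lean on the same vitest idiom. `vi.mock()` factory calls are hoisted above the file's imports, so any mock functions the factory closes over must be created with `vi.hoisted()`, and assertions then re-import the mocked module dynamically to reach the registered mocks. A condensed illustration (module path as in this patch; the test subject is invented):

```ts
import { describe, expect, it, vi } from 'vitest'

// vi.mock() calls are hoisted above imports, so values their factories
// close over must be hoisted too, or they would be undefined at mock time.
const mockReadJson = vi.hoisted(() => vi.fn())

vi.mock('@socketsecurity/lib-internal/fs', () => ({
  readJson: mockReadJson,
}))

describe('tracker loading', () => {
  it('surfaces a read failure from the mocked fs module', async () => {
    // A dynamic import resolves to the mocked module registered above.
    const { readJson } = await import('@socketsecurity/lib-internal/fs')
    mockReadJson.mockRejectedValue(new Error('ENOENT'))

    await expect(readJson('/no/such/file.json')).rejects.toThrow('ENOENT')
    expect(readJson).toBe(mockReadJson)
  })
})
```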
+vi.mock('@socketsecurity/lib-internal/logger', () => ({ getDefaultLogger: () => mockLogger, logger: mockLogger, })) diff --git a/packages/cli/test/unit/commands/fix/pr-lifecycle-logger.test.mts b/packages/cli/test/unit/commands/fix/pr-lifecycle-logger.test.mts index 15e1eb6fd..3bb3d2174 100644 --- a/packages/cli/test/unit/commands/fix/pr-lifecycle-logger.test.mts +++ b/packages/cli/test/unit/commands/fix/pr-lifecycle-logger.test.mts @@ -40,7 +40,7 @@ const mockLogger = vi.hoisted(() => ({ fail: vi.fn(), })) -vi.mock('@socketsecurity/lib/logger', () => ({ +vi.mock('@socketsecurity/lib-internal/logger', () => ({ logger: mockLogger, getDefaultLogger: () => mockLogger, })) diff --git a/packages/cli/test/unit/commands/package/handle-purl-deep-score.test.mts b/packages/cli/test/unit/commands/package/handle-purl-deep-score.test.mts index bf898e0ae..64c2ece81 100644 --- a/packages/cli/test/unit/commands/package/handle-purl-deep-score.test.mts +++ b/packages/cli/test/unit/commands/package/handle-purl-deep-score.test.mts @@ -40,7 +40,7 @@ vi.mock('../../../../src/commands/package/fetch-purl-deep-score.mts', () => ({ vi.mock('../../../../src/commands/package/output-purls-deep-score.mts', () => ({ outputPurlsDeepScore: mockOutputPurlsDeepScore, })) -vi.mock('@socketsecurity/lib/debug', () => ({ +vi.mock('@socketsecurity/lib-internal/debug', () => ({ debug: mockDebug, debugDir: mockDebugDir, isDebug: mockIsDebug, diff --git a/packages/cli/test/unit/commands/package/handle-purls-shallow-score.test.mts b/packages/cli/test/unit/commands/package/handle-purls-shallow-score.test.mts index cc85e98e7..f48c2bbad 100644 --- a/packages/cli/test/unit/commands/package/handle-purls-shallow-score.test.mts +++ b/packages/cli/test/unit/commands/package/handle-purls-shallow-score.test.mts @@ -26,7 +26,7 @@ import { beforeEach, describe, expect, it, vi } from 'vitest' -import { debug, debugDir } from '@socketsecurity/lib/debug' +import { debug, debugDir } from '@socketsecurity/lib-internal/debug' import { fetchPurlsShallowScore } from '../../../../src/commands/package/fetch-purls-shallow-score.mts' import { handlePurlsShallowScore } from '../../../../src/commands/package/handle-purls-shallow-score.mts' @@ -51,7 +51,7 @@ vi.mock( outputPurlsShallowScore: mockOutputPurlsShallowScore, }), ) -vi.mock('@socketsecurity/lib/debug', () => ({ +vi.mock('@socketsecurity/lib-internal/debug', () => ({ _debug: mock_debug, debug: mockDebug, debugDir: mockDebugDir, diff --git a/packages/cli/test/unit/commands/scan/fetch-diff-scan.test.mts b/packages/cli/test/unit/commands/scan/fetch-diff-scan.test.mts index 42dccf290..94294dc0b 100644 --- a/packages/cli/test/unit/commands/scan/fetch-diff-scan.test.mts +++ b/packages/cli/test/unit/commands/scan/fetch-diff-scan.test.mts @@ -39,7 +39,7 @@ const mockLogger = vi.hoisted(() => ({ const mockQueryApiSafeJson = vi.hoisted(() => vi.fn()) const mockGetDefaultApiToken = vi.hoisted(() => vi.fn(() => 'test-token')) -vi.mock('@socketsecurity/lib/logger', () => ({ +vi.mock('@socketsecurity/lib-internal/logger', () => ({ getDefaultLogger: () => mockLogger, logger: mockLogger, })) diff --git a/packages/cli/test/unit/commands/scan/fetch-scan.test.mts b/packages/cli/test/unit/commands/scan/fetch-scan.test.mts index 7d108b363..0453713c9 100644 --- a/packages/cli/test/unit/commands/scan/fetch-scan.test.mts +++ b/packages/cli/test/unit/commands/scan/fetch-scan.test.mts @@ -39,7 +39,7 @@ const mockDebugDir = vi.hoisted(() => vi.fn()) const mockIsDebug = vi.hoisted(() => vi.fn()) const mockGetDefaultApiToken = 
vi.hoisted(() => vi.fn(() => 'test-token')) -vi.mock('@socketsecurity/lib/logger', () => ({ +vi.mock('@socketsecurity/lib-internal/logger', () => ({ getDefaultLogger: () => mockLogger, logger: mockLogger, })) @@ -48,7 +48,7 @@ vi.mock('../../../../src/utils/socket/api.mjs', () => ({ queryApiSafeText: mockQueryApiSafeText, })) -vi.mock('@socketsecurity/lib/debug', () => ({ +vi.mock('@socketsecurity/lib-internal/debug', () => ({ debug: mockDebug, debugDir: mockDebugDir, isDebug: mockIsDebug, diff --git a/packages/cli/test/unit/commands/scan/output-create-new-scan.test.mts b/packages/cli/test/unit/commands/scan/output-create-new-scan.test.mts index 67302240a..eb7d55276 100644 --- a/packages/cli/test/unit/commands/scan/output-create-new-scan.test.mts +++ b/packages/cli/test/unit/commands/scan/output-create-new-scan.test.mts @@ -47,7 +47,7 @@ const mockSerializeResultJson = vi.hoisted(() => const mockOpenDefault = vi.hoisted(() => vi.fn()) const mockConfirmFn = vi.hoisted(() => vi.fn()) -vi.mock('@socketsecurity/lib/logger', () => ({ +vi.mock('@socketsecurity/lib-internal/logger', () => ({ getDefaultLogger: () => mockLogger, logger: mockLogger, })) @@ -68,7 +68,7 @@ vi.mock('terminal-link', () => ({ default: vi.fn((text: string, url: string) => `[${text}](${url})`), })) -vi.mock('@socketsecurity/lib/stdio/prompts', () => ({ +vi.mock('@socketsecurity/lib-internal/stdio/prompts', () => ({ confirm: mockConfirmFn, })) diff --git a/packages/cli/test/unit/commands/threat-feed/output-threat-feed.test.mts b/packages/cli/test/unit/commands/threat-feed/output-threat-feed.test.mts index fe0563d63..7af0884e9 100644 --- a/packages/cli/test/unit/commands/threat-feed/output-threat-feed.test.mts +++ b/packages/cli/test/unit/commands/threat-feed/output-threat-feed.test.mts @@ -47,7 +47,7 @@ const mockLog = mockLogger.log const mockFail = mockLogger.fail const mockWarn = mockLogger.warn -vi.mock('@socketsecurity/lib/logger', () => ({ +vi.mock('@socketsecurity/lib-internal/logger', () => ({ getDefaultLogger: () => mockLogger, logger: mockLogger, })) diff --git a/packages/codet5-models-builder/README.md b/packages/codet5-models-builder/README.md deleted file mode 100644 index 851564317..000000000 --- a/packages/codet5-models-builder/README.md +++ /dev/null @@ -1,99 +0,0 @@ -# codet5-models - -CodeT5 model conversion and optimization for Socket CLI security analysis. - -## Purpose - -This package converts and optimizes CodeT5 models for use in Socket CLI: -- **Model conversion**: Convert PyTorch/Transformers models to ONNX format -- **Quantization**: Apply INT8/INT4 mixed-precision quantization -- **Optimization**: ONNX graph optimizations for inference -- **Expected savings**: 5-14MB per model through quantization - -## Build Process - -The build follows these steps: - -1. **Download models** - Fetch CodeT5 models from Hugging Face -2. **Convert to ONNX** - Export models to ONNX format -3. **Apply quantization** - Use mixed-precision INT4/INT8 quantization -4. **Optimize graphs** - Apply ONNX optimization passes -5. **Verify** - Test inference with sample inputs -6. 
**Export** - Copy to distribution location - -## Usage - -**Build and optimize models:** -```bash -pnpm run build -``` - -**Force rebuild (ignore checkpoints):** -```bash -pnpm run build:force -``` - -**Clean build artifacts:** -```bash -pnpm run clean -``` - -## Configuration - -Build configuration in `scripts/build.mjs`: -- **Models**: List of CodeT5 model names to process -- **Quantization strategy**: INT4/INT8 mixed precision settings -- **Optimization level**: ONNX optimization passes - -## Models - -The following CodeT5 models are converted and optimized: -- **Encoder model**: For embedding code snippets -- **Decoder model**: For generating suggestions -- **Tokenizer**: Vocabulary and tokenization rules - -## Quantization Strategy - -Mixed-precision quantization reduces model size while maintaining accuracy: -- **Attention layers**: INT8 quantization (higher precision for important computations) -- **Feed-forward layers**: INT4 quantization (lower precision, more compression) -- **Embeddings**: INT8 quantization (preserve token representations) -- **Layer norm**: FP32 (no quantization for normalization layers) - -## Output - -Optimized models are exported to: -- `build/models/encoder.onnx` - Quantized encoder model -- `build/models/decoder.onnx` - Quantized decoder model -- `build/models/tokenizer.json` - Tokenizer configuration - -## Checkpoints - -The build uses checkpoints for incremental builds: -- `downloaded` - Models downloaded from Hugging Face -- `converted` - Models converted to ONNX -- `quantized` - Quantization applied -- `optimized` - Graph optimizations applied -- `verified` - Inference tested - -Use `--force` flag to ignore checkpoints and rebuild from scratch. - -## Integration - -This package is used by Socket CLI to provide AI-powered security analysis. The optimized models are embedded in the Socket CLI distribution for offline inference. - -## Size Comparison - -Per model: -- **Original PyTorch model**: ~220 MB -- **ONNX FP32**: ~110 MB -- **ONNX INT8**: ~55 MB -- **ONNX INT4/INT8 mixed**: ~28 MB (82 MB saved) - -Total savings across all models: **~250 MB → ~80 MB** (170 MB saved, 68% reduction). - -Size savings come from: -1. ONNX format (50% smaller than PyTorch) -2. INT8 quantization (50% smaller than FP32) -3. INT4 quantization (75% smaller than FP32) -4. Graph optimizations (5-10% additional savings) diff --git a/packages/codet5-models-builder/package.json b/packages/codet5-models-builder/package.json deleted file mode 100644 index 751b56bf5..000000000 --- a/packages/codet5-models-builder/package.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "name": "@socketsecurity/codet5-models-builder", - "version": "1.0.0", - "description": "CodeT5 model conversion and optimization for Socket CLI", - "type": "module", - "private": true, - "scripts": { - "build": "node scripts/build.mjs", - "build:force": "node scripts/build.mjs --force", - "clean": "node scripts/clean.mjs" - }, - "dependencies": { - "@socketsecurity/build-infra": "workspace:*", - "@socketsecurity/lib": "workspace:*" - } -} diff --git a/packages/codet5-models-builder/scripts/build.mjs b/packages/codet5-models-builder/scripts/build.mjs deleted file mode 100644 index b6db50b12..000000000 --- a/packages/codet5-models-builder/scripts/build.mjs +++ /dev/null @@ -1,305 +0,0 @@ -/** - * Build codet5-models - Convert and optimize CodeT5 models for Socket CLI. 
- * - * This script downloads, converts, and optimizes CodeT5 models: - * - Downloads models from Hugging Face - * - Converts to ONNX format - * - Applies INT4/INT8 mixed-precision quantization - * - Optimizes ONNX graphs - * - * Usage: - * node scripts/build.mjs # Normal build with checkpoints - * node scripts/build.mjs --force # Force rebuild (ignore checkpoints) - */ - -import { promises as fs } from 'node:fs' -import path from 'node:path' -import { fileURLToPath } from 'node:url' - -import { WIN32 } from '@socketsecurity/lib/constants/platform' -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import { spawn } from '@socketsecurity/lib/spawn' -import { - checkDiskSpace, - checkPythonVersion, - formatDuration, - getFileSize, -} from '@socketsecurity/build-infra/lib/build-helpers' -import { - printError, - printHeader, - printStep, - printSuccess, -} from '@socketsecurity/build-infra/lib/build-output' -import { - cleanCheckpoint, - createCheckpoint, - shouldRun, -} from '@socketsecurity/build-infra/lib/checkpoint-manager' - -const __filename = fileURLToPath(import.meta.url) -const __dirname = path.dirname(__filename) - -// Parse arguments. -const args = process.argv.slice(2) -const FORCE_BUILD = args.includes('--force') - -// Configuration. -const MODEL_NAME = 'Salesforce/codet5-base' -const ROOT_DIR = path.join(__dirname, '..') -const MODELS_DIR = path.join(ROOT_DIR, '.models') -const BUILD_DIR = path.join(ROOT_DIR, 'build') -const OUTPUT_DIR = path.join(BUILD_DIR, 'models') - -/** - * Download CodeT5 models from Hugging Face. - */ -async function downloadModels() { - if (!(await shouldRun('codet5-models', 'downloaded', FORCE_BUILD))) { - return - } - - printHeader('Downloading CodeT5 Models') - printStep(`Model: ${MODEL_NAME}`) - - await fs.mkdir(MODELS_DIR, { recursive: true }) - - // Use Hugging Face CLI to download models. - await exec( - `python3 -c "from transformers import AutoTokenizer, AutoModelForSeq2SeqLM; ` + - `tokenizer = AutoTokenizer.from_pretrained('${MODEL_NAME}'); ` + - `model = AutoModelForSeq2SeqLM.from_pretrained('${MODEL_NAME}'); ` + - `tokenizer.save_pretrained('${MODELS_DIR}'); ` + - `model.save_pretrained('${MODELS_DIR}')"`, - { stdio: 'inherit' } - ) - - printSuccess('Models downloaded') - await createCheckpoint('codet5-models', 'downloaded') -} - -/** - * Convert models to ONNX format. - */ -async function convertToOnnx() { - if (!(await shouldRun('codet5-models', 'converted', FORCE_BUILD))) { - return - } - - printHeader('Converting to ONNX') - - await fs.mkdir(BUILD_DIR, { recursive: true }) - - // Convert encoder. - printStep('Converting encoder') - await exec( - `python3 -m transformers.onnx --model=${MODELS_DIR} --feature=seq2seq-lm ${BUILD_DIR}`, - { stdio: 'inherit' } - ) - - printSuccess('Models converted to ONNX') - await createCheckpoint('codet5-models', 'converted') -} - -/** - * Apply quantization to models. - */ -async function quantizeModels() { - if (!(await shouldRun('codet5-models', 'quantized', FORCE_BUILD))) { - return - } - - printHeader('Quantizing Models') - - const encoderPath = path.join(BUILD_DIR, 'encoder_model.onnx') - const decoderPath = path.join(BUILD_DIR, 'decoder_model.onnx') - - // Quantize encoder with INT8. - printStep('Quantizing encoder (INT8)') - await exec( - `python3 -c "from onnxruntime.quantization import quantize_dynamic, QuantType; ` + - `quantize_dynamic('${encoderPath}', '${encoderPath}.quant', weight_type=QuantType.QInt8)"`, - { stdio: 'inherit' } - ) - - // Quantize decoder with INT8. 
- printStep('Quantizing decoder (INT8)') - await exec( - `python3 -c "from onnxruntime.quantization import quantize_dynamic, QuantType; ` + - `quantize_dynamic('${decoderPath}', '${decoderPath}.quant', weight_type=QuantType.QInt8)"`, - { stdio: 'inherit' } - ) - - // Replace original models with quantized versions. - await fs.rename(`${encoderPath}.quant`, encoderPath) - await fs.rename(`${decoderPath}.quant`, decoderPath) - - const encoderSize = await getFileSize(encoderPath) - const decoderSize = await getFileSize(decoderPath) - - printStep(`Encoder: ${encoderSize}`) - printStep(`Decoder: ${decoderSize}`) - - printSuccess('Models quantized') - await createCheckpoint('codet5-models', 'quantized') -} - -/** - * Optimize ONNX graphs. - */ -async function optimizeModels() { - if (!(await shouldRun('codet5-models', 'optimized', FORCE_BUILD))) { - return - } - - printHeader('Optimizing ONNX Graphs') - - const encoderPath = path.join(BUILD_DIR, 'encoder_model.onnx') - const decoderPath = path.join(BUILD_DIR, 'decoder_model.onnx') - - // Optimize encoder. - printStep('Optimizing encoder') - await exec( - `python3 -c "from onnxruntime.transformers import optimizer; ` + - `optimizer.optimize_model('${encoderPath}', model_type='bert', num_heads=12, hidden_size=768)"`, - { stdio: 'inherit' } - ) - - // Optimize decoder. - printStep('Optimizing decoder') - await exec( - `python3 -c "from onnxruntime.transformers import optimizer; ` + - `optimizer.optimize_model('${decoderPath}', model_type='bert', num_heads=12, hidden_size=768)"`, - { stdio: 'inherit' } - ) - - printSuccess('Models optimized') - await createCheckpoint('codet5-models', 'optimized') -} - -/** - * Verify models can load and run inference. - */ -async function verifyModels() { - if (!(await shouldRun('codet5-models', 'verified', FORCE_BUILD))) { - return - } - - printHeader('Verifying Models') - - const encoderPath = path.join(BUILD_DIR, 'encoder_model.onnx') - - // Verify ONNX file is valid. - const stats = await fs.stat(encoderPath) - if (stats.size === 0) { - throw new Error('Encoder model is empty') - } - - printStep('Testing encoder inference') - await exec( - `python3 -c "import onnxruntime as ort; ` + - `sess = ort.InferenceSession('${encoderPath}'); ` + - `print('Encoder loaded successfully')"`, - { stdio: 'inherit' } - ) - - printSuccess('Models verified') - await createCheckpoint('codet5-models', 'verified') -} - -/** - * Export models to output directory. - */ -async function exportModels() { - printHeader('Exporting Models') - - await fs.mkdir(OUTPUT_DIR, { recursive: true }) - - const encoderPath = path.join(BUILD_DIR, 'encoder_model.onnx') - const decoderPath = path.join(BUILD_DIR, 'decoder_model.onnx') - const tokenizerPath = path.join(MODELS_DIR, 'tokenizer.json') - - const outputEncoder = path.join(OUTPUT_DIR, 'encoder.onnx') - const outputDecoder = path.join(OUTPUT_DIR, 'decoder.onnx') - const outputTokenizer = path.join(OUTPUT_DIR, 'tokenizer.json') - - await fs.copyFile(encoderPath, outputEncoder) - await fs.copyFile(decoderPath, outputDecoder) - - if (await fs.access(tokenizerPath).then(() => true).catch(() => false)) { - await fs.copyFile(tokenizerPath, outputTokenizer) - } - - const encoderSize = await getFileSize(outputEncoder) - const decoderSize = await getFileSize(outputDecoder) - - printStep(`Encoder: ${outputEncoder} (${encoderSize})`) - printStep(`Decoder: ${outputDecoder} (${decoderSize})`) - - printSuccess('Models exported') -} - -/** - * Main build function. 
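Each deleted build phase above is wrapped in the same checkpoint gate: return early unless `shouldRun(pkg, phase, force)` reports the phase as outstanding, do the work, then record it with `createCheckpoint(pkg, phase)`. The function names come straight from the `checkpoint-manager` imports in the deleted script; the file-per-checkpoint storage below is an assumption about how such a manager might persist state, sketched for clarity:

```ts
import { mkdir, readFile, writeFile } from 'node:fs/promises'
import path from 'node:path'

const CHECKPOINT_DIR = '.checkpoints'

// Assumed storage scheme: one marker file per (package, phase) pair.
async function shouldRun(pkg: string, phase: string, force: boolean): Promise<boolean> {
  if (force) return true
  return readFile(path.join(CHECKPOINT_DIR, `${pkg}.${phase}`), 'utf8')
    .then(() => false) // Checkpoint exists: phase already completed.
    .catch(() => true) // Missing or unreadable: run the phase.
}

async function createCheckpoint(pkg: string, phase: string): Promise<void> {
  await mkdir(CHECKPOINT_DIR, { recursive: true })
  await writeFile(
    path.join(CHECKPOINT_DIR, `${pkg}.${phase}`),
    new Date().toISOString(),
  )
}

// Usage mirrors each phase of the deleted pipeline:
async function quantizeModels(force = false) {
  if (!(await shouldRun('codet5-models', 'quantized', force))) return
  // ...quantize encoder and decoder...
  await createCheckpoint('codet5-models', 'quantized')
}
```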
- */ -async function main() { - const totalStart = Date.now() - - printHeader('🔨 Building codet5-models') - const logger = getDefaultLogger() - logger.info('Converting and optimizing CodeT5 models') - logger.info('') - - // Pre-flight checks. - printHeader('Pre-flight Checks') - - const diskOk = await checkDiskSpace(BUILD_DIR, 2 * 1024 * 1024 * 1024) - if (!diskOk) { - throw new Error('Insufficient disk space (need 2GB)') - } - - const pythonOk = await checkPythonVersion('3.8') - if (!pythonOk) { - throw new Error('Python 3.8+ required') - } - - // Check for required Python packages. - printStep('Checking Python dependencies') - try { - await execCapture('python3 -c "import transformers, onnx, onnxruntime"') - } catch { - printError( - 'Missing Python dependencies', - 'Install required packages: pip install transformers onnx onnxruntime' - ) - throw new Error('Python dependencies not installed') - } - - printSuccess('Pre-flight checks passed') - - // Build phases. - await downloadModels() - await convertToOnnx() - await quantizeModels() - await optimizeModels() - await verifyModels() - await exportModels() - - // Report completion. - const totalDuration = formatDuration(Date.now() - totalStart) - - printHeader('🎉 Build Complete!') - logger.success(`Total time: ${totalDuration}`) - logger.success(`Output: ${OUTPUT_DIR}`) - logger.info('') - logger.info('Next steps:') - logger.info(' 1. Test models with Socket CLI') - logger.info(' 2. Integrate with Socket CLI build') - logger.info('') -} - -// Run build. -main().catch((e) => { - printError('Build Failed', e) - throw e -}) diff --git a/packages/codet5-models-builder/scripts/clean.mjs b/packages/codet5-models-builder/scripts/clean.mjs deleted file mode 100644 index 5f8825fc2..000000000 --- a/packages/codet5-models-builder/scripts/clean.mjs +++ /dev/null @@ -1,40 +0,0 @@ -/** - * Clean codet5-models build artifacts. - */ - -import path from 'node:path' -import { fileURLToPath } from 'node:url' - -import { cleanCheckpoint } from '@socketsecurity/build-infra/lib/checkpoint-manager' -import { printHeader, printSuccess } from '@socketsecurity/build-infra/lib/build-output' -import { safeDelete } from '@socketsecurity/lib/fs' -import loggerPkg from '@socketsecurity/lib/logger' - -const logger = loggerPkg.getDefaultLogger() - -const __filename = fileURLToPath(import.meta.url) -const __dirname = path.dirname(__filename) - -const ROOT_DIR = path.join(__dirname, '..') -const MODELS_DIR = path.join(ROOT_DIR, '.models') -const BUILD_DIR = path.join(ROOT_DIR, 'build') - -async function main() { - printHeader('Cleaning codet5-models') - - // Remove models directory. - await safeDelete(MODELS_DIR).catch(() => {}) - - // Remove build directory. - await safeDelete(BUILD_DIR).catch(() => {}) - - // Clean checkpoints. 
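One defect worth flagging in the deleted build script: it calls `exec()` and `execCapture()` throughout, but its import list binds neither (it imports `spawn` from `@socketsecurity/lib/spawn`, which appears unused in the shown code). Presumably the helpers were meant to wrap process spawning. A minimal stand-in built only on `node:child_process`, as a sketch of what those calls assume:

```ts
import { spawn } from 'node:child_process'

/** Run a shell command; inherit stdio or capture stdout. Rejects on non-zero exit. */
function run(command: string, opts: { capture?: boolean } = {}): Promise<string> {
  return new Promise((resolve, reject) => {
    const child = spawn(command, {
      shell: true,
      stdio: opts.capture ? ['ignore', 'pipe', 'inherit'] : 'inherit',
    })
    let out = ''
    child.stdout?.on('data', chunk => {
      out += chunk
    })
    child.on('error', reject)
    child.on('close', code => {
      if (code === 0) {
        resolve(out)
      } else {
        reject(new Error(`${command} exited with code ${code}`))
      }
    })
  })
}

// exec(cmd, { stdio: 'inherit' }) ≈ run(cmd)
// execCapture(cmd)               ≈ run(cmd, { capture: true })
```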
- await cleanCheckpoint('codet5-models') - - printSuccess('Clean complete') -} - -main().catch((e) => { - logger.error('Clean failed:', e.message) - process.exit(1) -}) diff --git a/packages/lib/.config/esbuild.config.mjs b/packages/lib-internal/.config/esbuild.config.mjs similarity index 100% rename from packages/lib/.config/esbuild.config.mjs rename to packages/lib-internal/.config/esbuild.config.mjs diff --git a/packages/lib/.config/eslint.config.mjs b/packages/lib-internal/.config/eslint.config.mjs similarity index 100% rename from packages/lib/.config/eslint.config.mjs rename to packages/lib-internal/.config/eslint.config.mjs diff --git a/packages/lib/.config/knip.json b/packages/lib-internal/.config/knip.json similarity index 100% rename from packages/lib/.config/knip.json rename to packages/lib-internal/.config/knip.json diff --git a/packages/lib/.config/taze.config.mts b/packages/lib-internal/.config/taze.config.mts similarity index 100% rename from packages/lib/.config/taze.config.mts rename to packages/lib-internal/.config/taze.config.mts diff --git a/packages/lib/.config/tsconfig.check.json b/packages/lib-internal/.config/tsconfig.check.json similarity index 100% rename from packages/lib/.config/tsconfig.check.json rename to packages/lib-internal/.config/tsconfig.check.json diff --git a/packages/lib/.config/tsconfig.external-aliases.json b/packages/lib-internal/.config/tsconfig.external-aliases.json similarity index 100% rename from packages/lib/.config/tsconfig.external-aliases.json rename to packages/lib-internal/.config/tsconfig.external-aliases.json diff --git a/packages/lib/.config/vitest-global-setup.mts b/packages/lib-internal/.config/vitest-global-setup.mts similarity index 100% rename from packages/lib/.config/vitest-global-setup.mts rename to packages/lib-internal/.config/vitest-global-setup.mts diff --git a/packages/lib/.config/vitest-plugins/import-transform.mts b/packages/lib-internal/.config/vitest-plugins/import-transform.mts similarity index 100% rename from packages/lib/.config/vitest-plugins/import-transform.mts rename to packages/lib-internal/.config/vitest-plugins/import-transform.mts diff --git a/packages/lib/.config/vitest-plugins/require-transform.mts b/packages/lib-internal/.config/vitest-plugins/require-transform.mts similarity index 100% rename from packages/lib/.config/vitest-plugins/require-transform.mts rename to packages/lib-internal/.config/vitest-plugins/require-transform.mts diff --git a/packages/lib/.config/vitest-plugins/transform-utils.mts b/packages/lib-internal/.config/vitest-plugins/transform-utils.mts similarity index 100% rename from packages/lib/.config/vitest-plugins/transform-utils.mts rename to packages/lib-internal/.config/vitest-plugins/transform-utils.mts diff --git a/packages/lib/.config/vitest.config.isolated.mts b/packages/lib-internal/.config/vitest.config.isolated.mts similarity index 100% rename from packages/lib/.config/vitest.config.isolated.mts rename to packages/lib-internal/.config/vitest.config.isolated.mts diff --git a/packages/lib/.config/vitest.config.mts b/packages/lib-internal/.config/vitest.config.mts similarity index 100% rename from packages/lib/.config/vitest.config.mts rename to packages/lib-internal/.config/vitest.config.mts diff --git a/packages/lib/.config/vitest.setup.mts b/packages/lib-internal/.config/vitest.setup.mts similarity index 100% rename from packages/lib/.config/vitest.setup.mts rename to packages/lib-internal/.config/vitest.setup.mts diff --git a/packages/lib/.gitignore 
b/packages/lib-internal/.gitignore similarity index 100% rename from packages/lib/.gitignore rename to packages/lib-internal/.gitignore diff --git a/packages/lib/CHANGELOG.md b/packages/lib-internal/CHANGELOG.md similarity index 100% rename from packages/lib/CHANGELOG.md rename to packages/lib-internal/CHANGELOG.md diff --git a/packages/lib/LICENSE b/packages/lib-internal/LICENSE similarity index 100% rename from packages/lib/LICENSE rename to packages/lib-internal/LICENSE diff --git a/packages/lib/README.md b/packages/lib-internal/README.md similarity index 100% rename from packages/lib/README.md rename to packages/lib-internal/README.md diff --git a/packages/lib/biome.json b/packages/lib-internal/biome.json similarity index 100% rename from packages/lib/biome.json rename to packages/lib-internal/biome.json diff --git a/packages/lib/data/extensions.json b/packages/lib-internal/data/extensions.json similarity index 100% rename from packages/lib/data/extensions.json rename to packages/lib-internal/data/extensions.json diff --git a/packages/lib/docs/build.md b/packages/lib-internal/docs/build.md similarity index 100% rename from packages/lib/docs/build.md rename to packages/lib-internal/docs/build.md diff --git a/packages/lib/docs/getting-started.md b/packages/lib-internal/docs/getting-started.md similarity index 100% rename from packages/lib/docs/getting-started.md rename to packages/lib-internal/docs/getting-started.md diff --git a/packages/lib/docs/socket-lib-structure.md b/packages/lib-internal/docs/socket-lib-structure.md similarity index 100% rename from packages/lib/docs/socket-lib-structure.md rename to packages/lib-internal/docs/socket-lib-structure.md diff --git a/packages/lib/docs/themes.md b/packages/lib-internal/docs/themes.md similarity index 100% rename from packages/lib/docs/themes.md rename to packages/lib-internal/docs/themes.md diff --git a/packages/lib/package.json b/packages/lib-internal/package.json similarity index 99% rename from packages/lib/package.json rename to packages/lib-internal/package.json index 091a1f070..5ba88b720 100644 --- a/packages/lib/package.json +++ b/packages/lib-internal/package.json @@ -1,5 +1,5 @@ { - "name": "@socketsecurity/lib", + "name": "@socketsecurity/lib-internal", "version": "3.2.8", "private": true, "license": "MIT", @@ -14,7 +14,7 @@ "repository": { "type": "git", "url": "git+https://github.com/SocketDev/socket-cli.git", - "directory": "packages/lib" + "directory": "packages/lib-internal" }, "author": { "name": "Socket Inc", @@ -395,7 +395,6 @@ "default": "./dist/performance.js" }, "./plugins/babel-plugin-inline-require-calls": { - "types": "./plugins/babel-plugin-inline-require-calls.d.ts", "default": "./plugins/babel-plugin-inline-require-calls.js" }, "./process-lock": { diff --git a/packages/lib/plugins/README.md b/packages/lib-internal/plugins/README.md similarity index 100% rename from packages/lib/plugins/README.md rename to packages/lib-internal/plugins/README.md diff --git a/packages/lib/plugins/babel-plugin-inline-const-enum.mjs b/packages/lib-internal/plugins/babel-plugin-inline-const-enum.mjs similarity index 100% rename from packages/lib/plugins/babel-plugin-inline-const-enum.mjs rename to packages/lib-internal/plugins/babel-plugin-inline-const-enum.mjs diff --git a/packages/lib/plugins/babel-plugin-inline-process-env.mjs b/packages/lib-internal/plugins/babel-plugin-inline-process-env.mjs similarity index 100% rename from packages/lib/plugins/babel-plugin-inline-process-env.mjs rename to 
packages/lib-internal/plugins/babel-plugin-inline-process-env.mjs diff --git a/packages/lib/plugins/babel-plugin-inline-require-calls.js b/packages/lib-internal/plugins/babel-plugin-inline-require-calls.js similarity index 100% rename from packages/lib/plugins/babel-plugin-inline-require-calls.js rename to packages/lib-internal/plugins/babel-plugin-inline-require-calls.js diff --git a/packages/lib/plugins/babel-plugin-strip-debug.mjs b/packages/lib-internal/plugins/babel-plugin-strip-debug.mjs similarity index 100% rename from packages/lib/plugins/babel-plugin-strip-debug.mjs rename to packages/lib-internal/plugins/babel-plugin-strip-debug.mjs diff --git a/packages/lib/pnpm-lock.yaml b/packages/lib-internal/pnpm-lock.yaml similarity index 100% rename from packages/lib/pnpm-lock.yaml rename to packages/lib-internal/pnpm-lock.yaml diff --git a/packages/lib/scripts/babel/README.md b/packages/lib-internal/scripts/babel/README.md similarity index 100% rename from packages/lib/scripts/babel/README.md rename to packages/lib-internal/scripts/babel/README.md diff --git a/packages/lib/scripts/babel/transform-commonjs-exports.mjs b/packages/lib-internal/scripts/babel/transform-commonjs-exports.mjs similarity index 100% rename from packages/lib/scripts/babel/transform-commonjs-exports.mjs rename to packages/lib-internal/scripts/babel/transform-commonjs-exports.mjs diff --git a/packages/lib/scripts/babel/transform-set-proto-plugin.mjs b/packages/lib-internal/scripts/babel/transform-set-proto-plugin.mjs similarity index 100% rename from packages/lib/scripts/babel/transform-set-proto-plugin.mjs rename to packages/lib-internal/scripts/babel/transform-set-proto-plugin.mjs diff --git a/packages/lib/scripts/babel/transform-url-parse-plugin.mjs b/packages/lib-internal/scripts/babel/transform-url-parse-plugin.mjs similarity index 100% rename from packages/lib/scripts/babel/transform-url-parse-plugin.mjs rename to packages/lib-internal/scripts/babel/transform-url-parse-plugin.mjs diff --git a/packages/lib/scripts/build-externals.mjs b/packages/lib-internal/scripts/build-externals.mjs similarity index 100% rename from packages/lib/scripts/build-externals.mjs rename to packages/lib-internal/scripts/build-externals.mjs diff --git a/packages/lib/scripts/build-externals/bundler.mjs b/packages/lib-internal/scripts/build-externals/bundler.mjs similarity index 100% rename from packages/lib/scripts/build-externals/bundler.mjs rename to packages/lib-internal/scripts/build-externals/bundler.mjs diff --git a/packages/lib/scripts/build-externals/config.mjs b/packages/lib-internal/scripts/build-externals/config.mjs similarity index 100% rename from packages/lib/scripts/build-externals/config.mjs rename to packages/lib-internal/scripts/build-externals/config.mjs diff --git a/packages/lib/scripts/build-externals/copy-files.mjs b/packages/lib-internal/scripts/build-externals/copy-files.mjs similarity index 100% rename from packages/lib/scripts/build-externals/copy-files.mjs rename to packages/lib-internal/scripts/build-externals/copy-files.mjs diff --git a/packages/lib/scripts/build-externals/esbuild-config.mjs b/packages/lib-internal/scripts/build-externals/esbuild-config.mjs similarity index 100% rename from packages/lib/scripts/build-externals/esbuild-config.mjs rename to packages/lib-internal/scripts/build-externals/esbuild-config.mjs diff --git a/packages/lib/scripts/build-externals/local-packages.mjs b/packages/lib-internal/scripts/build-externals/local-packages.mjs similarity index 100% rename from 
packages/lib/scripts/build-externals/local-packages.mjs rename to packages/lib-internal/scripts/build-externals/local-packages.mjs diff --git a/packages/lib/scripts/build-externals/orchestrator.mjs b/packages/lib-internal/scripts/build-externals/orchestrator.mjs similarity index 100% rename from packages/lib/scripts/build-externals/orchestrator.mjs rename to packages/lib-internal/scripts/build-externals/orchestrator.mjs diff --git a/packages/lib/scripts/build-externals/stubs/README.md b/packages/lib-internal/scripts/build-externals/stubs/README.md similarity index 100% rename from packages/lib/scripts/build-externals/stubs/README.md rename to packages/lib-internal/scripts/build-externals/stubs/README.md diff --git a/packages/lib/scripts/build-externals/stubs/debug.cjs b/packages/lib-internal/scripts/build-externals/stubs/debug.cjs similarity index 100% rename from packages/lib/scripts/build-externals/stubs/debug.cjs rename to packages/lib-internal/scripts/build-externals/stubs/debug.cjs diff --git a/packages/lib/scripts/build-externals/stubs/empty.cjs b/packages/lib-internal/scripts/build-externals/stubs/empty.cjs similarity index 100% rename from packages/lib/scripts/build-externals/stubs/empty.cjs rename to packages/lib-internal/scripts/build-externals/stubs/empty.cjs diff --git a/packages/lib/scripts/build-externals/stubs/encoding.cjs b/packages/lib-internal/scripts/build-externals/stubs/encoding.cjs similarity index 100% rename from packages/lib/scripts/build-externals/stubs/encoding.cjs rename to packages/lib-internal/scripts/build-externals/stubs/encoding.cjs diff --git a/packages/lib/scripts/build-externals/stubs/noop.cjs b/packages/lib-internal/scripts/build-externals/stubs/noop.cjs similarity index 100% rename from packages/lib/scripts/build-externals/stubs/noop.cjs rename to packages/lib-internal/scripts/build-externals/stubs/noop.cjs diff --git a/packages/lib/scripts/build-externals/stubs/throw.cjs b/packages/lib-internal/scripts/build-externals/stubs/throw.cjs similarity index 100% rename from packages/lib/scripts/build-externals/stubs/throw.cjs rename to packages/lib-internal/scripts/build-externals/stubs/throw.cjs diff --git a/packages/lib/scripts/build-js.mjs b/packages/lib-internal/scripts/build-js.mjs similarity index 100% rename from packages/lib/scripts/build-js.mjs rename to packages/lib-internal/scripts/build-js.mjs diff --git a/packages/lib/scripts/build.mjs b/packages/lib-internal/scripts/build.mjs similarity index 100% rename from packages/lib/scripts/build.mjs rename to packages/lib-internal/scripts/build.mjs diff --git a/packages/lib/scripts/check.mjs b/packages/lib-internal/scripts/check.mjs similarity index 100% rename from packages/lib/scripts/check.mjs rename to packages/lib-internal/scripts/check.mjs diff --git a/packages/lib/scripts/claude.mjs b/packages/lib-internal/scripts/claude.mjs similarity index 100% rename from packages/lib/scripts/claude.mjs rename to packages/lib-internal/scripts/claude.mjs diff --git a/packages/lib/scripts/clean.mjs b/packages/lib-internal/scripts/clean.mjs similarity index 100% rename from packages/lib/scripts/clean.mjs rename to packages/lib-internal/scripts/clean.mjs diff --git a/packages/lib/scripts/cover.mjs b/packages/lib-internal/scripts/cover.mjs similarity index 100% rename from packages/lib/scripts/cover.mjs rename to packages/lib-internal/scripts/cover.mjs diff --git a/packages/lib/scripts/fix-build.mjs b/packages/lib-internal/scripts/fix-build.mjs similarity index 87% rename from packages/lib/scripts/fix-build.mjs 
rename to packages/lib-internal/scripts/fix-build.mjs index 99e510be3..08e0f9ea2 100644 --- a/packages/lib/scripts/fix-build.mjs +++ b/packages/lib-internal/scripts/fix-build.mjs @@ -1,6 +1,6 @@ /** * @fileoverview Orchestrates all post-build fix scripts. - * Runs generate-package-exports and fix-external-imports in sequence. + * Runs package exports generation, path alias fixing, and CommonJS exports fixing in sequence. */ import { isQuiet } from '#socketsecurity/lib/argv/flags' @@ -36,10 +36,6 @@ async function main() { args: ['scripts/fix-path-aliases.mjs', ...fixArgs], command: 'node', }, - { - args: ['scripts/fix-external-imports.mjs', ...fixArgs], - command: 'node', - }, { args: ['scripts/fix-commonjs-exports.mjs', ...fixArgs], command: 'node', diff --git a/packages/lib/scripts/fix-commonjs-exports.mjs b/packages/lib-internal/scripts/fix-commonjs-exports.mjs similarity index 100% rename from packages/lib/scripts/fix-commonjs-exports.mjs rename to packages/lib-internal/scripts/fix-commonjs-exports.mjs diff --git a/packages/lib/scripts/fix-path-aliases.mjs b/packages/lib-internal/scripts/fix-path-aliases.mjs similarity index 100% rename from packages/lib/scripts/fix-path-aliases.mjs rename to packages/lib-internal/scripts/fix-path-aliases.mjs diff --git a/packages/lib/scripts/generate-package-exports.mjs b/packages/lib-internal/scripts/generate-package-exports.mjs similarity index 100% rename from packages/lib/scripts/generate-package-exports.mjs rename to packages/lib-internal/scripts/generate-package-exports.mjs diff --git a/packages/lib/scripts/lint.mjs b/packages/lib-internal/scripts/lint.mjs similarity index 100% rename from packages/lib/scripts/lint.mjs rename to packages/lib-internal/scripts/lint.mjs diff --git a/packages/lib/scripts/test.mjs b/packages/lib-internal/scripts/test.mjs similarity index 100% rename from packages/lib/scripts/test.mjs rename to packages/lib-internal/scripts/test.mjs diff --git a/packages/lib/scripts/update.mjs b/packages/lib-internal/scripts/update.mjs similarity index 100% rename from packages/lib/scripts/update.mjs rename to packages/lib-internal/scripts/update.mjs diff --git a/packages/lib/scripts/utils/alias-loader.mjs b/packages/lib-internal/scripts/utils/alias-loader.mjs similarity index 100% rename from packages/lib/scripts/utils/alias-loader.mjs rename to packages/lib-internal/scripts/utils/alias-loader.mjs diff --git a/packages/lib/scripts/utils/changed-test-mapper.mjs b/packages/lib-internal/scripts/utils/changed-test-mapper.mjs similarity index 100% rename from packages/lib/scripts/utils/changed-test-mapper.mjs rename to packages/lib-internal/scripts/utils/changed-test-mapper.mjs diff --git a/packages/lib/scripts/utils/get-local-package-aliases.mjs b/packages/lib-internal/scripts/utils/get-local-package-aliases.mjs similarity index 100% rename from packages/lib/scripts/utils/get-local-package-aliases.mjs rename to packages/lib-internal/scripts/utils/get-local-package-aliases.mjs diff --git a/packages/lib/scripts/utils/interactive-runner.mjs b/packages/lib-internal/scripts/utils/interactive-runner.mjs similarity index 100% rename from packages/lib/scripts/utils/interactive-runner.mjs rename to packages/lib-internal/scripts/utils/interactive-runner.mjs diff --git a/packages/lib/scripts/utils/parse-args.mjs b/packages/lib-internal/scripts/utils/parse-args.mjs similarity index 100% rename from packages/lib/scripts/utils/parse-args.mjs rename to packages/lib-internal/scripts/utils/parse-args.mjs diff --git 
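For orientation on the structure being edited in the `fix-build.mjs` hunk above: the script drives an ordered list of `{ command, args }` entries, running them strictly in sequence, and this patch drops the `fix-external-imports` step from that list. A stripped-down sketch of that kind of sequential runner; the step list is reconstructed from the hunk and the updated file comment, and the error handling is illustrative:

```ts
import { spawnSync } from 'node:child_process'

interface Step {
  command: string
  args: string[]
}

// Post-build fixes run strictly in sequence; the first failure stops the chain.
const steps: Step[] = [
  { command: 'node', args: ['scripts/generate-package-exports.mjs'] },
  { command: 'node', args: ['scripts/fix-path-aliases.mjs'] },
  { command: 'node', args: ['scripts/fix-commonjs-exports.mjs'] },
]

for (const { args, command } of steps) {
  const { status } = spawnSync(command, args, { stdio: 'inherit' })
  if (status !== 0) {
    console.error(`${command} ${args.join(' ')} failed with status ${status}`)
    process.exit(status ?? 1)
  }
}
```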
a/packages/lib/scripts/utils/run-command.mjs b/packages/lib-internal/scripts/utils/run-command.mjs similarity index 100% rename from packages/lib/scripts/utils/run-command.mjs rename to packages/lib-internal/scripts/utils/run-command.mjs diff --git a/packages/lib/scripts/utils/signal-exit.mjs b/packages/lib-internal/scripts/utils/signal-exit.mjs similarity index 100% rename from packages/lib/scripts/utils/signal-exit.mjs rename to packages/lib-internal/scripts/utils/signal-exit.mjs diff --git a/packages/lib/scripts/validate-esbuild-minify.mjs b/packages/lib-internal/scripts/validate-esbuild-minify.mjs similarity index 100% rename from packages/lib/scripts/validate-esbuild-minify.mjs rename to packages/lib-internal/scripts/validate-esbuild-minify.mjs diff --git a/packages/lib/scripts/validate-file-count.mjs b/packages/lib-internal/scripts/validate-file-count.mjs similarity index 100% rename from packages/lib/scripts/validate-file-count.mjs rename to packages/lib-internal/scripts/validate-file-count.mjs diff --git a/packages/lib/scripts/validate-file-size.mjs b/packages/lib-internal/scripts/validate-file-size.mjs similarity index 100% rename from packages/lib/scripts/validate-file-size.mjs rename to packages/lib-internal/scripts/validate-file-size.mjs diff --git a/packages/lib/scripts/validate-markdown-filenames.mjs b/packages/lib-internal/scripts/validate-markdown-filenames.mjs similarity index 100% rename from packages/lib/scripts/validate-markdown-filenames.mjs rename to packages/lib-internal/scripts/validate-markdown-filenames.mjs diff --git a/packages/lib/scripts/validate-no-cdn-refs.mjs b/packages/lib-internal/scripts/validate-no-cdn-refs.mjs similarity index 100% rename from packages/lib/scripts/validate-no-cdn-refs.mjs rename to packages/lib-internal/scripts/validate-no-cdn-refs.mjs diff --git a/packages/lib/scripts/validate-no-extraneous-dependencies.mjs b/packages/lib-internal/scripts/validate-no-extraneous-dependencies.mjs similarity index 100% rename from packages/lib/scripts/validate-no-extraneous-dependencies.mjs rename to packages/lib-internal/scripts/validate-no-extraneous-dependencies.mjs diff --git a/packages/lib/scripts/validate-no-link-deps.mjs b/packages/lib-internal/scripts/validate-no-link-deps.mjs similarity index 100% rename from packages/lib/scripts/validate-no-link-deps.mjs rename to packages/lib-internal/scripts/validate-no-link-deps.mjs diff --git a/packages/lib/src/abort.ts b/packages/lib-internal/src/abort.ts similarity index 100% rename from packages/lib/src/abort.ts rename to packages/lib-internal/src/abort.ts diff --git a/packages/lib/src/agent.ts b/packages/lib-internal/src/agent.ts similarity index 100% rename from packages/lib/src/agent.ts rename to packages/lib-internal/src/agent.ts diff --git a/packages/lib/src/ansi.ts b/packages/lib-internal/src/ansi.ts similarity index 100% rename from packages/lib/src/ansi.ts rename to packages/lib-internal/src/ansi.ts diff --git a/packages/lib/src/argv/flags.ts b/packages/lib-internal/src/argv/flags.ts similarity index 100% rename from packages/lib/src/argv/flags.ts rename to packages/lib-internal/src/argv/flags.ts diff --git a/packages/lib/src/argv/parse.ts b/packages/lib-internal/src/argv/parse.ts similarity index 100% rename from packages/lib/src/argv/parse.ts rename to packages/lib-internal/src/argv/parse.ts diff --git a/packages/lib/src/arrays.ts b/packages/lib-internal/src/arrays.ts similarity index 100% rename from packages/lib/src/arrays.ts rename to packages/lib-internal/src/arrays.ts diff --git 
a/packages/lib/src/bin.ts b/packages/lib-internal/src/bin.ts similarity index 99% rename from packages/lib/src/bin.ts rename to packages/lib-internal/src/bin.ts index d9ea3f4bd..b2322c4b2 100644 --- a/packages/lib/src/bin.ts +++ b/packages/lib-internal/src/bin.ts @@ -107,8 +107,6 @@ export interface WhichOptions { path?: string | undefined /** Path separator character. */ pathExt?: string | undefined - /** Environment variables to use. */ - env?: Record | undefined } /** diff --git a/packages/lib/src/cacache.ts b/packages/lib-internal/src/cacache.ts similarity index 100% rename from packages/lib/src/cacache.ts rename to packages/lib-internal/src/cacache.ts diff --git a/packages/lib/src/cache-with-ttl.ts b/packages/lib-internal/src/cache-with-ttl.ts similarity index 100% rename from packages/lib/src/cache-with-ttl.ts rename to packages/lib-internal/src/cache-with-ttl.ts diff --git a/packages/lib/src/constants/agents.ts b/packages/lib-internal/src/constants/agents.ts similarity index 100% rename from packages/lib/src/constants/agents.ts rename to packages/lib-internal/src/constants/agents.ts diff --git a/packages/lib/src/constants/core.ts b/packages/lib-internal/src/constants/core.ts similarity index 100% rename from packages/lib/src/constants/core.ts rename to packages/lib-internal/src/constants/core.ts diff --git a/packages/lib/src/constants/encoding.ts b/packages/lib-internal/src/constants/encoding.ts similarity index 100% rename from packages/lib/src/constants/encoding.ts rename to packages/lib-internal/src/constants/encoding.ts diff --git a/packages/lib/src/constants/github.ts b/packages/lib-internal/src/constants/github.ts similarity index 100% rename from packages/lib/src/constants/github.ts rename to packages/lib-internal/src/constants/github.ts diff --git a/packages/lib/src/constants/licenses.ts b/packages/lib-internal/src/constants/licenses.ts similarity index 100% rename from packages/lib/src/constants/licenses.ts rename to packages/lib-internal/src/constants/licenses.ts diff --git a/packages/lib/src/constants/node.ts b/packages/lib-internal/src/constants/node.ts similarity index 99% rename from packages/lib/src/constants/node.ts rename to packages/lib-internal/src/constants/node.ts index 07991e097..fe3a4c657 100644 --- a/packages/lib/src/constants/node.ts +++ b/packages/lib-internal/src/constants/node.ts @@ -24,7 +24,7 @@ let _maintainedNodeVersions: | undefined export function getMaintainedNodeVersions() { if (_maintainedNodeVersions === undefined) { - _maintainedNodeVersions = require('#lib/maintained-node-versions') + _maintainedNodeVersions = require('#lib/maintained-node-versions').default } return _maintainedNodeVersions } diff --git a/packages/lib/src/constants/packages.ts b/packages/lib-internal/src/constants/packages.ts similarity index 100% rename from packages/lib/src/constants/packages.ts rename to packages/lib-internal/src/constants/packages.ts diff --git a/packages/lib/src/constants/paths.ts b/packages/lib-internal/src/constants/paths.ts similarity index 100% rename from packages/lib/src/constants/paths.ts rename to packages/lib-internal/src/constants/paths.ts diff --git a/packages/lib/src/constants/platform.ts b/packages/lib-internal/src/constants/platform.ts similarity index 100% rename from packages/lib/src/constants/platform.ts rename to packages/lib-internal/src/constants/platform.ts diff --git a/packages/lib/src/constants/process.ts b/packages/lib-internal/src/constants/process.ts similarity index 100% rename from packages/lib/src/constants/process.ts rename 
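One behavioural change hiding among the renames: the `constants/node.ts` hunk appends `.default` to the `require('#lib/maintained-node-versions')` call. That is the classic symptom of a dependency that compiles an ESM `export default` down to CommonJS: the exported value sits on the `default` property of the namespace object rather than being `module.exports` itself. A standalone illustration of the shape involved (the version strings are placeholders):

```ts
// What an ESM module with `export default [...]` typically compiles to
// under CommonJS interop:
const compiledModule = {
  __esModule: true,
  default: ['18.20.4', '20.18.0', '22.11.0'], // Placeholder versions.
}

// A plain require() hands back the namespace object, not the array:
const ns = compiledModule
console.log(Array.isArray(ns))         // false: it's { __esModule, default }
console.log(Array.isArray(ns.default)) // true: the actual exported value
```

Hence the `.default` access added by this patch: without it, `getMaintainedNodeVersions()` would cache and return the namespace wrapper instead of the versions list.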
to packages/lib-internal/src/constants/process.ts diff --git a/packages/lib/src/constants/socket.ts b/packages/lib-internal/src/constants/socket.ts similarity index 100% rename from packages/lib/src/constants/socket.ts rename to packages/lib-internal/src/constants/socket.ts diff --git a/packages/lib/src/constants/testing.ts b/packages/lib-internal/src/constants/testing.ts similarity index 100% rename from packages/lib/src/constants/testing.ts rename to packages/lib-internal/src/constants/testing.ts diff --git a/packages/lib/src/constants/time.ts b/packages/lib-internal/src/constants/time.ts similarity index 100% rename from packages/lib/src/constants/time.ts rename to packages/lib-internal/src/constants/time.ts diff --git a/packages/lib/src/constants/typescript.ts b/packages/lib-internal/src/constants/typescript.ts similarity index 100% rename from packages/lib/src/constants/typescript.ts rename to packages/lib-internal/src/constants/typescript.ts diff --git a/packages/lib/src/cover/code.ts b/packages/lib-internal/src/cover/code.ts similarity index 100% rename from packages/lib/src/cover/code.ts rename to packages/lib-internal/src/cover/code.ts diff --git a/packages/lib/src/cover/formatters.ts b/packages/lib-internal/src/cover/formatters.ts similarity index 100% rename from packages/lib/src/cover/formatters.ts rename to packages/lib-internal/src/cover/formatters.ts diff --git a/packages/lib/src/cover/type.ts b/packages/lib-internal/src/cover/type.ts similarity index 100% rename from packages/lib/src/cover/type.ts rename to packages/lib-internal/src/cover/type.ts diff --git a/packages/lib/src/cover/types.ts b/packages/lib-internal/src/cover/types.ts similarity index 100% rename from packages/lib/src/cover/types.ts rename to packages/lib-internal/src/cover/types.ts diff --git a/packages/lib/src/debug.ts b/packages/lib-internal/src/debug.ts similarity index 100% rename from packages/lib/src/debug.ts rename to packages/lib-internal/src/debug.ts diff --git a/packages/lib/src/dlx-binary.ts b/packages/lib-internal/src/dlx-binary.ts similarity index 100% rename from packages/lib/src/dlx-binary.ts rename to packages/lib-internal/src/dlx-binary.ts diff --git a/packages/lib/src/dlx-manifest.ts b/packages/lib-internal/src/dlx-manifest.ts similarity index 100% rename from packages/lib/src/dlx-manifest.ts rename to packages/lib-internal/src/dlx-manifest.ts diff --git a/packages/lib/src/dlx-package.ts b/packages/lib-internal/src/dlx-package.ts similarity index 100% rename from packages/lib/src/dlx-package.ts rename to packages/lib-internal/src/dlx-package.ts diff --git a/packages/lib/src/dlx.ts b/packages/lib-internal/src/dlx.ts similarity index 100% rename from packages/lib/src/dlx.ts rename to packages/lib-internal/src/dlx.ts diff --git a/packages/lib/src/effects/README.md b/packages/lib-internal/src/effects/README.md similarity index 100% rename from packages/lib/src/effects/README.md rename to packages/lib-internal/src/effects/README.md diff --git a/packages/lib/src/effects/pulse-frames.ts b/packages/lib-internal/src/effects/pulse-frames.ts similarity index 100% rename from packages/lib/src/effects/pulse-frames.ts rename to packages/lib-internal/src/effects/pulse-frames.ts diff --git a/packages/lib/src/effects/text-shimmer.ts b/packages/lib-internal/src/effects/text-shimmer.ts similarity index 100% rename from packages/lib/src/effects/text-shimmer.ts rename to packages/lib-internal/src/effects/text-shimmer.ts diff --git a/packages/lib/src/effects/types.ts b/packages/lib-internal/src/effects/types.ts 
similarity index 100% rename from packages/lib/src/effects/types.ts rename to packages/lib-internal/src/effects/types.ts diff --git a/packages/lib/src/effects/ultra.ts b/packages/lib-internal/src/effects/ultra.ts similarity index 100% rename from packages/lib/src/effects/ultra.ts rename to packages/lib-internal/src/effects/ultra.ts diff --git a/packages/lib/src/env.ts b/packages/lib-internal/src/env.ts similarity index 100% rename from packages/lib/src/env.ts rename to packages/lib-internal/src/env.ts diff --git a/packages/lib/src/env/README.md b/packages/lib-internal/src/env/README.md similarity index 100% rename from packages/lib/src/env/README.md rename to packages/lib-internal/src/env/README.md diff --git a/packages/lib/src/env/ci.ts b/packages/lib-internal/src/env/ci.ts similarity index 100% rename from packages/lib/src/env/ci.ts rename to packages/lib-internal/src/env/ci.ts diff --git a/packages/lib/src/env/debug.ts b/packages/lib-internal/src/env/debug.ts similarity index 100% rename from packages/lib/src/env/debug.ts rename to packages/lib-internal/src/env/debug.ts diff --git a/packages/lib/src/env/github.ts b/packages/lib-internal/src/env/github.ts similarity index 100% rename from packages/lib/src/env/github.ts rename to packages/lib-internal/src/env/github.ts diff --git a/packages/lib/src/env/helpers.ts b/packages/lib-internal/src/env/helpers.ts similarity index 100% rename from packages/lib/src/env/helpers.ts rename to packages/lib-internal/src/env/helpers.ts diff --git a/packages/lib/src/env/home.ts b/packages/lib-internal/src/env/home.ts similarity index 100% rename from packages/lib/src/env/home.ts rename to packages/lib-internal/src/env/home.ts diff --git a/packages/lib/src/env/locale.ts b/packages/lib-internal/src/env/locale.ts similarity index 100% rename from packages/lib/src/env/locale.ts rename to packages/lib-internal/src/env/locale.ts diff --git a/packages/lib/src/env/node-auth-token.ts b/packages/lib-internal/src/env/node-auth-token.ts similarity index 100% rename from packages/lib/src/env/node-auth-token.ts rename to packages/lib-internal/src/env/node-auth-token.ts diff --git a/packages/lib/src/env/node-env.ts b/packages/lib-internal/src/env/node-env.ts similarity index 100% rename from packages/lib/src/env/node-env.ts rename to packages/lib-internal/src/env/node-env.ts diff --git a/packages/lib/src/env/npm.ts b/packages/lib-internal/src/env/npm.ts similarity index 100% rename from packages/lib/src/env/npm.ts rename to packages/lib-internal/src/env/npm.ts diff --git a/packages/lib/src/env/package-manager.ts b/packages/lib-internal/src/env/package-manager.ts similarity index 100% rename from packages/lib/src/env/package-manager.ts rename to packages/lib-internal/src/env/package-manager.ts diff --git a/packages/lib/src/env/path.ts b/packages/lib-internal/src/env/path.ts similarity index 100% rename from packages/lib/src/env/path.ts rename to packages/lib-internal/src/env/path.ts diff --git a/packages/lib/src/env/pre-commit.ts b/packages/lib-internal/src/env/pre-commit.ts similarity index 100% rename from packages/lib/src/env/pre-commit.ts rename to packages/lib-internal/src/env/pre-commit.ts diff --git a/packages/lib/src/env/rewire.ts b/packages/lib-internal/src/env/rewire.ts similarity index 100% rename from packages/lib/src/env/rewire.ts rename to packages/lib-internal/src/env/rewire.ts diff --git a/packages/lib/src/env/shell.ts b/packages/lib-internal/src/env/shell.ts similarity index 100% rename from packages/lib/src/env/shell.ts rename to 
packages/lib-internal/src/env/shell.ts diff --git a/packages/lib/src/env/socket-cli-shadow.ts b/packages/lib-internal/src/env/socket-cli-shadow.ts similarity index 100% rename from packages/lib/src/env/socket-cli-shadow.ts rename to packages/lib-internal/src/env/socket-cli-shadow.ts diff --git a/packages/lib/src/env/socket-cli.ts b/packages/lib-internal/src/env/socket-cli.ts similarity index 100% rename from packages/lib/src/env/socket-cli.ts rename to packages/lib-internal/src/env/socket-cli.ts diff --git a/packages/lib/src/env/socket.ts b/packages/lib-internal/src/env/socket.ts similarity index 100% rename from packages/lib/src/env/socket.ts rename to packages/lib-internal/src/env/socket.ts diff --git a/packages/lib/src/env/temp-dir.ts b/packages/lib-internal/src/env/temp-dir.ts similarity index 100% rename from packages/lib/src/env/temp-dir.ts rename to packages/lib-internal/src/env/temp-dir.ts diff --git a/packages/lib/src/env/term.ts b/packages/lib-internal/src/env/term.ts similarity index 100% rename from packages/lib/src/env/term.ts rename to packages/lib-internal/src/env/term.ts diff --git a/packages/lib/src/env/test.ts b/packages/lib-internal/src/env/test.ts similarity index 100% rename from packages/lib/src/env/test.ts rename to packages/lib-internal/src/env/test.ts diff --git a/packages/lib/src/env/windows.ts b/packages/lib-internal/src/env/windows.ts similarity index 100% rename from packages/lib/src/env/windows.ts rename to packages/lib-internal/src/env/windows.ts diff --git a/packages/lib/src/env/xdg.ts b/packages/lib-internal/src/env/xdg.ts similarity index 100% rename from packages/lib/src/env/xdg.ts rename to packages/lib-internal/src/env/xdg.ts diff --git a/packages/lib/src/fs.ts b/packages/lib-internal/src/fs.ts similarity index 100% rename from packages/lib/src/fs.ts rename to packages/lib-internal/src/fs.ts diff --git a/packages/lib/src/functions.ts b/packages/lib-internal/src/functions.ts similarity index 100% rename from packages/lib/src/functions.ts rename to packages/lib-internal/src/functions.ts diff --git a/packages/lib/src/git.ts b/packages/lib-internal/src/git.ts similarity index 100% rename from packages/lib/src/git.ts rename to packages/lib-internal/src/git.ts diff --git a/packages/lib/src/github.ts b/packages/lib-internal/src/github.ts similarity index 100% rename from packages/lib/src/github.ts rename to packages/lib-internal/src/github.ts diff --git a/packages/lib/src/globs.ts b/packages/lib-internal/src/globs.ts similarity index 100% rename from packages/lib/src/globs.ts rename to packages/lib-internal/src/globs.ts diff --git a/packages/lib/src/http-request.ts b/packages/lib-internal/src/http-request.ts similarity index 100% rename from packages/lib/src/http-request.ts rename to packages/lib-internal/src/http-request.ts diff --git a/packages/lib/src/ipc.ts b/packages/lib-internal/src/ipc.ts similarity index 100% rename from packages/lib/src/ipc.ts rename to packages/lib-internal/src/ipc.ts diff --git a/packages/lib/src/json.ts b/packages/lib-internal/src/json.ts similarity index 100% rename from packages/lib/src/json.ts rename to packages/lib-internal/src/json.ts diff --git a/packages/lib/src/lifecycle-script-names.ts b/packages/lib-internal/src/lifecycle-script-names.ts similarity index 100% rename from packages/lib/src/lifecycle-script-names.ts rename to packages/lib-internal/src/lifecycle-script-names.ts diff --git a/packages/lib/src/links/index.ts b/packages/lib-internal/src/links/index.ts similarity index 100% rename from 
packages/lib/src/links/index.ts
rename to packages/lib-internal/src/links/index.ts
diff --git a/packages/lib/src/logger.ts b/packages/lib-internal/src/logger.ts
similarity index 100%
rename from packages/lib/src/logger.ts
rename to packages/lib-internal/src/logger.ts
diff --git a/packages/lib/src/maintained-node-versions.ts b/packages/lib-internal/src/maintained-node-versions.ts
similarity index 100%
rename from packages/lib/src/maintained-node-versions.ts
rename to packages/lib-internal/src/maintained-node-versions.ts
diff --git a/packages/lib/src/memoization.ts b/packages/lib-internal/src/memoization.ts
similarity index 100%
rename from packages/lib/src/memoization.ts
rename to packages/lib-internal/src/memoization.ts
diff --git a/packages/lib/src/objects.ts b/packages/lib-internal/src/objects.ts
similarity index 100%
rename from packages/lib/src/objects.ts
rename to packages/lib-internal/src/objects.ts
diff --git a/packages/lib/src/package-default-node-range.ts b/packages/lib-internal/src/package-default-node-range.ts
similarity index 93%
rename from packages/lib/src/package-default-node-range.ts
rename to packages/lib-internal/src/package-default-node-range.ts
index 4c5de27f5..a4df60ae5 100644
--- a/packages/lib/src/package-default-node-range.ts
+++ b/packages/lib-internal/src/package-default-node-range.ts
@@ -2,7 +2,7 @@
  * @fileoverview Default Node.js version range for packages.
  */
 
-const maintainedNodeVersions = require('#lib/maintained-node-versions')
+const maintainedNodeVersions = require('#lib/maintained-node-versions').default
 const semver = require('semver')
 
 export default `>=${semver.parse(maintainedNodeVersions.last).major}`
diff --git a/packages/lib/src/package-default-socket-categories.ts b/packages/lib-internal/src/package-default-socket-categories.ts
similarity index 100%
rename from packages/lib/src/package-default-socket-categories.ts
rename to packages/lib-internal/src/package-default-socket-categories.ts
diff --git a/packages/lib/src/package-extensions.ts b/packages/lib-internal/src/package-extensions.ts
similarity index 100%
rename from packages/lib/src/package-extensions.ts
rename to packages/lib-internal/src/package-extensions.ts
diff --git a/packages/lib/src/packages.ts b/packages/lib-internal/src/packages.ts
similarity index 100%
rename from packages/lib/src/packages.ts
rename to packages/lib-internal/src/packages.ts
diff --git a/packages/lib/src/packages/README.md b/packages/lib-internal/src/packages/README.md
similarity index 100%
rename from packages/lib/src/packages/README.md
rename to packages/lib-internal/src/packages/README.md
diff --git a/packages/lib/src/packages/editable.ts b/packages/lib-internal/src/packages/editable.ts
similarity index 100%
rename from packages/lib/src/packages/editable.ts
rename to packages/lib-internal/src/packages/editable.ts
diff --git a/packages/lib/src/packages/exports.ts b/packages/lib-internal/src/packages/exports.ts
similarity index 100%
rename from packages/lib/src/packages/exports.ts
rename to packages/lib-internal/src/packages/exports.ts
diff --git a/packages/lib/src/packages/isolation.ts b/packages/lib-internal/src/packages/isolation.ts
similarity index 100%
rename from packages/lib/src/packages/isolation.ts
rename to packages/lib-internal/src/packages/isolation.ts
diff --git a/packages/lib/src/packages/licenses.ts b/packages/lib-internal/src/packages/licenses.ts
similarity index 100%
rename from packages/lib/src/packages/licenses.ts
rename to packages/lib-internal/src/packages/licenses.ts
diff --git
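For context on the `.default` unwrap in the package-default-node-range hunk above: when a module authored with `export default` is compiled to CommonJS, the exported value lands on the `default` property of `module.exports`, so a bare `require()` yields the namespace object rather than the value itself. A minimal sketch of that interop (the compiled shape and version value are illustrative, not the actual build output):

```ts
// Roughly what `export default { last: '22.0.0' }` compiles to in CommonJS:
const compiled = { __esModule: true, default: { last: '22.0.0' } }

// A bare require() hands back the namespace object...
const namespace = compiled
// ...so the consumer must unwrap explicitly, as the hunk now does:
const maintainedNodeVersions = compiled.default
console.log(`>=${maintainedNodeVersions.last.split('.')[0]}`) // '>=22'
```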
a/packages/lib/src/packages/manifest.ts b/packages/lib-internal/src/packages/manifest.ts similarity index 100% rename from packages/lib/src/packages/manifest.ts rename to packages/lib-internal/src/packages/manifest.ts diff --git a/packages/lib/src/packages/normalize.ts b/packages/lib-internal/src/packages/normalize.ts similarity index 100% rename from packages/lib/src/packages/normalize.ts rename to packages/lib-internal/src/packages/normalize.ts diff --git a/packages/lib/src/packages/operations.ts b/packages/lib-internal/src/packages/operations.ts similarity index 100% rename from packages/lib/src/packages/operations.ts rename to packages/lib-internal/src/packages/operations.ts diff --git a/packages/lib/src/packages/paths.ts b/packages/lib-internal/src/packages/paths.ts similarity index 100% rename from packages/lib/src/packages/paths.ts rename to packages/lib-internal/src/packages/paths.ts diff --git a/packages/lib/src/packages/provenance.ts b/packages/lib-internal/src/packages/provenance.ts similarity index 100% rename from packages/lib/src/packages/provenance.ts rename to packages/lib-internal/src/packages/provenance.ts diff --git a/packages/lib/src/packages/specs.ts b/packages/lib-internal/src/packages/specs.ts similarity index 100% rename from packages/lib/src/packages/specs.ts rename to packages/lib-internal/src/packages/specs.ts diff --git a/packages/lib/src/packages/validation.ts b/packages/lib-internal/src/packages/validation.ts similarity index 100% rename from packages/lib/src/packages/validation.ts rename to packages/lib-internal/src/packages/validation.ts diff --git a/packages/lib/src/path.ts b/packages/lib-internal/src/path.ts similarity index 100% rename from packages/lib/src/path.ts rename to packages/lib-internal/src/path.ts diff --git a/packages/lib/src/paths.ts b/packages/lib-internal/src/paths.ts similarity index 100% rename from packages/lib/src/paths.ts rename to packages/lib-internal/src/paths.ts diff --git a/packages/lib/src/paths/rewire.ts b/packages/lib-internal/src/paths/rewire.ts similarity index 100% rename from packages/lib/src/paths/rewire.ts rename to packages/lib-internal/src/paths/rewire.ts diff --git a/packages/lib/src/performance.ts b/packages/lib-internal/src/performance.ts similarity index 100% rename from packages/lib/src/performance.ts rename to packages/lib-internal/src/performance.ts diff --git a/packages/lib/src/process-lock.ts b/packages/lib-internal/src/process-lock.ts similarity index 100% rename from packages/lib/src/process-lock.ts rename to packages/lib-internal/src/process-lock.ts diff --git a/packages/lib/src/promise-queue.ts b/packages/lib-internal/src/promise-queue.ts similarity index 100% rename from packages/lib/src/promise-queue.ts rename to packages/lib-internal/src/promise-queue.ts diff --git a/packages/lib/src/promises.ts b/packages/lib-internal/src/promises.ts similarity index 100% rename from packages/lib/src/promises.ts rename to packages/lib-internal/src/promises.ts diff --git a/packages/lib/src/regexps.ts b/packages/lib-internal/src/regexps.ts similarity index 100% rename from packages/lib/src/regexps.ts rename to packages/lib-internal/src/regexps.ts diff --git a/packages/lib/src/sea.ts b/packages/lib-internal/src/sea.ts similarity index 100% rename from packages/lib/src/sea.ts rename to packages/lib-internal/src/sea.ts diff --git a/packages/lib/src/shadow.ts b/packages/lib-internal/src/shadow.ts similarity index 100% rename from packages/lib/src/shadow.ts rename to packages/lib-internal/src/shadow.ts diff --git 
a/packages/lib/src/signal-exit.ts b/packages/lib-internal/src/signal-exit.ts similarity index 100% rename from packages/lib/src/signal-exit.ts rename to packages/lib-internal/src/signal-exit.ts diff --git a/packages/lib/src/sorts.ts b/packages/lib-internal/src/sorts.ts similarity index 100% rename from packages/lib/src/sorts.ts rename to packages/lib-internal/src/sorts.ts diff --git a/packages/lib/src/spawn.ts b/packages/lib-internal/src/spawn.ts similarity index 100% rename from packages/lib/src/spawn.ts rename to packages/lib-internal/src/spawn.ts diff --git a/packages/lib/src/spinner.ts b/packages/lib-internal/src/spinner.ts similarity index 100% rename from packages/lib/src/spinner.ts rename to packages/lib-internal/src/spinner.ts diff --git a/packages/lib/src/ssri.ts b/packages/lib-internal/src/ssri.ts similarity index 100% rename from packages/lib/src/ssri.ts rename to packages/lib-internal/src/ssri.ts diff --git a/packages/lib/src/stdio/clear.ts b/packages/lib-internal/src/stdio/clear.ts similarity index 100% rename from packages/lib/src/stdio/clear.ts rename to packages/lib-internal/src/stdio/clear.ts diff --git a/packages/lib/src/stdio/divider.ts b/packages/lib-internal/src/stdio/divider.ts similarity index 100% rename from packages/lib/src/stdio/divider.ts rename to packages/lib-internal/src/stdio/divider.ts diff --git a/packages/lib/src/stdio/footer.ts b/packages/lib-internal/src/stdio/footer.ts similarity index 100% rename from packages/lib/src/stdio/footer.ts rename to packages/lib-internal/src/stdio/footer.ts diff --git a/packages/lib/src/stdio/header.ts b/packages/lib-internal/src/stdio/header.ts similarity index 100% rename from packages/lib/src/stdio/header.ts rename to packages/lib-internal/src/stdio/header.ts diff --git a/packages/lib/src/stdio/mask.ts b/packages/lib-internal/src/stdio/mask.ts similarity index 100% rename from packages/lib/src/stdio/mask.ts rename to packages/lib-internal/src/stdio/mask.ts diff --git a/packages/lib/src/stdio/progress.ts b/packages/lib-internal/src/stdio/progress.ts similarity index 100% rename from packages/lib/src/stdio/progress.ts rename to packages/lib-internal/src/stdio/progress.ts diff --git a/packages/lib/src/stdio/prompts.ts b/packages/lib-internal/src/stdio/prompts.ts similarity index 100% rename from packages/lib/src/stdio/prompts.ts rename to packages/lib-internal/src/stdio/prompts.ts diff --git a/packages/lib/src/stdio/stderr.ts b/packages/lib-internal/src/stdio/stderr.ts similarity index 100% rename from packages/lib/src/stdio/stderr.ts rename to packages/lib-internal/src/stdio/stderr.ts diff --git a/packages/lib/src/stdio/stdout.ts b/packages/lib-internal/src/stdio/stdout.ts similarity index 100% rename from packages/lib/src/stdio/stdout.ts rename to packages/lib-internal/src/stdio/stdout.ts diff --git a/packages/lib/src/streams.ts b/packages/lib-internal/src/streams.ts similarity index 100% rename from packages/lib/src/streams.ts rename to packages/lib-internal/src/streams.ts diff --git a/packages/lib/src/strings.ts b/packages/lib-internal/src/strings.ts similarity index 100% rename from packages/lib/src/strings.ts rename to packages/lib-internal/src/strings.ts diff --git a/packages/lib/src/suppress-warnings.ts b/packages/lib-internal/src/suppress-warnings.ts similarity index 100% rename from packages/lib/src/suppress-warnings.ts rename to packages/lib-internal/src/suppress-warnings.ts diff --git a/packages/lib/src/tables.ts b/packages/lib-internal/src/tables.ts similarity index 100% rename from packages/lib/src/tables.ts 
rename to packages/lib-internal/src/tables.ts
diff --git a/packages/lib/src/temporary-executor.ts b/packages/lib-internal/src/temporary-executor.ts
similarity index 100%
rename from packages/lib/src/temporary-executor.ts
rename to packages/lib-internal/src/temporary-executor.ts
diff --git a/packages/lib/src/themes/context.ts b/packages/lib-internal/src/themes/context.ts
similarity index 100%
rename from packages/lib/src/themes/context.ts
rename to packages/lib-internal/src/themes/context.ts
diff --git a/packages/lib/src/themes/index.ts b/packages/lib-internal/src/themes/index.ts
similarity index 100%
rename from packages/lib/src/themes/index.ts
rename to packages/lib-internal/src/themes/index.ts
diff --git a/packages/lib/src/themes/themes.ts b/packages/lib-internal/src/themes/themes.ts
similarity index 100%
rename from packages/lib/src/themes/themes.ts
rename to packages/lib-internal/src/themes/themes.ts
diff --git a/packages/lib/src/themes/types.ts b/packages/lib-internal/src/themes/types.ts
similarity index 100%
rename from packages/lib/src/themes/types.ts
rename to packages/lib-internal/src/themes/types.ts
diff --git a/packages/lib/src/themes/utils.ts b/packages/lib-internal/src/themes/utils.ts
similarity index 100%
rename from packages/lib/src/themes/utils.ts
rename to packages/lib-internal/src/themes/utils.ts
diff --git a/packages/lib/src/types.ts b/packages/lib-internal/src/types.ts
similarity index 100%
rename from packages/lib/src/types.ts
rename to packages/lib-internal/src/types.ts
diff --git a/packages/lib/src/url.ts b/packages/lib-internal/src/url.ts
similarity index 100%
rename from packages/lib/src/url.ts
rename to packages/lib-internal/src/url.ts
diff --git a/packages/lib/src/utils/get-ipc.ts b/packages/lib-internal/src/utils/get-ipc.ts
similarity index 100%
rename from packages/lib/src/utils/get-ipc.ts
rename to packages/lib-internal/src/utils/get-ipc.ts
diff --git a/packages/lib/src/validation/json-parser.ts b/packages/lib-internal/src/validation/json-parser.ts
similarity index 100%
rename from packages/lib/src/validation/json-parser.ts
rename to packages/lib-internal/src/validation/json-parser.ts
diff --git a/packages/lib/src/validation/types.ts b/packages/lib-internal/src/validation/types.ts
similarity index 100%
rename from packages/lib/src/validation/types.ts
rename to packages/lib-internal/src/validation/types.ts
diff --git a/packages/lib/src/versions.ts b/packages/lib-internal/src/versions.ts
similarity index 99%
rename from packages/lib/src/versions.ts
rename to packages/lib-internal/src/versions.ts
index 9be0b3fcb..ee4698e16 100644
--- a/packages/lib/src/versions.ts
+++ b/packages/lib-internal/src/versions.ts
@@ -201,6 +201,7 @@ export function versionDiff(
   | 'patch'
   | 'prepatch'
   | 'prerelease'
+  | 'release'
   | undefined {
   try {
     return getSemver().diff(version1, version2) || undefined
diff --git a/packages/lib/src/words.ts b/packages/lib-internal/src/words.ts
similarity index 100%
rename from packages/lib/src/words.ts
rename to packages/lib-internal/src/words.ts
diff --git a/packages/lib/src/zod.ts b/packages/lib-internal/src/zod.ts
similarity index 100%
rename from packages/lib/src/zod.ts
rename to packages/lib-internal/src/zod.ts
diff --git a/packages/lib/test/abort.test.ts b/packages/lib-internal/test/abort.test.ts
similarity index 100%
rename from packages/lib/test/abort.test.ts
rename to packages/lib-internal/test/abort.test.ts
diff --git a/packages/lib/test/agent.test.ts b/packages/lib-internal/test/agent.test.ts
similarity index 100%
rename from
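On the `versionDiff` hunk above, which widens the declared result union: the wrapper forwards `semver.diff()`, which reports the most significant part that changed between two versions and `null` when they are equal (mapped to `undefined` here). The added `'release'` member accounts for a result the underlying semver implementation can report; the values below are long-standing semver behavior. A small usage sketch with illustrative versions:

```ts
import semver from 'semver'

semver.diff('1.2.3', '1.3.0')      // 'minor'
semver.diff('1.2.3', '2.0.0-rc.1') // 'premajor'
semver.diff('1.2.3', '1.2.3')      // null — the wrapper maps this to undefined
```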
packages/lib/test/agent.test.ts rename to packages/lib-internal/test/agent.test.ts diff --git a/packages/lib/test/ansi.test.ts b/packages/lib-internal/test/ansi.test.ts similarity index 100% rename from packages/lib/test/ansi.test.ts rename to packages/lib-internal/test/ansi.test.ts diff --git a/packages/lib/test/argv-flags.test.ts b/packages/lib-internal/test/argv-flags.test.ts similarity index 100% rename from packages/lib/test/argv-flags.test.ts rename to packages/lib-internal/test/argv-flags.test.ts diff --git a/packages/lib/test/argv-parse.test.ts b/packages/lib-internal/test/argv-parse.test.ts similarity index 100% rename from packages/lib/test/argv-parse.test.ts rename to packages/lib-internal/test/argv-parse.test.ts diff --git a/packages/lib/test/argv/flags.test.ts b/packages/lib-internal/test/argv/flags.test.ts similarity index 100% rename from packages/lib/test/argv/flags.test.ts rename to packages/lib-internal/test/argv/flags.test.ts diff --git a/packages/lib/test/argv/parse.test.ts b/packages/lib-internal/test/argv/parse.test.ts similarity index 100% rename from packages/lib/test/argv/parse.test.ts rename to packages/lib-internal/test/argv/parse.test.ts diff --git a/packages/lib/test/arrays.test.ts b/packages/lib-internal/test/arrays.test.ts similarity index 100% rename from packages/lib/test/arrays.test.ts rename to packages/lib-internal/test/arrays.test.ts diff --git a/packages/lib/test/bin.test.ts b/packages/lib-internal/test/bin.test.ts similarity index 100% rename from packages/lib/test/bin.test.ts rename to packages/lib-internal/test/bin.test.ts diff --git a/packages/lib/test/build-externals.test.ts b/packages/lib-internal/test/build-externals.test.ts similarity index 100% rename from packages/lib/test/build-externals.test.ts rename to packages/lib-internal/test/build-externals.test.ts diff --git a/packages/lib/test/cacache.test.ts b/packages/lib-internal/test/cacache.test.ts similarity index 100% rename from packages/lib/test/cacache.test.ts rename to packages/lib-internal/test/cacache.test.ts diff --git a/packages/lib/test/cache-with-ttl.test.ts b/packages/lib-internal/test/cache-with-ttl.test.ts similarity index 100% rename from packages/lib/test/cache-with-ttl.test.ts rename to packages/lib-internal/test/cache-with-ttl.test.ts diff --git a/packages/lib/test/constants/agents.test.ts b/packages/lib-internal/test/constants/agents.test.ts similarity index 100% rename from packages/lib/test/constants/agents.test.ts rename to packages/lib-internal/test/constants/agents.test.ts diff --git a/packages/lib/test/constants/core.test.ts b/packages/lib-internal/test/constants/core.test.ts similarity index 100% rename from packages/lib/test/constants/core.test.ts rename to packages/lib-internal/test/constants/core.test.ts diff --git a/packages/lib/test/constants/encoding.test.ts b/packages/lib-internal/test/constants/encoding.test.ts similarity index 100% rename from packages/lib/test/constants/encoding.test.ts rename to packages/lib-internal/test/constants/encoding.test.ts diff --git a/packages/lib/test/constants/github.test.ts b/packages/lib-internal/test/constants/github.test.ts similarity index 100% rename from packages/lib/test/constants/github.test.ts rename to packages/lib-internal/test/constants/github.test.ts diff --git a/packages/lib/test/constants/licenses.test.ts b/packages/lib-internal/test/constants/licenses.test.ts similarity index 100% rename from packages/lib/test/constants/licenses.test.ts rename to packages/lib-internal/test/constants/licenses.test.ts diff --git 
a/packages/lib/test/constants/node.test.ts b/packages/lib-internal/test/constants/node.test.ts similarity index 100% rename from packages/lib/test/constants/node.test.ts rename to packages/lib-internal/test/constants/node.test.ts diff --git a/packages/lib/test/constants/packages.test.ts b/packages/lib-internal/test/constants/packages.test.ts similarity index 100% rename from packages/lib/test/constants/packages.test.ts rename to packages/lib-internal/test/constants/packages.test.ts diff --git a/packages/lib/test/constants/paths.test.ts b/packages/lib-internal/test/constants/paths.test.ts similarity index 100% rename from packages/lib/test/constants/paths.test.ts rename to packages/lib-internal/test/constants/paths.test.ts diff --git a/packages/lib/test/constants/platform.test.ts b/packages/lib-internal/test/constants/platform.test.ts similarity index 100% rename from packages/lib/test/constants/platform.test.ts rename to packages/lib-internal/test/constants/platform.test.ts diff --git a/packages/lib/test/constants/process.test.ts b/packages/lib-internal/test/constants/process.test.ts similarity index 100% rename from packages/lib/test/constants/process.test.ts rename to packages/lib-internal/test/constants/process.test.ts diff --git a/packages/lib/test/constants/socket.test.ts b/packages/lib-internal/test/constants/socket.test.ts similarity index 100% rename from packages/lib/test/constants/socket.test.ts rename to packages/lib-internal/test/constants/socket.test.ts diff --git a/packages/lib/test/constants/testing.test.ts b/packages/lib-internal/test/constants/testing.test.ts similarity index 100% rename from packages/lib/test/constants/testing.test.ts rename to packages/lib-internal/test/constants/testing.test.ts diff --git a/packages/lib/test/constants/time.test.ts b/packages/lib-internal/test/constants/time.test.ts similarity index 100% rename from packages/lib/test/constants/time.test.ts rename to packages/lib-internal/test/constants/time.test.ts diff --git a/packages/lib/test/constants/typescript.test.ts b/packages/lib-internal/test/constants/typescript.test.ts similarity index 100% rename from packages/lib/test/constants/typescript.test.ts rename to packages/lib-internal/test/constants/typescript.test.ts diff --git a/packages/lib/test/debug.test.ts b/packages/lib-internal/test/debug.test.ts similarity index 100% rename from packages/lib/test/debug.test.ts rename to packages/lib-internal/test/debug.test.ts diff --git a/packages/lib/test/dlx-binary.test.ts b/packages/lib-internal/test/dlx-binary.test.ts similarity index 100% rename from packages/lib/test/dlx-binary.test.ts rename to packages/lib-internal/test/dlx-binary.test.ts diff --git a/packages/lib/test/dlx-manifest.test.ts b/packages/lib-internal/test/dlx-manifest.test.ts similarity index 100% rename from packages/lib/test/dlx-manifest.test.ts rename to packages/lib-internal/test/dlx-manifest.test.ts diff --git a/packages/lib/test/dlx-package.test.ts b/packages/lib-internal/test/dlx-package.test.ts similarity index 100% rename from packages/lib/test/dlx-package.test.ts rename to packages/lib-internal/test/dlx-package.test.ts diff --git a/packages/lib/test/dlx.test.ts b/packages/lib-internal/test/dlx.test.ts similarity index 100% rename from packages/lib/test/dlx.test.ts rename to packages/lib-internal/test/dlx.test.ts diff --git a/packages/lib/test/effects/pulse-frames.test.ts b/packages/lib-internal/test/effects/pulse-frames.test.ts similarity index 100% rename from packages/lib/test/effects/pulse-frames.test.ts rename to 
packages/lib-internal/test/effects/pulse-frames.test.ts diff --git a/packages/lib/test/effects/text-shimmer.test.ts b/packages/lib-internal/test/effects/text-shimmer.test.ts similarity index 100% rename from packages/lib/test/effects/text-shimmer.test.ts rename to packages/lib-internal/test/effects/text-shimmer.test.ts diff --git a/packages/lib/test/effects/ultra.test.ts b/packages/lib-internal/test/effects/ultra.test.ts similarity index 100% rename from packages/lib/test/effects/ultra.test.ts rename to packages/lib-internal/test/effects/ultra.test.ts diff --git a/packages/lib/test/env.test.ts b/packages/lib-internal/test/env.test.ts similarity index 100% rename from packages/lib/test/env.test.ts rename to packages/lib-internal/test/env.test.ts diff --git a/packages/lib/test/env/ci.test.ts b/packages/lib-internal/test/env/ci.test.ts similarity index 100% rename from packages/lib/test/env/ci.test.ts rename to packages/lib-internal/test/env/ci.test.ts diff --git a/packages/lib/test/env/debug.test.ts b/packages/lib-internal/test/env/debug.test.ts similarity index 100% rename from packages/lib/test/env/debug.test.ts rename to packages/lib-internal/test/env/debug.test.ts diff --git a/packages/lib/test/env/github.test.ts b/packages/lib-internal/test/env/github.test.ts similarity index 100% rename from packages/lib/test/env/github.test.ts rename to packages/lib-internal/test/env/github.test.ts diff --git a/packages/lib/test/env/helpers.test.ts b/packages/lib-internal/test/env/helpers.test.ts similarity index 100% rename from packages/lib/test/env/helpers.test.ts rename to packages/lib-internal/test/env/helpers.test.ts diff --git a/packages/lib/test/env/home.test.ts b/packages/lib-internal/test/env/home.test.ts similarity index 100% rename from packages/lib/test/env/home.test.ts rename to packages/lib-internal/test/env/home.test.ts diff --git a/packages/lib/test/env/locale.test.ts b/packages/lib-internal/test/env/locale.test.ts similarity index 100% rename from packages/lib/test/env/locale.test.ts rename to packages/lib-internal/test/env/locale.test.ts diff --git a/packages/lib/test/env/node-auth-token.test.ts b/packages/lib-internal/test/env/node-auth-token.test.ts similarity index 100% rename from packages/lib/test/env/node-auth-token.test.ts rename to packages/lib-internal/test/env/node-auth-token.test.ts diff --git a/packages/lib/test/env/node-env.test.ts b/packages/lib-internal/test/env/node-env.test.ts similarity index 100% rename from packages/lib/test/env/node-env.test.ts rename to packages/lib-internal/test/env/node-env.test.ts diff --git a/packages/lib/test/env/npm.test.ts b/packages/lib-internal/test/env/npm.test.ts similarity index 100% rename from packages/lib/test/env/npm.test.ts rename to packages/lib-internal/test/env/npm.test.ts diff --git a/packages/lib/test/env/path.test.ts b/packages/lib-internal/test/env/path.test.ts similarity index 100% rename from packages/lib/test/env/path.test.ts rename to packages/lib-internal/test/env/path.test.ts diff --git a/packages/lib/test/env/pre-commit.test.ts b/packages/lib-internal/test/env/pre-commit.test.ts similarity index 100% rename from packages/lib/test/env/pre-commit.test.ts rename to packages/lib-internal/test/env/pre-commit.test.ts diff --git a/packages/lib/test/env/rewire.test.ts b/packages/lib-internal/test/env/rewire.test.ts similarity index 100% rename from packages/lib/test/env/rewire.test.ts rename to packages/lib-internal/test/env/rewire.test.ts diff --git a/packages/lib/test/env/shell.test.ts 
b/packages/lib-internal/test/env/shell.test.ts similarity index 100% rename from packages/lib/test/env/shell.test.ts rename to packages/lib-internal/test/env/shell.test.ts diff --git a/packages/lib/test/env/socket-cli-shadow.test.ts b/packages/lib-internal/test/env/socket-cli-shadow.test.ts similarity index 100% rename from packages/lib/test/env/socket-cli-shadow.test.ts rename to packages/lib-internal/test/env/socket-cli-shadow.test.ts diff --git a/packages/lib/test/env/socket-cli.test.ts b/packages/lib-internal/test/env/socket-cli.test.ts similarity index 100% rename from packages/lib/test/env/socket-cli.test.ts rename to packages/lib-internal/test/env/socket-cli.test.ts diff --git a/packages/lib/test/env/socket.test.ts b/packages/lib-internal/test/env/socket.test.ts similarity index 100% rename from packages/lib/test/env/socket.test.ts rename to packages/lib-internal/test/env/socket.test.ts diff --git a/packages/lib/test/env/temp-dir.test.ts b/packages/lib-internal/test/env/temp-dir.test.ts similarity index 100% rename from packages/lib/test/env/temp-dir.test.ts rename to packages/lib-internal/test/env/temp-dir.test.ts diff --git a/packages/lib/test/env/term.test.ts b/packages/lib-internal/test/env/term.test.ts similarity index 100% rename from packages/lib/test/env/term.test.ts rename to packages/lib-internal/test/env/term.test.ts diff --git a/packages/lib/test/env/test.test.ts b/packages/lib-internal/test/env/test.test.ts similarity index 100% rename from packages/lib/test/env/test.test.ts rename to packages/lib-internal/test/env/test.test.ts diff --git a/packages/lib/test/env/windows.test.ts b/packages/lib-internal/test/env/windows.test.ts similarity index 100% rename from packages/lib/test/env/windows.test.ts rename to packages/lib-internal/test/env/windows.test.ts diff --git a/packages/lib/test/env/xdg.test.ts b/packages/lib-internal/test/env/xdg.test.ts similarity index 100% rename from packages/lib/test/env/xdg.test.ts rename to packages/lib-internal/test/env/xdg.test.ts diff --git a/packages/lib/test/fs-additional.test.ts b/packages/lib-internal/test/fs-additional.test.ts similarity index 100% rename from packages/lib/test/fs-additional.test.ts rename to packages/lib-internal/test/fs-additional.test.ts diff --git a/packages/lib/test/fs.test.ts b/packages/lib-internal/test/fs.test.ts similarity index 100% rename from packages/lib/test/fs.test.ts rename to packages/lib-internal/test/fs.test.ts diff --git a/packages/lib/test/functions.test.ts b/packages/lib-internal/test/functions.test.ts similarity index 100% rename from packages/lib/test/functions.test.ts rename to packages/lib-internal/test/functions.test.ts diff --git a/packages/lib/test/git-extended.test.ts b/packages/lib-internal/test/git-extended.test.ts similarity index 100% rename from packages/lib/test/git-extended.test.ts rename to packages/lib-internal/test/git-extended.test.ts diff --git a/packages/lib/test/git.test.ts b/packages/lib-internal/test/git.test.ts similarity index 100% rename from packages/lib/test/git.test.ts rename to packages/lib-internal/test/git.test.ts diff --git a/packages/lib/test/github.test.ts b/packages/lib-internal/test/github.test.ts similarity index 100% rename from packages/lib/test/github.test.ts rename to packages/lib-internal/test/github.test.ts diff --git a/packages/lib/test/globs.test.ts b/packages/lib-internal/test/globs.test.ts similarity index 100% rename from packages/lib/test/globs.test.ts rename to packages/lib-internal/test/globs.test.ts diff --git 
a/packages/lib/test/http-request.test.ts b/packages/lib-internal/test/http-request.test.ts similarity index 100% rename from packages/lib/test/http-request.test.ts rename to packages/lib-internal/test/http-request.test.ts diff --git a/packages/lib/test/ipc.test.ts b/packages/lib-internal/test/ipc.test.ts similarity index 100% rename from packages/lib/test/ipc.test.ts rename to packages/lib-internal/test/ipc.test.ts diff --git a/packages/lib/test/isolated/logger.test.ts b/packages/lib-internal/test/isolated/logger.test.ts similarity index 100% rename from packages/lib/test/isolated/logger.test.ts rename to packages/lib-internal/test/isolated/logger.test.ts diff --git a/packages/lib/test/isolated/themes.test.ts b/packages/lib-internal/test/isolated/themes.test.ts similarity index 100% rename from packages/lib/test/isolated/themes.test.ts rename to packages/lib-internal/test/isolated/themes.test.ts diff --git a/packages/lib/test/json.test.ts b/packages/lib-internal/test/json.test.ts similarity index 100% rename from packages/lib/test/json.test.ts rename to packages/lib-internal/test/json.test.ts diff --git a/packages/lib/test/logger-advanced.test.ts b/packages/lib-internal/test/logger-advanced.test.ts similarity index 100% rename from packages/lib/test/logger-advanced.test.ts rename to packages/lib-internal/test/logger-advanced.test.ts diff --git a/packages/lib/test/logger-core.test.ts b/packages/lib-internal/test/logger-core.test.ts similarity index 100% rename from packages/lib/test/logger-core.test.ts rename to packages/lib-internal/test/logger-core.test.ts diff --git a/packages/lib/test/logger-default.test.ts b/packages/lib-internal/test/logger-default.test.ts similarity index 100% rename from packages/lib/test/logger-default.test.ts rename to packages/lib-internal/test/logger-default.test.ts diff --git a/packages/lib/test/maintained-node-versions.test.ts b/packages/lib-internal/test/maintained-node-versions.test.ts similarity index 100% rename from packages/lib/test/maintained-node-versions.test.ts rename to packages/lib-internal/test/maintained-node-versions.test.ts diff --git a/packages/lib/test/memoization.test.ts b/packages/lib-internal/test/memoization.test.ts similarity index 100% rename from packages/lib/test/memoization.test.ts rename to packages/lib-internal/test/memoization.test.ts diff --git a/packages/lib/test/objects.test.ts b/packages/lib-internal/test/objects.test.ts similarity index 100% rename from packages/lib/test/objects.test.ts rename to packages/lib-internal/test/objects.test.ts diff --git a/packages/lib/test/packages/editable.test.ts b/packages/lib-internal/test/packages/editable.test.ts similarity index 100% rename from packages/lib/test/packages/editable.test.ts rename to packages/lib-internal/test/packages/editable.test.ts diff --git a/packages/lib/test/packages/licenses.test.ts b/packages/lib-internal/test/packages/licenses.test.ts similarity index 100% rename from packages/lib/test/packages/licenses.test.ts rename to packages/lib-internal/test/packages/licenses.test.ts diff --git a/packages/lib/test/packages/operations.test.ts b/packages/lib-internal/test/packages/operations.test.ts similarity index 100% rename from packages/lib/test/packages/operations.test.ts rename to packages/lib-internal/test/packages/operations.test.ts diff --git a/packages/lib/test/packages/paths.test.ts b/packages/lib-internal/test/packages/paths.test.ts similarity index 100% rename from packages/lib/test/packages/paths.test.ts rename to 
packages/lib-internal/test/packages/paths.test.ts diff --git a/packages/lib/test/packages/validation.test.ts b/packages/lib-internal/test/packages/validation.test.ts similarity index 100% rename from packages/lib/test/packages/validation.test.ts rename to packages/lib-internal/test/packages/validation.test.ts diff --git a/packages/lib/test/path.test.ts b/packages/lib-internal/test/path.test.ts similarity index 100% rename from packages/lib/test/path.test.ts rename to packages/lib-internal/test/path.test.ts diff --git a/packages/lib/test/paths.test.ts b/packages/lib-internal/test/paths.test.ts similarity index 100% rename from packages/lib/test/paths.test.ts rename to packages/lib-internal/test/paths.test.ts diff --git a/packages/lib/test/performance.test.ts b/packages/lib-internal/test/performance.test.ts similarity index 100% rename from packages/lib/test/performance.test.ts rename to packages/lib-internal/test/performance.test.ts diff --git a/packages/lib/test/process-lock.test.ts b/packages/lib-internal/test/process-lock.test.ts similarity index 100% rename from packages/lib/test/process-lock.test.ts rename to packages/lib-internal/test/process-lock.test.ts diff --git a/packages/lib/test/promise-queue.test.ts b/packages/lib-internal/test/promise-queue.test.ts similarity index 100% rename from packages/lib/test/promise-queue.test.ts rename to packages/lib-internal/test/promise-queue.test.ts diff --git a/packages/lib/test/promises.test.ts b/packages/lib-internal/test/promises.test.ts similarity index 100% rename from packages/lib/test/promises.test.ts rename to packages/lib-internal/test/promises.test.ts diff --git a/packages/lib/test/prompts.test.ts b/packages/lib-internal/test/prompts.test.ts similarity index 100% rename from packages/lib/test/prompts.test.ts rename to packages/lib-internal/test/prompts.test.ts diff --git a/packages/lib/test/regexps.test.ts b/packages/lib-internal/test/regexps.test.ts similarity index 100% rename from packages/lib/test/regexps.test.ts rename to packages/lib-internal/test/regexps.test.ts diff --git a/packages/lib/test/sea.test.ts b/packages/lib-internal/test/sea.test.ts similarity index 100% rename from packages/lib/test/sea.test.ts rename to packages/lib-internal/test/sea.test.ts diff --git a/packages/lib/test/shadow.test.ts b/packages/lib-internal/test/shadow.test.ts similarity index 100% rename from packages/lib/test/shadow.test.ts rename to packages/lib-internal/test/shadow.test.ts diff --git a/packages/lib/test/signal-exit.test.ts b/packages/lib-internal/test/signal-exit.test.ts similarity index 100% rename from packages/lib/test/signal-exit.test.ts rename to packages/lib-internal/test/signal-exit.test.ts diff --git a/packages/lib/test/sorts.test.ts b/packages/lib-internal/test/sorts.test.ts similarity index 100% rename from packages/lib/test/sorts.test.ts rename to packages/lib-internal/test/sorts.test.ts diff --git a/packages/lib/test/spawn.test.ts b/packages/lib-internal/test/spawn.test.ts similarity index 100% rename from packages/lib/test/spawn.test.ts rename to packages/lib-internal/test/spawn.test.ts diff --git a/packages/lib/test/spinner.test.ts b/packages/lib-internal/test/spinner.test.ts similarity index 100% rename from packages/lib/test/spinner.test.ts rename to packages/lib-internal/test/spinner.test.ts diff --git a/packages/lib/test/ssri.test.ts b/packages/lib-internal/test/ssri.test.ts similarity index 100% rename from packages/lib/test/ssri.test.ts rename to packages/lib-internal/test/ssri.test.ts diff --git 
a/packages/lib/test/stdio/clear.test.ts b/packages/lib-internal/test/stdio/clear.test.ts similarity index 100% rename from packages/lib/test/stdio/clear.test.ts rename to packages/lib-internal/test/stdio/clear.test.ts diff --git a/packages/lib/test/stdio/footer.test.ts b/packages/lib-internal/test/stdio/footer.test.ts similarity index 100% rename from packages/lib/test/stdio/footer.test.ts rename to packages/lib-internal/test/stdio/footer.test.ts diff --git a/packages/lib/test/stdio/mask.test.ts b/packages/lib-internal/test/stdio/mask.test.ts similarity index 100% rename from packages/lib/test/stdio/mask.test.ts rename to packages/lib-internal/test/stdio/mask.test.ts diff --git a/packages/lib/test/stdio/prompts.test.ts b/packages/lib-internal/test/stdio/prompts.test.ts similarity index 100% rename from packages/lib/test/stdio/prompts.test.ts rename to packages/lib-internal/test/stdio/prompts.test.ts diff --git a/packages/lib/test/stdio/stderr.test.ts b/packages/lib-internal/test/stdio/stderr.test.ts similarity index 100% rename from packages/lib/test/stdio/stderr.test.ts rename to packages/lib-internal/test/stdio/stderr.test.ts diff --git a/packages/lib/test/stdio/stdout.test.ts b/packages/lib-internal/test/stdio/stdout.test.ts similarity index 100% rename from packages/lib/test/stdio/stdout.test.ts rename to packages/lib-internal/test/stdio/stdout.test.ts diff --git a/packages/lib/test/streams.test.ts b/packages/lib-internal/test/streams.test.ts similarity index 100% rename from packages/lib/test/streams.test.ts rename to packages/lib-internal/test/streams.test.ts diff --git a/packages/lib/test/strings.test.ts b/packages/lib-internal/test/strings.test.ts similarity index 100% rename from packages/lib/test/strings.test.ts rename to packages/lib-internal/test/strings.test.ts diff --git a/packages/lib/test/tables.test.ts b/packages/lib-internal/test/tables.test.ts similarity index 100% rename from packages/lib/test/tables.test.ts rename to packages/lib-internal/test/tables.test.ts diff --git a/packages/lib/test/temporary-executor.test.ts b/packages/lib-internal/test/temporary-executor.test.ts similarity index 100% rename from packages/lib/test/temporary-executor.test.ts rename to packages/lib-internal/test/temporary-executor.test.ts diff --git a/packages/lib/test/themes/context.test.ts b/packages/lib-internal/test/themes/context.test.ts similarity index 100% rename from packages/lib/test/themes/context.test.ts rename to packages/lib-internal/test/themes/context.test.ts diff --git a/packages/lib/test/themes/utils.test.ts b/packages/lib-internal/test/themes/utils.test.ts similarity index 100% rename from packages/lib/test/themes/utils.test.ts rename to packages/lib-internal/test/themes/utils.test.ts diff --git a/packages/lib/test/url.test.ts b/packages/lib-internal/test/url.test.ts similarity index 100% rename from packages/lib/test/url.test.ts rename to packages/lib-internal/test/url.test.ts diff --git a/packages/lib/test/utils/get-ipc.test.ts b/packages/lib-internal/test/utils/get-ipc.test.ts similarity index 100% rename from packages/lib/test/utils/get-ipc.test.ts rename to packages/lib-internal/test/utils/get-ipc.test.ts diff --git a/packages/lib/test/utils/stdio-test-helper.ts b/packages/lib-internal/test/utils/stdio-test-helper.ts similarity index 100% rename from packages/lib/test/utils/stdio-test-helper.ts rename to packages/lib-internal/test/utils/stdio-test-helper.ts diff --git a/packages/lib/test/utils/temp-file-helper.mts b/packages/lib-internal/test/utils/temp-file-helper.mts 
similarity index 100% rename from packages/lib/test/utils/temp-file-helper.mts rename to packages/lib-internal/test/utils/temp-file-helper.mts diff --git a/packages/lib/test/validation/json-parser.test.ts b/packages/lib-internal/test/validation/json-parser.test.ts similarity index 100% rename from packages/lib/test/validation/json-parser.test.ts rename to packages/lib-internal/test/validation/json-parser.test.ts diff --git a/packages/lib/test/versions.test.ts b/packages/lib-internal/test/versions.test.ts similarity index 100% rename from packages/lib/test/versions.test.ts rename to packages/lib-internal/test/versions.test.ts diff --git a/packages/lib/test/words.test.ts b/packages/lib-internal/test/words.test.ts similarity index 100% rename from packages/lib/test/words.test.ts rename to packages/lib-internal/test/words.test.ts diff --git a/packages/lib/test/zod.test.ts b/packages/lib-internal/test/zod.test.ts similarity index 100% rename from packages/lib/test/zod.test.ts rename to packages/lib-internal/test/zod.test.ts diff --git a/packages/lib/tsconfig.dts.json b/packages/lib-internal/tsconfig.dts.json similarity index 100% rename from packages/lib/tsconfig.dts.json rename to packages/lib-internal/tsconfig.dts.json diff --git a/packages/lib/tsconfig.json b/packages/lib-internal/tsconfig.json similarity index 100% rename from packages/lib/tsconfig.json rename to packages/lib-internal/tsconfig.json diff --git a/packages/lib/tsconfig.test.json b/packages/lib-internal/tsconfig.test.json similarity index 100% rename from packages/lib/tsconfig.test.json rename to packages/lib-internal/tsconfig.test.json diff --git a/packages/lib/vitest.config.ts b/packages/lib-internal/vitest.config.ts similarity index 100% rename from packages/lib/vitest.config.ts rename to packages/lib-internal/vitest.config.ts diff --git a/packages/lib/scripts/fix-external-imports.mjs b/packages/lib/scripts/fix-external-imports.mjs deleted file mode 100644 index f3c061c5c..000000000 --- a/packages/lib/scripts/fix-external-imports.mjs +++ /dev/null @@ -1,158 +0,0 @@ -/** - * @fileoverview Fix external package imports to point to dist/external. - * Rewrites require('package') to require('./external/package') for bundled externals. - */ - -import { promises as fs } from 'node:fs' -import path from 'node:path' -import { fileURLToPath } from 'node:url' - -import colors from 'yoctocolors-cjs' - -import { isQuiet } from '#socketsecurity/lib/argv/flags' -import { getDefaultLogger } from '#socketsecurity/lib/logger' - -import { externalPackages, scopedPackages } from './build-externals/config.mjs' - -const logger = getDefaultLogger() -const printCompletedHeader = title => console.log(colors.green(`✓ ${title}`)) - -const __dirname = path.dirname(fileURLToPath(import.meta.url)) -const distDir = path.resolve(__dirname, '..', 'dist') -const distExternalDir = path.join(distDir, 'external') - -// Build list of all external packages to rewrite -const allExternalPackages = [ - ...externalPackages.map(p => p.name), - ...scopedPackages.flatMap(s => { - if (s.name) { - return [`${s.scope}/${s.name}`] - } - if (s.packages) { - return s.packages.map(name => `${s.scope}/${name}`) - } - return [] - }), -] - -/** - * Calculate the relative path from a file to the external directory. 
- * - * @param {string} filePath - The path to the file being processed - * @returns {string} The relative path prefix (e.g., './' or '../') - */ -function getExternalPathPrefix(filePath) { - const dir = path.dirname(filePath) - const relativePath = path.relative(dir, distExternalDir) - // Normalize to forward slashes and ensure it starts with ./ or ../ - const normalized = relativePath.replace(/\\/g, '/') - return normalized.startsWith('.') ? normalized : `./${normalized}` -} - -/** - * Rewrite external package imports in a file. - * - * @param {string} filePath - Path to the file to process - * @param {boolean} verbose - Show individual file fixes - * @returns {Promise} True if file was modified - */ -async function fixFileImports(filePath, verbose = false) { - let content = await fs.readFile(filePath, 'utf8') - let modified = false - - const externalPrefix = getExternalPathPrefix(filePath) - - for (const pkg of allExternalPackages) { - // Escape special regex characters in package name - const escapedPkg = pkg.replace(/[.*+?^${}()|[\]\\]/g, '\\$&') - - // Match require('pkg') or require("pkg") - // Don't match if it's already pointing to ./external/ or ../external/ - const requirePattern = new RegExp( - `require\\((['"])(?!\\.\\.?\\/external\\/)${escapedPkg}\\1\\)`, - 'g', - ) - - if (requirePattern.test(content)) { - // Replace with require('./external/pkg') or require('../external/pkg') - const replacement = `require('${externalPrefix}/${pkg}')` - content = content.replace(requirePattern, replacement) - modified = true - } - } - - if (modified) { - await fs.writeFile(filePath, content) - if (verbose) { - const relativePath = path.relative(distDir, filePath) - console.log(` Fixed ${relativePath}`) - } - } - - return modified -} - -/** - * Process files in a directory and fix external imports. - * - * @param {string} dir - Directory to process - * @param {boolean} verbose - Show individual file fixes - * @returns {Promise} Number of files fixed - */ -async function processDirectory(dir, verbose = false) { - let fixedCount = 0 - - try { - const entries = await fs.readdir(dir, { withFileTypes: true }) - - for (const entry of entries) { - const fullPath = path.join(dir, entry.name) - - // Skip the external directory itself - if (entry.isDirectory() && fullPath === distExternalDir) { - continue - } - - if (entry.isDirectory()) { - fixedCount += await processDirectory(fullPath, verbose) - } else if (entry.isFile() && entry.name.endsWith('.js')) { - const wasFixed = await fixFileImports(fullPath, verbose) - if (wasFixed) { - fixedCount += 1 - } - } - } - } catch (error) { - // Skip directories that don't exist - if (error.code !== 'ENOENT') { - throw error - } - } - - return fixedCount -} - -async function fixExternalImports() { - const verbose = process.argv.includes('--verbose') - const quiet = isQuiet() - - try { - const fixedCount = await processDirectory(distDir, verbose) - - if (!quiet) { - const title = - fixedCount > 0 - ? `External Imports (${fixedCount} file${fixedCount === 1 ? 
'' : 's'})` - : 'External Imports (no changes)' - printCompletedHeader(title) - } - } catch (error) { - logger.error(`Failed to fix external imports: ${error.message}`) - process.exitCode = 1 - } -} - -fixExternalImports().catch(error => { - logger.error(`Build failed: ${error.message || error}`) - process.exitCode = 1 -}) diff --git a/packages/minilm-builder/.gitignore b/packages/minilm-builder/.gitignore deleted file mode 100644 index e651e8e5f..000000000 --- a/packages/minilm-builder/.gitignore +++ /dev/null @@ -1 +0,0 @@ -python/ diff --git a/packages/minilm-builder/README.md b/packages/minilm-builder/README.md deleted file mode 100644 index 9f5aa7309..000000000 --- a/packages/minilm-builder/README.md +++ /dev/null @@ -1,95 +0,0 @@ -# minilm-builder - -MiniLM model conversion and optimization for Socket CLI security analysis. - -## Purpose - -This package converts and optimizes MiniLM models for use in Socket CLI: -- **Model conversion**: Convert PyTorch/Transformers models to ONNX format -- **Quantization**: Apply INT8/INT4 mixed-precision quantization -- **Optimization**: ONNX graph optimizations for inference -- **Expected savings**: 5-10MB per model through quantization - -## Build Process - -The build follows these steps: - -1. **Download models** - Fetch MiniLM models from Hugging Face -2. **Convert to ONNX** - Export models to ONNX format -3. **Apply quantization** - Use mixed-precision INT4/INT8 quantization -4. **Optimize graphs** - Apply ONNX optimization passes -5. **Verify** - Test inference with sample inputs -6. **Export** - Copy to distribution location - -## Usage - -**Build and optimize models:** -```bash -pnpm run build -``` - -**Force rebuild (ignore checkpoints):** -```bash -pnpm run build:force -``` - -**Clean build artifacts:** -```bash -pnpm run clean -``` - -## Configuration - -Build configuration in `scripts/build.mjs`: -- **Models**: List of MiniLM model names to process -- **Quantization strategy**: INT4/INT8 mixed precision settings -- **Optimization level**: ONNX optimization passes - -## Models - -The following MiniLM models are converted and optimized: -- **Sentence transformer**: For embedding text snippets -- **Tokenizer**: Vocabulary and tokenization rules - -## Quantization Strategy - -Mixed-precision quantization reduces model size while maintaining accuracy: -- **Attention layers**: INT8 quantization (higher precision for important computations) -- **Feed-forward layers**: INT4 quantization (lower precision, more compression) -- **Embeddings**: INT8 quantization (preserve token representations) -- **Layer norm**: FP32 (no quantization for normalization layers) - -## Output - -Optimized models are exported to: -- `build/models/minilm.onnx` - Quantized MiniLM model -- `build/models/tokenizer.json` - Tokenizer configuration - -## Checkpoints - -The build uses checkpoints for incremental builds: -- `downloaded` - Models downloaded from Hugging Face -- `converted` - Models converted to ONNX -- `quantized` - Quantization applied -- `optimized` - Graph optimizations applied -- `verified` - Inference tested - -Use `--force` flag to ignore checkpoints and rebuild from scratch. - -## Integration - -This package is used by Socket CLI to provide AI-powered security analysis. The optimized models are embedded in the Socket CLI distribution for offline inference. 
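Returning to the `fix-external-imports.mjs` script deleted above: its core transform rewrites bare `require('pkg')` calls to relative `./external/pkg` paths while skipping requires that already point into `external/`. A reduced sketch of that transform as a pure function (the package name and sample input are illustrative):

```ts
// Core rewrite from the deleted fix-external-imports.mjs, reduced to a
// pure function over one package name.
function rewriteRequires(content: string, pkg: string, prefix: string): string {
  // Escape regex metacharacters in the package name.
  const escaped = pkg.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
  // Match require('pkg') or require("pkg"), but not paths already under
  // ./external/ or ../external/.
  const pattern = new RegExp(
    `require\\((['"])(?!\\.\\.?\\/external\\/)${escaped}\\1\\)`,
    'g',
  )
  return content.replace(pattern, `require('${prefix}/${pkg}')`)
}

rewriteRequires("const semver = require('semver')", 'semver', './external')
// -> "const semver = require('./external/semver')"
```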
- -## Size Comparison - -Per model: -- **Original PyTorch model**: ~66 MB -- **ONNX FP32**: ~33 MB -- **ONNX INT8**: ~17 MB -- **ONNX INT4/INT8 mixed**: ~13 MB (53 MB saved) - -Size savings come from: -1. ONNX format (50% smaller than PyTorch) -2. INT8 quantization (50% smaller than FP32) -3. INT4 quantization (75% smaller than FP32) -4. Graph optimizations (5-10% additional savings) diff --git a/packages/minilm-builder/package.json b/packages/minilm-builder/package.json deleted file mode 100644 index 83ddfaaf1..000000000 --- a/packages/minilm-builder/package.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "name": "@socketsecurity/minilm-builder", - "version": "1.0.0", - "description": "MiniLM model conversion and optimization for Socket CLI", - "type": "module", - "private": true, - "scripts": { - "build": "node scripts/build.mjs", - "build:force": "node scripts/build.mjs --force", - "clean": "node scripts/clean.mjs" - }, - "dependencies": { - "@socketsecurity/build-infra": "workspace:*", - "@socketsecurity/lib": "workspace:*" - } -} diff --git a/packages/minilm-builder/scripts/build.mjs b/packages/minilm-builder/scripts/build.mjs deleted file mode 100755 index 4e055b9a0..000000000 --- a/packages/minilm-builder/scripts/build.mjs +++ /dev/null @@ -1,624 +0,0 @@ -#!/usr/bin/env node -/** - * MiniLM Model Builder - * - * Converts and optimizes MiniLM models for Socket CLI: - * 1. Download models from Hugging Face - * 2. Convert to ONNX format - * 3. Apply INT4/INT8 mixed-precision quantization - * 4. Optimize ONNX graphs - * 5. Verify inference - * 6. Export to distribution location - * - * Usage: - * node scripts/build.mjs # Normal build with checkpoints - * node scripts/build.mjs --force # Force rebuild (ignore checkpoints) - */ - -import { promises as fs } from 'node:fs' -import path from 'node:path' -import { fileURLToPath } from 'node:url' - -import { WIN32 } from '@socketsecurity/lib/constants/platform' -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import { spawn } from '@socketsecurity/lib/spawn' -import { - checkDiskSpace, - checkPythonVersion, - formatDuration, - getFileSize, -} from '@socketsecurity/build-infra/lib/build-helpers' -import { - printError, - printHeader, - printStep, - printSuccess, - printWarning, -} from '@socketsecurity/build-infra/lib/build-output' -import { - cleanCheckpoint, - createCheckpoint, - shouldRun, -} from '@socketsecurity/build-infra/lib/checkpoint-manager' - -const __filename = fileURLToPath(import.meta.url) -const __dirname = path.dirname(__filename) - -// Parse arguments. -const args = process.argv.slice(2) -const FORCE_BUILD = args.includes('--force') - -// Configuration. -const ROOT_DIR = path.join(__dirname, '..') -const BUILD_DIR = path.join(ROOT_DIR, 'build') -const MODELS_DIR = path.join(BUILD_DIR, 'models') -const CACHE_DIR = path.join(BUILD_DIR, 'cache') -const PYTHON_DIR = path.join(ROOT_DIR, 'python') - -// Model configuration. -const MODELS = [ - { - name: 'sentence-transformers/all-MiniLM-L6-v2', - outputName: 'minilm', - hiddenSize: 384, - numHeads: 12, - }, -] - -/** - * Ensure Python scripts directory exists and create helper scripts. - */ -async function setupPythonScripts() { - await fs.mkdir(PYTHON_DIR, { recursive: true }) - - // Create download script. 
- const downloadScript = `#!/usr/bin/env python3 -"""Download MiniLM models from Hugging Face.""" -import sys -import json -from pathlib import Path - -try: - from transformers import AutoModel, AutoTokenizer -except ImportError: - print(json.dumps({"error": "transformers not installed"})) - sys.exit(1) - -model_name = sys.argv[1] -cache_dir = sys.argv[2] - -try: - print(json.dumps({"status": "downloading_model"})) - model = AutoModel.from_pretrained(model_name) - - print(json.dumps({"status": "downloading_tokenizer"})) - tokenizer = AutoTokenizer.from_pretrained(model_name) - - cache_path = Path(cache_dir) - cache_path.mkdir(parents=True, exist_ok=True) - - print(json.dumps({"status": "saving_model"})) - model.save_pretrained(cache_path) - - print(json.dumps({"status": "saving_tokenizer"})) - tokenizer.save_pretrained(cache_path) - - print(json.dumps({"status": "complete", "cache_dir": str(cache_path)})) -except Exception as e: - print(json.dumps({"error": str(e)})) - sys.exit(1) -` - - // Create convert script. - const convertScript = `#!/usr/bin/env python3 -"""Convert PyTorch models to ONNX format.""" -import sys -import json -from pathlib import Path - -try: - from optimum.onnxruntime import ORTModelForFeatureExtraction - from transformers import AutoTokenizer -except ImportError: - print(json.dumps({"error": "optimum[onnxruntime] not installed"})) - sys.exit(1) - -cache_dir = sys.argv[1] -output_dir = sys.argv[2] - -try: - print(json.dumps({"status": "loading_model"})) - model = ORTModelForFeatureExtraction.from_pretrained( - cache_dir, - export=True, - provider="CPUExecutionProvider" - ) - - print(json.dumps({"status": "loading_tokenizer"})) - tokenizer = AutoTokenizer.from_pretrained(cache_dir) - - output_path = Path(output_dir) - output_path.mkdir(parents=True, exist_ok=True) - - print(json.dumps({"status": "saving_onnx"})) - model.save_pretrained(output_path) - tokenizer.save_pretrained(output_path) - - print(json.dumps({"status": "complete", "output_dir": str(output_path)})) -except Exception as e: - print(json.dumps({"error": str(e)})) - sys.exit(1) -` - - // Create quantize script. - const quantizeScript = `#!/usr/bin/env python3 -"""Apply INT8 quantization to ONNX models.""" -import sys -import json -from pathlib import Path - -try: - from optimum.onnxruntime import ORTQuantizer - from optimum.onnxruntime.configuration import AutoQuantizationConfig -except ImportError: - print(json.dumps({"error": "optimum[onnxruntime] not installed"})) - sys.exit(1) - -model_dir = sys.argv[1] -output_dir = sys.argv[2] - -try: - print(json.dumps({"status": "loading_quantizer"})) - quantizer = ORTQuantizer.from_pretrained(model_dir) - - print(json.dumps({"status": "configuring_quantization"})) - qconfig = AutoQuantizationConfig.avx512_vnni( - is_static=False, - per_channel=True - ) - - output_path = Path(output_dir) - output_path.mkdir(parents=True, exist_ok=True) - - print(json.dumps({"status": "quantizing"})) - quantizer.quantize(save_dir=output_path, quantization_config=qconfig) - - print(json.dumps({"status": "complete", "output_dir": str(output_path)})) -except Exception as e: - print(json.dumps({"error": str(e)})) - sys.exit(1) -` - - // Create optimize script. 
- const optimizeScript = `#!/usr/bin/env python3 -"""Optimize ONNX graphs for inference.""" -import sys -import json -from pathlib import Path - -try: - from onnxruntime.transformers.optimizer import optimize_model -except ImportError: - print(json.dumps({"error": "onnxruntime not installed"})) - sys.exit(1) - -model_path = sys.argv[1] -output_path = sys.argv[2] -num_heads = int(sys.argv[3]) -hidden_size = int(sys.argv[4]) - -try: - print(json.dumps({"status": "loading_model"})) - - print(json.dumps({"status": "optimizing"})) - optimized_model = optimize_model( - input=model_path, - model_type='bert', - num_heads=num_heads, - hidden_size=hidden_size, - optimization_options={ - 'enable_gelu_approximation': True, - 'enable_skip_layer_norm': True, - } - ) - - print(json.dumps({"status": "saving"})) - Path(output_path).parent.mkdir(parents=True, exist_ok=True) - optimized_model.save_model_to_file(output_path) - - print(json.dumps({"status": "complete", "output_path": output_path})) -except Exception as e: - print(json.dumps({"error": str(e)})) - sys.exit(1) -` - - // Create verify script. - const verifyScript = `#!/usr/bin/env python3 -"""Verify ONNX model inference.""" -import sys -import json -import numpy as np - -try: - import onnxruntime - from transformers import AutoTokenizer -except ImportError: - print(json.dumps({"error": "onnxruntime or transformers not installed"})) - sys.exit(1) - -model_path = sys.argv[1] -tokenizer_path = sys.argv[2] -test_text = sys.argv[3] if len(sys.argv) > 3 else "This is a test" - -try: - print(json.dumps({"status": "loading_session"})) - session = onnxruntime.InferenceSession(model_path) - - print(json.dumps({"status": "loading_tokenizer"})) - tokenizer = AutoTokenizer.from_pretrained(tokenizer_path) - - print(json.dumps({"status": "tokenizing"})) - inputs = tokenizer(test_text, return_tensors="np", padding=True, truncation=True) - - print(json.dumps({"status": "running_inference"})) - onnx_inputs = {k: v for k, v in inputs.items()} - outputs = session.run(None, onnx_inputs) - - output_shape = outputs[0].shape - output_mean = float(np.mean(outputs[0])) - output_std = float(np.std(outputs[0])) - - print(json.dumps({ - "status": "complete", - "test_text": test_text, - "output_shape": list(output_shape), - "output_mean": output_mean, - "output_std": output_std - })) -except Exception as e: - print(json.dumps({"error": str(e)})) - sys.exit(1) -` - - await fs.writeFile(path.join(PYTHON_DIR, 'download.py'), downloadScript) - await fs.writeFile(path.join(PYTHON_DIR, 'convert.py'), convertScript) - await fs.writeFile(path.join(PYTHON_DIR, 'quantize.py'), quantizeScript) - await fs.writeFile(path.join(PYTHON_DIR, 'optimize.py'), optimizeScript) - await fs.writeFile(path.join(PYTHON_DIR, 'verify.py'), verifyScript) -} - -/** - * Run Python script and parse JSON output. - */ -async function runPythonScript(scriptName, args, options = {}) { - const scriptPath = path.join(PYTHON_DIR, scriptName) - const command = `python3 "${scriptPath}" ${args.map(a => `"${a}"`).join(' ')}` - - const { stdout } = await exec(command, { - stdio: 'pipe', - ...options - }) - - // Parse JSON output from Python script. 
- const lines = stdout.split('\n').filter(Boolean) - const results = [] - - for (const line of lines) { - try { - const result = JSON.parse(line) - results.push(result) - - if (result.error) { - throw new Error(result.error) - } - - if (result.code && result.code !== 'complete') { - printStep(` ${result.code.replace(/_/g, ' ')}...`) - } - } catch (e) { - if (e.message.startsWith('{')) { - continue - } - throw e - } - } - - return results[results.length - 1] || {} -} - -/** - * Download models from Hugging Face. - */ -async function downloadModels() { - if (!(await shouldRun('minilm', 'downloaded', FORCE_BUILD))) { - return - } - - printHeader('Downloading Models from Hugging Face') - - await fs.mkdir(CACHE_DIR, { recursive: true }) - - for (const model of MODELS) { - printStep(`Model: ${model.name}`) - - try { - const modelCache = path.join(CACHE_DIR, model.outputName) - await runPythonScript('download.py', [model.name, modelCache]) - printSuccess(`Downloaded: ${model.name}`) - } catch (e) { - if (e.message.includes('transformers not installed')) { - printWarning('Python transformers library not installed') - printWarning('Install with: pip install transformers') - throw new Error('Missing Python dependencies') - } - throw e - } - } - - printSuccess('Model download complete') - await createCheckpoint('minilm', 'downloaded') -} - -/** - * Convert models to ONNX format. - */ -async function convertToOnnx() { - if (!(await shouldRun('minilm', 'converted', FORCE_BUILD))) { - return - } - - printHeader('Converting Models to ONNX') - - await fs.mkdir(MODELS_DIR, { recursive: true }) - - for (const model of MODELS) { - printStep(`Converting: ${model.name}`) - - try { - const modelCache = path.join(CACHE_DIR, model.outputName) - const modelOutput = path.join(MODELS_DIR, `${model.outputName}-onnx`) - - await runPythonScript('convert.py', [modelCache, modelOutput]) - printSuccess(`Converted: ${model.name}`) - } catch (e) { - if (e.message.includes('optimum')) { - printWarning('Python optimum library not installed') - printWarning('Install with: pip install optimum[onnxruntime]') - throw new Error('Missing Python dependencies') - } - throw e - } - } - - printSuccess('ONNX conversion complete') - await createCheckpoint('minilm', 'converted') -} - -/** - * Apply mixed-precision quantization. - */ -async function quantizeModels() { - if (!(await shouldRun('minilm', 'quantized', FORCE_BUILD))) { - return - } - - printHeader('Applying INT8 Quantization') - - for (const model of MODELS) { - printStep(`Quantizing: ${model.outputName}`) - - try { - const modelInput = path.join(MODELS_DIR, `${model.outputName}-onnx`) - const modelOutput = path.join(MODELS_DIR, `${model.outputName}-quantized`) - - const sizeBefore = await getFileSize(path.join(modelInput, 'model.onnx')) - printStep(` Size before: ${sizeBefore}`) - - await runPythonScript('quantize.py', [modelInput, modelOutput]) - - const sizeAfter = await getFileSize(path.join(modelOutput, 'model.onnx')) - printStep(` Size after: ${sizeAfter}`) - - printSuccess(`Quantized: ${model.outputName}`) - } catch (e) { - if (e.message.includes('optimum')) { - printWarning('Python optimum library not installed') - printWarning('Install with: pip install optimum[onnxruntime]') - throw new Error('Missing Python dependencies') - } - throw e - } - } - - printSuccess('Quantization complete') - await createCheckpoint('minilm', 'quantized') -} - -/** - * Optimize ONNX graphs. 
- */ -async function optimizeGraphs() { - if (!(await shouldRun('minilm', 'optimized', FORCE_BUILD))) { - return - } - - printHeader('Optimizing ONNX Graphs') - - for (const model of MODELS) { - printStep(`Optimizing: ${model.outputName}`) - - try { - const modelInput = path.join(MODELS_DIR, `${model.outputName}-quantized`, 'model.onnx') - const modelOutput = path.join(MODELS_DIR, `${model.outputName}.onnx`) - - await runPythonScript('optimize.py', [ - modelInput, - modelOutput, - String(model.numHeads), - String(model.hiddenSize) - ]) - - const finalSize = await getFileSize(modelOutput) - printStep(` Final size: ${finalSize}`) - - printSuccess(`Optimized: ${model.outputName}`) - } catch (e) { - if (e.message.includes('onnxruntime not installed')) { - printWarning('Python onnxruntime library not installed') - printWarning('Install with: pip install onnxruntime') - throw new Error('Missing Python dependencies') - } - throw e - } - } - - printSuccess('Graph optimization complete') - await createCheckpoint('minilm', 'optimized') -} - -/** - * Verify models work correctly. - */ -async function verifyModels() { - if (!(await shouldRun('minilm', 'verified', FORCE_BUILD))) { - return - } - - printHeader('Verifying Model Inference') - - for (const model of MODELS) { - printStep(`Verifying: ${model.outputName}`) - - try { - const modelPath = path.join(MODELS_DIR, `${model.outputName}.onnx`) - const tokenizerPath = path.join(MODELS_DIR, `${model.outputName}-quantized`) - const testText = 'This is a test' - - const result = await runPythonScript('verify.py', [ - modelPath, - tokenizerPath, - testText - ]) - - printStep(` Test: "${result.test_text}"`) - printStep(` Output shape: [${result.output_shape.join(', ')}]`) - printStep(` Mean: ${result.output_mean.toFixed(4)}, Std: ${result.output_std.toFixed(4)}`) - - printSuccess(`Verified: ${model.outputName}`) - } catch (e) { - if (e.message.includes('not installed')) { - printWarning('Missing Python dependencies') - printWarning('Install with: pip install onnxruntime transformers') - throw new Error('Missing Python dependencies') - } - throw e - } - } - - printSuccess('Model verification complete') - await createCheckpoint('minilm', 'verified') -} - -/** - * Export models to distribution location. - */ -async function exportModels() { - printHeader('Exporting Models') - - for (const model of MODELS) { - printStep(`Exporting: ${model.outputName}`) - - const modelPath = path.join(MODELS_DIR, `${model.outputName}.onnx`) - const tokenizerSrc = path.join(MODELS_DIR, `${model.outputName}-quantized`) - const tokenizerDst = path.join(MODELS_DIR, `${model.outputName}-tokenizer`) - - // Check if models exist. - const modelExists = await fs.access(modelPath).then(() => true).catch(() => false) - - if (!modelExists) { - printWarning(`Model not found: ${modelPath}`) - printWarning(`Run build to generate models`) - continue - } - - // Copy tokenizer files. - await fs.mkdir(tokenizerDst, { recursive: true }) - - const tokenizerFiles = ['tokenizer.json', 'tokenizer_config.json', 'special_tokens_map.json', 'vocab.txt'] - for (const file of tokenizerFiles) { - const src = path.join(tokenizerSrc, file) - const dst = path.join(tokenizerDst, file) - - if (await fs.access(src).then(() => true).catch(() => false)) { - await fs.copyFile(src, dst) - } - } - - const modelSize = await getFileSize(modelPath) - printStep(` Model: ${modelSize}`) - printStep(` Location: ${modelPath}`) - } - - printSuccess('Export complete') -} - -/** - * Main build function. 
- */ -async function main() { - const totalStart = Date.now() - - printHeader('🤖 Building minilm models') - const logger = getDefaultLogger() - logger.info('MiniLM model conversion and optimization') - logger.info('') - - // Pre-flight checks. - printHeader('Pre-flight Checks') - - const diskOk = await checkDiskSpace(BUILD_DIR, 1 * 1024 * 1024 * 1024) - if (!diskOk) { - printWarning('Could not check disk space') - } - - const pythonOk = await checkPythonVersion('3.8') - if (!pythonOk) { - printError('Python 3.8+ required') - printError('Install Python from: https://www.python.org/downloads/') - throw new Error('Python 3.8+ required') - } - - printSuccess('Pre-flight checks passed') - - // Setup Python scripts. - await setupPythonScripts() - - // Build phases. - await downloadModels() - await convertToOnnx() - await quantizeModels() - await optimizeGraphs() - await verifyModels() - await exportModels() - - // Report completion. - const totalDuration = formatDuration(Date.now() - totalStart) - - printHeader('🎉 Build Complete!') - logger.success(`Total time: ${totalDuration}`) - logger.success(`Output: ${MODELS_DIR}`) - logger.info('') - logger.info('Models ready for use:') - for (const model of MODELS) { - logger.info(` - ${model.outputName}.onnx`) - logger.info(` - ${model.outputName}-tokenizer/`) - } - logger.info('') -} - -// Run build. -main().catch((e) => { - printError('Build Failed') - logger.error(e.message) - throw e -}) diff --git a/packages/minilm-builder/scripts/clean.mjs b/packages/minilm-builder/scripts/clean.mjs deleted file mode 100755 index 092d1db93..000000000 --- a/packages/minilm-builder/scripts/clean.mjs +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env node -/** - * MiniLM Model Builder Cleanup - * - * Removes build artifacts and cached files. - * - * Usage: - * node scripts/clean.mjs - */ - -import { existsSync, promises as fs } from 'node:fs' -import path from 'node:path' -import { fileURLToPath } from 'node:url' -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import colors from 'yoctocolors-cjs' - -const __dirname = path.dirname(fileURLToPath(import.meta.url)) -const packageDir = path.join(__dirname, '..') - -/** - * Main entry point. 
- */ -async function main() { - const logger = getDefaultLogger() - logger.log('🧹 Cleaning MiniLM Builder') - logger.log('='.repeat(50)) - - const buildDir = path.join(packageDir, 'build') - - if (existsSync(buildDir)) { - logger.log(`\nRemoving: ${buildDir}`) - await fs.rm(buildDir, { recursive: true, force: true }) - logger.log('✓ Build directory removed') - } else { - logger.log('\n✓ Nothing to clean') - } - - logger.log(`\n${colors.green('✓')} Clean complete!`) -} - -main().catch(error => { - logger.error('\n✗ Clean failed:', error.message) - process.exit(1) -}) diff --git a/packages/models/package.json b/packages/models/package.json deleted file mode 100644 index b67b843c0..000000000 --- a/packages/models/package.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "name": "@socketsecurity/models", - "version": "1.0.0", - "description": "AI models for Socket CLI (MiniLM-L6, CodeT5)", - "type": "module", - "private": true, - "exports": { - "./dist/minilm-l6.onnx": "./dist/minilm-l6.onnx", - "./dist/minilm-l6-tokenizer.json": "./dist/minilm-l6-tokenizer.json", - "./dist/codet5-encoder.onnx": "./dist/codet5-encoder.onnx", - "./dist/codet5-decoder.onnx": "./dist/codet5-decoder.onnx", - "./dist/codet5-tokenizer.json": "./dist/codet5-tokenizer.json" - }, - "scripts": { - "build": "node scripts/build.mjs", - "build:minilm": "node scripts/build.mjs --minilm", - "build:codet5": "node scripts/build.mjs --codet5", - "build:force": "node scripts/build.mjs --force", - "clean": "del-cli dist build" - }, - "dependencies": { - "@socketsecurity/build-infra": "workspace:*", - "@socketsecurity/lib": "workspace:*" - } -} diff --git a/packages/models/scripts/build.mjs b/packages/models/scripts/build.mjs deleted file mode 100644 index 1ce0830e5..000000000 --- a/packages/models/scripts/build.mjs +++ /dev/null @@ -1,409 +0,0 @@ -#!/usr/bin/env node - -/** - * Build script for @socketsecurity/models. - * - * Downloads AI models from Hugging Face, converts to ONNX, and applies quantization. - * - * Workflow: - * 1. Download models from Hugging Face (with fallbacks) - * 2. Convert to ONNX if needed - * 3. Apply quantization (INT4 or INT8) for compression - * 4. Output quantized ONNX models - * - * Options: - * --int8 Use INT8 quantization (better compatibility, ~50% size reduction) - * --int4 Use INT4 quantization (maximum compression, ~75% size reduction, default) - * --minilm Build MiniLM-L6 model only - * --codet5 Build CodeT5 model only - * --all Build all models - * --force Force rebuild even if checkpoints exist - * --clean Clean all checkpoints before building - */ - -import { existsSync } from 'node:fs' -import { copyFile, mkdir, readFile, writeFile } from 'node:fs/promises' -import { dirname, join } from 'node:path' -import { fileURLToPath } from 'node:url' -import { exec } from 'node:child_process' -import { promisify } from 'node:util' - -import { getDefaultLogger } from '@socketsecurity/lib/logger' - -import { - cleanCheckpoint, - createCheckpoint, - getCheckpointData, - shouldRun, -} from '@socketsecurity/build-infra/lib/checkpoint-manager' - -const execAsync = promisify(exec) - -// Check if running in CI. -const IS_CI = !!( - process.env['CI'] || - process.env['GITHUB_ACTIONS'] || - process.env['GITLAB_CI'] || - process.env['CIRCLECI'] -) - -// Parse arguments. -const args = process.argv.slice(2) -const FORCE_BUILD = args.includes('--force') -const CLEAN_BUILD = args.includes('--clean') -const NO_SELF_UPDATE = args.includes('--no-self-update') - -// Model selection flags. 
-const BUILD_MINILM = args.includes('--all') || args.includes('--minilm') || !args.includes('--codet5') -const BUILD_CODET5 = args.includes('--all') || args.includes('--codet5') - -// Quantization level (default: INT4 for maximum compression). -const QUANT_LEVEL = args.includes('--int8') ? 'INT8' : 'INT4' - -const __filename = fileURLToPath(import.meta.url) -const __dirname = dirname(__filename) -const ROOT = join(__dirname, '..') -const DIST = join(ROOT, 'dist') -const BUILD = join(ROOT, 'build') -const MODELS = join(BUILD, 'models') -const PACKAGE_NAME = 'models' - -// Model sources (with fallbacks and versions). -const MODEL_SOURCES = { - // MiniLM-L6 for embeddings (primary model). - 'minilm-l6': { - primary: 'sentence-transformers/all-MiniLM-L6-v2', - // Pin to specific revision for reproducible builds. - revision: '7dbbc90392e2f80f3d3c277d6e90027e55de9125', - fallbacks: [ - 'microsoft/all-MiniLM-L6-v2', - 'optimum/all-MiniLM-L6-v2' - ], - files: ['model.onnx', 'tokenizer.json'], - task: 'feature-extraction' - }, - // CodeT5 for code analysis. - 'codet5': { - primary: 'Salesforce/codet5-base', - revision: 'main', - fallbacks: [ - 'Salesforce/codet5-small' - ], - files: ['encoder_model.onnx', 'decoder_model.onnx', 'tokenizer.json'], - task: 'text2text-generation' - } -} - -/** - * Download model from Hugging Face. - */ -async function downloadModel(modelKey) { - if (!(await shouldRun(PACKAGE_NAME, `downloaded-${modelKey}`, FORCE_BUILD))) { - return - } - - const logger = getDefaultLogger() - logger.step(`Downloading ${modelKey} model`) - - const config = MODEL_SOURCES[modelKey] - const sources = [config.primary, ...config.fallbacks] - const revision = config.revision - - for (const source of sources) { - try { - logger.substep(`Trying: ${source}@${revision}`) - - await mkdir(MODELS, { recursive: true }) - - // Download using huggingface-cli (fastest) or fallback to Python. - try { - // Try huggingface-cli first. - const revisionFlag = revision ? `--revision=${revision}` : '' - await execAsync( - `huggingface-cli download ${source} ${revisionFlag} --local-dir ${MODELS}/${modelKey}`, - { stdio: 'inherit' } - ) - logger.success(`Downloaded from ${source}`) - await createCheckpoint(PACKAGE_NAME, `downloaded-${modelKey}`, { - source, - revision, - modelKey, - }) - return - } catch { - // Fallback to Python transformers. - const revisionParam = revision ? `, revision='${revision}'` : '' - await execAsync( - `python3 -c "from transformers import AutoTokenizer, AutoModel; ` + - `tokenizer = AutoTokenizer.from_pretrained('${source}'${revisionParam}); ` + - `model = AutoModel.from_pretrained('${source}'${revisionParam}); ` + - `tokenizer.save_pretrained('${MODELS}/${modelKey}'); ` + - `model.save_pretrained('${MODELS}/${modelKey}')"` - ) - logger.success(`Downloaded from ${source}`) - await createCheckpoint(PACKAGE_NAME, `downloaded-${modelKey}`, { - source, - revision, - modelKey, - }) - return - } - } catch (e) { - logger.error(`Failed: ${source}`) - // Continue to next fallback. - } - } - - throw new Error(`Failed to download ${modelKey} from all sources`) -} - -/** - * Convert model to ONNX if needed. - */ -async function convertToOnnx(modelKey) { - if (!(await shouldRun(PACKAGE_NAME, `converted-${modelKey}`, FORCE_BUILD))) { - return - } - - logger.step(`Converting ${modelKey} to ONNX`) - - const config = MODEL_SOURCES[modelKey] - const modelDir = join(MODELS, modelKey) - - // Check for expected ONNX files based on model type. 
- const expectedFiles = config.files.filter(f => f.endsWith('.onnx')) - const allExist = expectedFiles.every(f => existsSync(join(modelDir, f))) - - if (allExist) { - logger.success('Already in ONNX format') - await createCheckpoint(PACKAGE_NAME, `converted-${modelKey}`, { modelKey }) - return - } - - // Convert using optimum-cli with task specified. - try { - await execAsync( - `python3 -m optimum.exporters.onnx --model ${modelDir} --task ${config.task} ${modelDir}`, - { stdio: 'inherit' } - ) - logger.success('Converted to ONNX') - await createCheckpoint(PACKAGE_NAME, `converted-${modelKey}`, { modelKey }) - } catch (e) { - logger.error(`Conversion failed: ${e.message}`) - throw e - } -} - -/** - * Apply quantization for compression. - * - * Supports two quantization levels: - * - INT4: MatMulNBitsQuantizer with RTN weight-only quantization (maximum compression). - * - INT8: Dynamic quantization (better compatibility, moderate compression). - * - * Results in significant size reduction with minimal accuracy loss. - */ -async function quantizeModel(modelKey, quantLevel) { - const suffix = quantLevel.toLowerCase() - const checkpointKey = `quantized-${modelKey}-${suffix}` - - if (!(await shouldRun(PACKAGE_NAME, checkpointKey, FORCE_BUILD))) { - // Return existing quantized paths. - const modelDir = join(MODELS, modelKey) - if (modelKey === 'codet5') { - return [ - join(modelDir, `encoder_model.${suffix}.onnx`), - join(modelDir, `decoder_model.${suffix}.onnx`) - ] - } - return [join(modelDir, `model.${suffix}.onnx`)] - } - - logger.step(`Applying ${quantLevel} quantization to ${modelKey}`) - - const modelDir = join(MODELS, modelKey) - - // Different files for codet5 (encoder/decoder) vs minilm (single model). - const models = modelKey === 'codet5' - ? [ - { input: 'encoder_model.onnx', output: `encoder_model.${suffix}.onnx` }, - { input: 'decoder_model.onnx', output: `decoder_model.${suffix}.onnx` } - ] - : [{ input: 'model.onnx', output: `model.${suffix}.onnx` }] - - const quantizedPaths = [] - let method = quantLevel - - for (const { input, output } of models) { - const onnxPath = join(modelDir, input) - const quantPath = join(modelDir, output) - - if (!existsSync(onnxPath)) { - logger.warn(`No ONNX model found at ${onnxPath}, skipping`) - continue - } - - let originalSize - let quantSize - - try { - if (quantLevel === 'INT8') { - // INT8: Use dynamic quantization (simpler, more compatible). - await execAsync( - `python3 -c "` + - `from onnxruntime.quantization import quantize_dynamic, QuantType; ` + - `quantize_dynamic('${onnxPath}', '${quantPath}', weight_type=QuantType.QUInt8)` + - `"`, - { stdio: 'inherit' } - ) - } else { - // INT4: Use MatMulNBitsQuantizer (maximum compression). - await execAsync( - `python3 -c "` + - `from onnxruntime.quantization.matmul_nbits_quantizer import MatMulNBitsQuantizer, RTNWeightOnlyQuantConfig; ` + - `from onnxruntime.quantization import quant_utils; ` + - `from pathlib import Path; ` + - `quant_config = RTNWeightOnlyQuantConfig(); ` + - `model = quant_utils.load_model_with_shape_infer(Path('${onnxPath}')); ` + - `quant = MatMulNBitsQuantizer(model, algo_config=quant_config); ` + - `quant.process(); ` + - `quant.model.save_model_to_file('${quantPath}', True)` + - `"`, - { stdio: 'inherit' } - ) - } - - // Get sizes. 
- originalSize = (await readFile(onnxPath)).length - quantSize = (await readFile(quantPath)).length - const savings = ((1 - quantSize / originalSize) * 100).toFixed(1) - - logger.substep(`${input}: ${(originalSize / 1024 / 1024).toFixed(2)} MB → ${(quantSize / 1024 / 1024).toFixed(2)} MB (${savings}% savings)`) - } catch (e) { - logger.warn(`${quantLevel} quantization failed for ${input}, using FP32 model: ${e.message}`) - // Copy the original ONNX model as the "quantized" version. - await copyFile(onnxPath, quantPath) - method = 'FP32' - originalSize = (await readFile(onnxPath)).length - quantSize = originalSize - } - - quantizedPaths.push(quantPath) - } - - logger.success(`Quantized to ${method}`) - await createCheckpoint(PACKAGE_NAME, checkpointKey, { - modelKey, - method, - quantLevel, - }) - - return quantizedPaths -} - -/** - * Copy quantized models and tokenizers to dist. - */ -async function copyToDist(modelKey, quantizedPaths, quantLevel) { - logger.step('Copying models to dist') - - await mkdir(DIST, { recursive: true }) - - const modelDir = join(MODELS, modelKey) - const suffix = quantLevel.toLowerCase() - - if (modelKey === 'codet5') { - // CodeT5: encoder, decoder, tokenizer. - await copyFile(quantizedPaths[0], join(DIST, `codet5-encoder-${suffix}.onnx`)) - await copyFile(quantizedPaths[1], join(DIST, `codet5-decoder-${suffix}.onnx`)) - await copyFile(join(modelDir, 'tokenizer.json'), join(DIST, 'codet5-tokenizer.json')) - - logger.success(`Copied codet5 models (${quantLevel}) to dist/`) - } else { - // MiniLM: single model + tokenizer. - await copyFile(quantizedPaths[0], join(DIST, `minilm-l6-${suffix}.onnx`)) - await copyFile(join(modelDir, 'tokenizer.json'), join(DIST, 'minilm-l6-tokenizer.json')) - - logger.success(`Copied minilm-l6 model (${quantLevel}) to dist/`) - } -} - -/** - * Main build. - */ -async function main() { - logger.info('Building @socketsecurity/models') - logger.info('='.repeat(60)) - logger.info(`Quantization: ${QUANT_LEVEL}`) - logger.info('') - - const startTime = Date.now() - - const suffix = QUANT_LEVEL.toLowerCase() - - // Clean checkpoints if requested or if output is missing. - const outputMissing = !existsSync(join(DIST, `minilm-l6-${suffix}.onnx`)) && !existsSync(join(DIST, `codet5-encoder-${suffix}.onnx`)) - - if (CLEAN_BUILD || outputMissing) { - if (outputMissing) { - logger.step('Output artifacts missing - cleaning stale checkpoints') - } - await cleanCheckpoint(PACKAGE_NAME) - } - - // Create directories. - await mkdir(DIST, { recursive: true }) - await mkdir(BUILD, { recursive: true }) - - try { - // Build MiniLM-L6 if requested. - if (BUILD_MINILM) { - logger.info('') - logger.info('Building MiniLM-L6...') - logger.info('-'.repeat(60)) - - await downloadModel('minilm-l6') - await convertToOnnx('minilm-l6') - const quantizedPaths = await quantizeModel('minilm-l6', QUANT_LEVEL) - await copyToDist('minilm-l6', quantizedPaths, QUANT_LEVEL) - } - - // Build CodeT5 if requested. 
- if (BUILD_CODET5) { - logger.info('') - logger.info('Building CodeT5...') - logger.info('-'.repeat(60)) - - await downloadModel('codet5') - await convertToOnnx('codet5') - const quantizedPaths = await quantizeModel('codet5', QUANT_LEVEL) - await copyToDist('codet5', quantizedPaths, QUANT_LEVEL) - } - - const duration = ((Date.now() - startTime) / 1000).toFixed(1) - - logger.info('') - logger.info('='.repeat(60)) - logger.success('Build complete!') - logger.info('') - logger.substep(`Duration: ${duration}s`) - logger.info('') - logger.substep(`Output: ${DIST}`) - - if (BUILD_MINILM) { - logger.substep(` - minilm-l6-${suffix}.onnx (${QUANT_LEVEL} quantized)`) - logger.substep(' - minilm-l6-tokenizer.json') - } - if (BUILD_CODET5) { - logger.substep(` - codet5-encoder-${suffix}.onnx (${QUANT_LEVEL} quantized)`) - logger.substep(` - codet5-decoder-${suffix}.onnx (${QUANT_LEVEL} quantized)`) - logger.substep(' - codet5-tokenizer.json') - } - } catch (error) { - logger.info('') - logger.error(`Build failed: ${error.message}`) - process.exit(1) - } -} - -main() diff --git a/packages/node-smol-builder/.gitignore b/packages/node-smol-builder/.gitignore deleted file mode 100644 index 04cdd4836..000000000 --- a/packages/node-smol-builder/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -# WASM build artifacts. -wasm-bundle/pkg/ -wasm-bundle/target/ diff --git a/packages/node-smol-builder/README.md b/packages/node-smol-builder/README.md deleted file mode 100644 index a6d45dff2..000000000 --- a/packages/node-smol-builder/README.md +++ /dev/null @@ -1,65 +0,0 @@ -# @socketbin/node-smol-builder-builder - -Custom Node.js binary builder with Socket security patches. - -**This is a private package used for building Socket CLI binaries.** - -## What It Does - -Builds a custom Node.js v24.10.0 binary from source with: -- Socket security patches -- Brotli compression support -- SEA (Single Executable Application) support -- Bootstrap integration - -## Building - -### Standard Build - -```bash -cd packages/node-smol-builder -node scripts/build.mjs -``` - -### Build without Compression (Opt-out) - -Compression is **enabled by default** (it's called "smol" for a reason! 😄) - -```bash -cd packages/node-smol-builder -node scripts/build.mjs # Default: WITH compression (33 MB → 13 MB) -COMPRESS_BINARY=0 node scripts/build.mjs # Opt-out: WITHOUT compression (33 MB) -``` - -The build process: -1. Downloads Node.js v24.10.0 source -2. Applies Socket security patches from `patches/` -3. Configures and compiles Node.js with size optimizations -4. Copies bootstrap code to internal modules -5. Strips debug symbols (44 MB → 23-27 MB) -6. Signs the binary (macOS ARM64) -7. **Default:** Compresses binary (23-27 MB → 10-12 MB) -8. **Default:** Bundles platform-specific decompression tool - -## Output - -**Default build (with compression):** -- `build/out/Release/node` - Unstripped binary (44 MB) -- `build/out/Stripped/node` - Stripped binary (23-27 MB) -- `build/out/Signed/node` - Stripped + signed (macOS ARM64) -- `build/out/Final/node` - Final binary for distribution (23-27 MB) -- `build/out/Compressed/node` - **Compressed binary (10-12 MB)** ← Default output -- `build/out/Compressed/socket_*_decompress` - Decompression tool (~90 KB) -- `dist/socket-smol` - E2E test binary (copy of Compressed) - -**Without compression (`COMPRESS_BINARY=0`):** -- Same as above, but skips Compressed directory -- `dist/socket-smol` - E2E test binary (copy of Final, uncompressed) - -## Platform Support - -Currently builds for the host platform only. 
Cross-compilation not yet supported. - -## License - -MIT diff --git a/packages/node-smol-builder/additions/.gitignore b/packages/node-smol-builder/additions/.gitignore deleted file mode 100644 index 7c2831fc1..000000000 --- a/packages/node-smol-builder/additions/.gitignore +++ /dev/null @@ -1 +0,0 @@ -002-bootstrap-loader/internal/socketsecurity_bootstrap_loader.js diff --git a/packages/node-smol-builder/additions/001-brotli-integration/socketsecurity_brotli_builtin_loader.h b/packages/node-smol-builder/additions/001-brotli-integration/socketsecurity_brotli_builtin_loader.h deleted file mode 100644 index 4c6aabb3e..000000000 --- a/packages/node-smol-builder/additions/001-brotli-integration/socketsecurity_brotli_builtin_loader.h +++ /dev/null @@ -1,180 +0,0 @@ -/** - * Socket Security - Minimal-Touch Brotli Builtin Loader - * - * @file socketsecurity_brotli_builtin_loader.h - * @brief External header for Node.js builtin Brotli decompression - * @version 1.0.0 - * @date 2025-01-17 - * - * OVERVIEW - * This header provides transparent Brotli decompression for Node.js JavaScript - * builtin modules. It acts as a drop-in replacement for the standard builtin - * loading path, automatically detecting and decompressing Brotli-compressed - * modules while falling back to standard loading for uncompressed modules. - * - * DESIGN GOALS - * 1. Minimal Node.js source modification (only 10 lines across 2 files) - * 2. All decompression logic in external, maintainable code - * 3. Zero runtime overhead for uncompressed modules - * 4. Safe fallback on any decompression failure - * 5. No undefined behavior or platform-specific hacks - * - * INTEGRATION - * This header is included by src/node_builtins.cc and requires: - * - * 1. Friend declaration in src/node_union_bytes.h: - * friend struct socketsecurity::builtins::UnionBytesAccessor; - * - * 2. Call site modification in src/node_builtins.cc: - * - return source.ToStringChecked(isolate); - * + return socketsecurity::builtins::LoadBuiltinSourceWithBrotli(isolate, id, source); - * - * COMPRESSION FORMAT - * Brotli-compressed builtins use a 12-byte header: - * - * Offset | Size | Description - * -------|------|------------------------------------------ - * 0 | 4 | Magic marker: "BROT" (0x42, 0x52, 0x4F, 0x54) - * 4 | 8 | Decompressed size (little-endian uint64_t) - * 12 | N | Brotli-compressed JavaScript data - * - * BINARY SIZE SAVINGS - * Expected savings with Brotli compression: - * - JavaScript builtins: ~30MB → ~5MB (83% reduction) - * - Total binary impact: 60MB → 35MB baseline - * - With minification + Brotli: 60MB → 20MB (67% total reduction) - * - * LICENSE - * Copyright (c) 2025 Socket Security - * SPDX-License-Identifier: MIT - */ - -#ifndef SOCKETSECURITY_BROTLI_BUILTIN_LOADER_H_ -#define SOCKETSECURITY_BROTLI_BUILTIN_LOADER_H_ - -#include "node_union_bytes.h" -#include "v8.h" -#include -#include -#include -#include - -namespace socketsecurity { -namespace builtins { - -// Magic marker identifying Brotli-compressed builtin modules. -constexpr const char BROTLI_MAGIC[4] = {'B', 'R', 'O', 'T'}; - -// Size of the Brotli compression header (4 bytes magic + 8 bytes size). -constexpr size_t BROTLI_HEADER_SIZE = 12; - -// Maximum decompressed size for a single builtin module (50MB sanity check). -constexpr uint64_t MAX_DECOMPRESSED_SIZE = 50ULL * 1024 * 1024; - -/** - * Accessor struct for UnionBytes private members. - * Requires friend declaration in src/node_union_bytes.h. 
- */ -struct UnionBytesAccessor { - static const uint8_t* GetData(const node::UnionBytes& source, size_t* out_size) { - // Only handle one-byte (ASCII/UTF-8) strings. - if (!source.is_one_byte() || !source.one_byte_resource_) { - *out_size = 0; - return nullptr; - } - - // Access private member via friend declaration. - auto* resource = source.one_byte_resource_; - *out_size = resource->length(); - return reinterpret_cast(resource->data()); - } -}; - -/** - * Load a Node.js builtin source with optional Brotli decompression. - * - * Drop-in replacement for UnionBytes::ToStringChecked() with transparent - * Brotli decompression support. - * - * ALGORITHM: - * 1. Extract raw bytes from UnionBytes (via friend accessor) - * 2. Check for Brotli magic marker (fast rejection for uncompressed) - * 3. Validate decompressed size - * 4. Decompress with Brotli - * 5. Create V8 string from decompressed JavaScript - * 6. Fall back to standard loading on any error - * - * @param isolate V8 isolate for string creation - * @param id Builtin module identifier (for debugging) - * @param source UnionBytes containing potentially compressed JavaScript - * @return V8 string containing decompressed JavaScript - */ -inline v8::MaybeLocal LoadBuiltinSourceWithBrotli( - v8::Isolate* isolate, - const char* id, - const node::UnionBytes& source) { - - // Step 1: Extract raw bytes from UnionBytes. - size_t data_size; - const uint8_t* data = UnionBytesAccessor::GetData(source, &data_size); - - // Fallback: If we can't access the data, use standard loading. - if (!data || data_size == 0) { - return source.ToStringChecked(isolate); - } - - // Step 2: Check for Brotli compression marker (fast path for uncompressed). - if (data_size < BROTLI_HEADER_SIZE || - std::memcmp(data, BROTLI_MAGIC, 4) != 0) { - return source.ToStringChecked(isolate); - } - - // Step 3: Read and validate decompressed size. - uint64_t decompressed_size; - std::memcpy(&decompressed_size, data + 4, 8); - - if (decompressed_size == 0 || decompressed_size > MAX_DECOMPRESSED_SIZE) { - return source.ToStringChecked(isolate); - } - - // Step 4: Prepare for decompression. - const uint8_t* compressed = data + BROTLI_HEADER_SIZE; - size_t compressed_size = data_size - BROTLI_HEADER_SIZE; - - // Allocate decompression buffer. - auto decompressed = std::make_unique(decompressed_size); - - // Step 5: Decompress with Brotli. - size_t actual_size = decompressed_size; - BrotliDecoderResult result = BrotliDecoderDecompress( - compressed_size, - compressed, - &actual_size, - decompressed.get() - ); - - // Validate decompression result. - if (result != BROTLI_DECODER_RESULT_SUCCESS || - actual_size != decompressed_size) { - return source.ToStringChecked(isolate); - } - - // Step 6: Create V8 string from decompressed JavaScript. 
- auto maybe_string = v8::String::NewFromOneByte( - isolate, - decompressed.get(), - v8::NewStringType::kNormal, - static_cast(actual_size) - ); - - if (maybe_string.IsEmpty()) { - return source.ToStringChecked(isolate); - } - - return maybe_string; -} - -} // namespace builtins -} // namespace socketsecurity - -#endif // SOCKET_BROTLI_BUILTIN_LOADER_H_ diff --git a/packages/node-smol-builder/additions/002-bootstrap-loader/internal/socketsecurity_bootstrap_loader.js.template b/packages/node-smol-builder/additions/002-bootstrap-loader/internal/socketsecurity_bootstrap_loader.js.template deleted file mode 100644 index 942659ed8..000000000 --- a/packages/node-smol-builder/additions/002-bootstrap-loader/internal/socketsecurity_bootstrap_loader.js.template +++ /dev/null @@ -1,248 +0,0 @@ -/** - * Socket Security Bootstrap Loader - * - * ============================================================================ - * CRITICAL: This module MUST be called early in Node.js pre-execution phase - * ============================================================================ - * - * PURPOSE (What): - * --------------- - * Loads and executes Socket CLI security bootstrap code at Node.js startup. - * The bootstrap monitors package installations, network requests, and file - * operations to detect suspicious activity. - * - * WHY IT EXISTS (Why): - * -------------------- - * 1. EARLY LOADING: Must run before user code to intercept module loading - * 2. ZERO FILESYSTEM: Bootstrap embedded as base64 - no file I/O needed - * 3. ASYNC SUPPORT: Allows background security monitoring without blocking startup - * 4. ISOLATED CONTEXT: Runs in separate VM context to avoid polluting globals - * - * HOW IT WORKS (Visual Flow): - * --------------------------- - * - * Node.js Startup - * │ - * ├─→ lib/internal/process/pre_execution.js - * │ └─→ loadPreloadModules() - * │ └─→ require('internal/socketsecurity_bootstrap_loader')() ← 1-line injection - * │ │ - * │ ├─→ [THIS FILE] - * │ │ ├─→ Decode base64 bootstrap - * │ │ ├─→ Create module context - * │ │ ├─→ Compile with vm.compileFunction() - * │ │ └─→ Execute (async background) - * │ │ - * │ └─→ Returns immediately - * │ - * └─→ User code starts - * (Bootstrap monitors in background) - * - * PERFORMANCE IMPACT: - * ------------------- - * - Decode: ~1-2ms (base64 → string) - * - Compile: ~3-5ms (vm.compileFunction C++ API) - * - Execute: Instant return (async code runs in background) - * - Total: <10ms added to Node.js startup - * - * TECHNICAL DETAILS: - * ------------------ - * 1. Uses vm.compileFunction() (C++ API) instead of eval or vm.runInThisContext() - * - Faster compilation - * - Proper stack traces - * - Module parameter injection (exports, require, module, __filename, __dirname) - * - * 2. Module context created manually: - * - filename: '/internal/bootstrap-smol.js' (virtual path, validation only) - * - paths: Standard node_modules resolution - * - require: Full require() with resolve, cache, extensions - * - * 3. Async execution pattern: - * - Bootstrap can use main().catch(...) 
pattern - * - Starts executing but returns immediately - * - Background monitoring continues while Node.js initializes - * - * ERROR HANDLING: - * --------------- - * - Catches all errors during load/compile/execute - * - Prints to stderr (console not available this early) - * - Never crashes Node.js (bootstrap failures are non-fatal) - * - * VISUAL EXAMPLE: - * --------------- - * - * Base64 Embedded Bootstrap - * ┌──────────────────────────┐ - * │ ZnVuY3Rpb24gbWFpbigpIHs │ ← Build system embeds here - * │ ...21,000+ lines... │ - * └──────────────────────────┘ - * │ - * ├─→ Buffer.from(base64) → JavaScript source - * │ - * ├─→ vm.compileFunction(source, params) → Compiled function - * │ - * └─→ compiledFn(exports, require, module, ...) → Execute - * │ - * ├─→ Synchronous setup code runs immediately - * └─→ main().catch(...) starts, returns immediately - * │ - * └─→ Async monitoring continues in background - * - * MAINTENANCE NOTES: - * ------------------ - * - This file is processed by build.mjs during compilation - * - SOCKET_BOOTSTRAP_BASE64_PLACEHOLDER replaced with actual base64 - * - Final file copied to Node.js source: lib/internal/socketsecurity_bootstrap_loader.js - * - Patch injects 1 line: require('internal/socketsecurity_bootstrap_loader')() - * - * SECURITY CONSIDERATIONS: - * ------------------------ - * - Bootstrap runs with full Node.js internal access (can require internal modules) - * - No filesystem or network access during load (embedded base64) - * - Errors isolated (won't crash Node.js) - * - Module context prevents global pollution - * - * @module internal/socketsecurity_bootstrap_loader - * @requires internal/modules/cjs/loader - Module system access - * @requires internal/modules/helpers - makeRequireFunction() - * @requires vm - Compilation API - * @requires buffer - Base64 decoding - */ - -'use strict'; - -/** - * Load and execute the Socket bootstrap. - * - * Called from lib/internal/process/pre_execution.js during Node.js startup. - * This function MUST return quickly to avoid blocking Node.js initialization. - * - * EXECUTION FLOW: - * --------------- - * 1. Decode base64 → JavaScript source code - * 2. Create module context (exports, require, module, __filename, __dirname) - * 3. Compile source with vm.compileFunction() (C++ API, fast!) - * 4. Execute compiled function (synchronous setup + async background) - * 5. Return immediately (async code continues in background) - * - * ERROR BEHAVIOR: - * --------------- - * - Any error during load/compile/execute is caught - * - Error printed to stderr (console not available) - * - Node.js continues initialization (non-fatal) - * - User code runs normally (bootstrap disabled) - * - * @returns {void} - * @throws {never} All errors caught and logged to stderr - */ -module.exports = function loadSocketBootstrap() { - // Bootstrap code embedded as base64 (build system replaces this placeholder). - // Split across multiple lines for readability and to avoid line length limits. - // - // PLACEHOLDER REPLACEMENT: - // ├─→ Build system (build.mjs) reads bootstrap source - // ├─→ Encodes as base64 (1275KB → 1700KB) - // ├─→ Splits into 80-char chunks - // └─→ Replaces SOCKET_BOOTSTRAP_BASE64_PLACEHOLDER - const SOCKET_BOOTSTRAP_B64 = ( - SOCKET_BOOTSTRAP_BASE64_PLACEHOLDER - ); - - try { - // STEP 1: Load Node.js internals. - // -------------------------------- - // These modules MUST be available during pre-execution phase. - // If any are missing, the catch block will handle gracefully. 
- const Module = require('internal/modules/cjs/loader').Module; - const { makeRequireFunction } = require('internal/modules/helpers'); - const vm = require('vm'); - const { Buffer } = require('buffer'); - - // STEP 2: Decode bootstrap from base64. - // -------------------------------------- - // Performance: ~1-2ms for 1700KB base64 → 1275KB JavaScript - // Result: Plain JavaScript source code ready for compilation - const bootstrapCode = Buffer.from(SOCKET_BOOTSTRAP_B64, 'base64').toString('utf8'); - - // STEP 3: Create module context. - // ------------------------------- - // This gives the bootstrap access to require(), module.exports, etc. - // - // CRITICAL: filename MUST be absolute path format for validation. - // - Module.createRequire() validates filename is absolute - // - Doesn't need to exist as real file, just valid path format - // - Using '/internal/bootstrap-smol.js' (Node.js internal path style) - const bootstrapModule = new Module('socket:bootstrap', null); - bootstrapModule.filename = '/internal/bootstrap-smol.js'; // Virtual path (validation only) - bootstrapModule.paths = Module._nodeModulePaths(process.cwd()); // Standard resolution - const exports = {}; - bootstrapModule.exports = exports; - - // STEP 4: Create require function. - // --------------------------------- - // makeRequireFunction() adds: - // - require.resolve() - // - require.cache - // - require.extensions - // - require.main - const moduleRequire = makeRequireFunction(bootstrapModule); - - // STEP 5: Compile using C++ API. - // ------------------------------- - // vm.compileFunction() is FASTER than: - // - eval() (no stack traces, security issues) - // - vm.runInThisContext() (slower, no parameter injection) - // - Module.wrap() + compilation (extra wrapping overhead) - // - // Parameters: ['exports', 'require', 'module', '__filename', '__dirname'] - // These match standard CommonJS module parameters. - const compiledFn = vm.compileFunction( - bootstrapCode, - ['exports', 'require', 'module', '__filename', '__dirname'], - { - filename: '/internal/bootstrap-smol.js', // For stack traces - lineOffset: 0, // Source starts at line 0 - columnOffset: 0, // Source starts at column 0 - } - ); - - // STEP 6: Execute with module context. - // ------------------------------------- - // Reflect.apply() provides clean invocation: - // - thisArg: exports (standard CommonJS) - // - args: [exports, require, module, __filename, __dirname] - // - // ASYNC BEHAVIOR: - // - If bootstrap has main().catch(...), it starts executing - // - Function returns immediately (doesn't wait for async) - // - Async operations continue in background - // - Node.js initialization proceeds normally - Reflect.apply(compiledFn, exports, [ - exports, // exports object - moduleRequire, // require() with resolve, cache, etc. - bootstrapModule, // module object - '/internal/bootstrap-smol.js', // __filename (virtual) - '/internal' // __dirname (virtual) - ]); - - // Returns here immediately, even if bootstrap has async code! - // Background monitoring continues while Node.js initializes. - - } catch (err) { - // ERROR HANDLING: - // --------------- - // - console.* not available this early in bootstrap - // - Use process.stderr.write() for direct output - // - Include stack trace for debugging - // - Never throw (would crash Node.js) - // - // FAILURE MODES: - // 1. Module loading error (require fails) → Bootstrap disabled - // 2. Base64 decode error → Bootstrap disabled - // 3. Compilation error (invalid syntax) → Bootstrap disabled - // 4. 
Execution error (runtime error) → Bootstrap disabled - // - // In all cases: Node.js continues, user code runs normally. - process.stderr.write(`Socket bootstrap error: ${err.message}\n${err.stack}\n`); - } -}; diff --git a/packages/node-smol-builder/additions/003-compression-tools/CMakeLists.txt b/packages/node-smol-builder/additions/003-compression-tools/CMakeLists.txt deleted file mode 100644 index 9ed1f9799..000000000 --- a/packages/node-smol-builder/additions/003-compression-tools/CMakeLists.txt +++ /dev/null @@ -1,116 +0,0 @@ -# CMake build system for Socket compression tools (cross-platform) -# -# Supports: -# - macOS: Mach-O compression with Apple Compression framework -# - Linux: ELF compression with liblzma -# - Windows: PE compression with Windows Compression API -# -# Usage: -# mkdir build && cd build -# cmake .. -# cmake --build . -# - -cmake_minimum_required(VERSION 3.15) -project(socket-compression-tools VERSION 1.0.0 LANGUAGES C CXX) - -set(CMAKE_C_STANDARD 11) -set(CMAKE_CXX_STANDARD 17) - -# Platform detection -if(APPLE) - message(STATUS "Building for macOS (Mach-O)") - set(PLATFORM "macos") -elseif(UNIX) - message(STATUS "Building for Linux (ELF)") - set(PLATFORM "linux") -elseif(WIN32) - message(STATUS "Building for Windows (PE)") - set(PLATFORM "windows") -endif() - -# macOS tools -if(APPLE) - add_executable(socket_macho_compress socket_macho_compress.cc) - target_link_libraries(socket_macho_compress "-framework Compression") - target_compile_options(socket_macho_compress PRIVATE -O3 -Wall -Wextra) - - add_executable(socket_macho_decompress socket_macho_decompress.cc) - target_link_libraries(socket_macho_decompress "-framework Compression") - target_compile_options(socket_macho_decompress PRIVATE -O3 -Wall -Wextra) - - install(TARGETS socket_macho_compress socket_macho_decompress - RUNTIME DESTINATION bin) -endif() - -# Linux tools -if(UNIX AND NOT APPLE) - find_package(LibLZMA REQUIRED) - - add_executable(socket_elf_compress socket_elf_compress.c) - target_link_libraries(socket_elf_compress ${LIBLZMA_LIBRARIES}) - target_include_directories(socket_elf_compress PRIVATE ${LIBLZMA_INCLUDE_DIRS}) - target_compile_options(socket_elf_compress PRIVATE -O3 -Wall -Wextra) - - add_executable(socket_elf_decompress socket_elf_decompress.c) - target_link_libraries(socket_elf_decompress ${LIBLZMA_LIBRARIES}) - target_include_directories(socket_elf_decompress PRIVATE ${LIBLZMA_INCLUDE_DIRS}) - target_compile_options(socket_elf_decompress PRIVATE -O3 -Wall -Wextra) - - install(TARGETS socket_elf_compress socket_elf_decompress - RUNTIME DESTINATION bin) -endif() - -# Windows tools -if(WIN32) - add_executable(socket_pe_compress socket_pe_compress.c) - target_link_libraries(socket_pe_compress Cabinet) - target_compile_options(socket_pe_compress PRIVATE /O2 /W4) - - add_executable(socket_pe_decompress socket_pe_decompress.c) - target_link_libraries(socket_pe_decompress Cabinet) - target_compile_options(socket_pe_decompress PRIVATE /O2 /W4) - - install(TARGETS socket_pe_compress socket_pe_decompress - RUNTIME DESTINATION bin) -endif() - -# Testing -enable_testing() - -if(APPLE) - add_test(NAME test_macho_compress - COMMAND socket_macho_compress) - set_tests_properties(test_macho_compress PROPERTIES - WILL_FAIL TRUE) # Should fail without arguments -endif() - -if(UNIX AND NOT APPLE) - add_test(NAME test_elf_compress - COMMAND socket_elf_compress) - set_tests_properties(test_elf_compress PROPERTIES - WILL_FAIL TRUE) # Should fail without arguments -endif() - -if(WIN32) - add_test(NAME 
test_pe_compress - COMMAND socket_pe_compress) - set_tests_properties(test_pe_compress PROPERTIES - WILL_FAIL TRUE) # Should fail without arguments -endif() - -# Summary -message(STATUS "") -message(STATUS "Socket Compression Tools Configuration:") -message(STATUS " Platform: ${PLATFORM}") -message(STATUS " Build type: ${CMAKE_BUILD_TYPE}") -message(STATUS " C Compiler: ${CMAKE_C_COMPILER}") -if(APPLE) - message(STATUS " Tools: socket_macho_compress, socket_macho_decompress") -elseif(UNIX) - message(STATUS " Tools: socket_elf_compress, socket_elf_decompress") - message(STATUS " LZMA: ${LIBLZMA_LIBRARIES}") -elseif(WIN32) - message(STATUS " Tools: socket_pe_compress, socket_pe_decompress") -endif() -message(STATUS "") diff --git a/packages/node-smol-builder/additions/003-compression-tools/Makefile b/packages/node-smol-builder/additions/003-compression-tools/Makefile deleted file mode 100644 index 5bced1157..000000000 --- a/packages/node-smol-builder/additions/003-compression-tools/Makefile +++ /dev/null @@ -1,58 +0,0 @@ -# Makefile for Socket Mach-O compression tools. -# -# Builds: -# - socket_macho_compress: Compresses Mach-O binaries using Apple's Compression framework -# - socket_macho_decompress: Decompresses and executes compressed binaries -# -# Usage: -# make # Build all tools -# make compress # Build only the compressor -# make decompress # Build only the decompressor -# make clean # Remove build artifacts -# make install # Install to /usr/local/bin (requires sudo) -# - -CXX := /usr/bin/clang++ -CXXFLAGS := -std=c++17 -O3 -Wall -Wextra -LDFLAGS := - -# macOS-specific flags. -ifeq ($(shell uname),Darwin) - CXXFLAGS += -mmacosx-version-min=11.0 - LDFLAGS += -lcompression -endif - -TARGETS := socket_macho_compress socket_macho_decompress -INSTALL_DIR := /usr/local/bin - -.PHONY: all clean install compress decompress - -all: $(TARGETS) - -compress: socket_macho_compress - -decompress: socket_macho_decompress - -socket_macho_compress: socket_macho_compress.cc - $(CXX) $(CXXFLAGS) -o $@ $< $(LDFLAGS) - @echo "✅ Built socket_macho_compress" - -socket_macho_decompress: socket_macho_decompress.cc - $(CXX) $(CXXFLAGS) -o $@ $< $(LDFLAGS) - @echo "✅ Built socket_macho_decompress" - -clean: - rm -f $(TARGETS) - @echo "✅ Cleaned build artifacts" - -install: $(TARGETS) - @echo "Installing to $(INSTALL_DIR)..." - install -m 755 socket_macho_compress $(INSTALL_DIR)/ - install -m 755 socket_macho_decompress $(INSTALL_DIR)/ - @echo "✅ Installed to $(INSTALL_DIR)" - -test: $(TARGETS) - @echo "Running basic tests..." 
- @./socket_macho_compress --help || true - @./socket_macho_decompress --help || true - @echo "✅ Basic tests passed" diff --git a/packages/node-smol-builder/additions/003-compression-tools/Makefile.linux b/packages/node-smol-builder/additions/003-compression-tools/Makefile.linux deleted file mode 100644 index 74952f42f..000000000 --- a/packages/node-smol-builder/additions/003-compression-tools/Makefile.linux +++ /dev/null @@ -1,54 +0,0 @@ -# Makefile for Socket ELF compression tools (Linux) -# -# Builds: -# - socket_elf_compress: Compresses ELF binaries using liblzma -# - socket_elf_decompress: Decompresses and executes compressed binaries -# -# Usage: -# make -f Makefile.linux # Build all tools -# make -f Makefile.linux compress # Build only compressor -# make -f Makefile.linux clean # Remove build artifacts -# -# Requirements: -# - GCC or Clang -# - liblzma-dev (install: apt-get install liblzma-dev) -# - -CC := gcc -CFLAGS := -std=c11 -O3 -Wall -Wextra -D_GNU_SOURCE -LDFLAGS := -llzma - -TARGETS := socket_elf_compress socket_elf_decompress - -.PHONY: all clean compress decompress test - -all: $(TARGETS) - -compress: socket_elf_compress - -decompress: socket_elf_decompress - -socket_elf_compress: socket_elf_compress.c - $(CC) $(CFLAGS) -o $@ $< $(LDFLAGS) - @echo "✅ Built socket_elf_compress" - -socket_elf_decompress: socket_elf_decompress.c - $(CC) $(CFLAGS) -o $@ $< $(LDFLAGS) - @echo "✅ Built socket_elf_decompress" - -clean: - rm -f $(TARGETS) - @echo "✅ Cleaned Linux build artifacts" - -test: $(TARGETS) - @echo "Running basic tests..." - @./socket_elf_compress 2>&1 | head -1 || true - @./socket_elf_decompress 2>&1 | head -1 || true - @echo "✅ Basic tests passed" - -# Install to system (requires root) -install: $(TARGETS) - @echo "Installing to /usr/local/bin..." - install -m 755 socket_elf_compress /usr/local/bin/ - install -m 755 socket_elf_decompress /usr/local/bin/ - @echo "✅ Installed to /usr/local/bin" diff --git a/packages/node-smol-builder/additions/003-compression-tools/Makefile.windows b/packages/node-smol-builder/additions/003-compression-tools/Makefile.windows deleted file mode 100644 index 7dadaddbe..000000000 --- a/packages/node-smol-builder/additions/003-compression-tools/Makefile.windows +++ /dev/null @@ -1,47 +0,0 @@ -# Makefile for Socket PE compression tools (Windows/MinGW) -# -# Builds: -# - socket_pe_compress.exe: Compresses PE binaries using Windows API -# - socket_pe_decompress.exe: Decompresses and executes compressed binaries -# -# Usage: -# mingw32-make -f Makefile.windows # Build all tools -# mingw32-make -f Makefile.windows compress # Build only compressor -# mingw32-make -f Makefile.windows clean # Remove build artifacts -# -# Requirements: -# - MinGW-w64 (gcc for Windows) -# - Windows 8+ (for Compression API) -# - -CC := gcc -CFLAGS := -std=c11 -O3 -Wall -Wextra -D_WIN32_WINNT=0x0602 -LDFLAGS := -lCabinet - -TARGETS := socket_pe_compress.exe socket_pe_decompress.exe - -.PHONY: all clean compress decompress test - -all: $(TARGETS) - -compress: socket_pe_compress.exe - -decompress: socket_pe_decompress.exe - -socket_pe_compress.exe: socket_pe_compress.c - $(CC) $(CFLAGS) -o $@ $< $(LDFLAGS) - @echo "Built socket_pe_compress.exe" - -socket_pe_decompress.exe: socket_pe_decompress.c - $(CC) $(CFLAGS) -o $@ $< $(LDFLAGS) - @echo "Built socket_pe_decompress.exe" - -clean: - del /Q $(TARGETS) 2>nul - @echo "Cleaned Windows build artifacts" - -test: $(TARGETS) - @echo "Running basic tests..." 
- @socket_pe_compress.exe 2>&1 | findstr /C:"Usage" >nul && echo "Compress tool OK" - @socket_pe_decompress.exe 2>&1 | findstr /C:"Usage" >nul && echo "Decompress tool OK" - @echo "Basic tests passed" diff --git a/packages/node-smol-builder/additions/003-compression-tools/README.md b/packages/node-smol-builder/additions/003-compression-tools/README.md deleted file mode 100644 index 2c5f2c2a1..000000000 --- a/packages/node-smol-builder/additions/003-compression-tools/README.md +++ /dev/null @@ -1,534 +0,0 @@ -# Socket Binary Compression Tools - -Cross-platform binary compression tools providing UPX-like compression **without antivirus false positives**. - -## Overview - -These tools provide **safe, native binary compression** for macOS, Linux, and Windows that: -- ✓ Work with code signing (macOS) -- ✓ Use native OS APIs (no antivirus flags) -- ✓ Achieve 70-80% compression (better than UPX) -- ✓ Support all major platforms - -**Why not UPX?** UPX triggers 15-30% of antivirus vendors, breaks macOS code signing, and achieves only 50-60% compression. These tools use native OS compression APIs that are trusted by security software and achieve better compression ratios. - -## Tools - -### macOS (Mach-O) - -**socket_macho_compress** - Compresses macOS binaries using Apple's Compression framework. - -**Features:** -- 70-80% size reduction (LZMA) -- Code signing compatible -- Fast decompression (~100-200ms) -- Algorithms: LZMA, LZFSE, ZLIB, LZ4 - -**Usage:** -```bash -socket_macho_compress input_binary output_binary [--quality=lzma] -socket_macho_decompress compressed_binary [args...] -``` - -### Linux (ELF) - -**socket_elf_compress** - Compresses Linux binaries using liblzma (native library). - -**Features:** -- 75-80% size reduction (LZMA) -- No antivirus flags (native library) -- Fast decompression -- Uses xz-utils (pre-installed on most distros) - -**Usage:** -```bash -socket_elf_compress input_binary output_binary [--quality=lzma] -socket_elf_decompress compressed_binary [args...] -``` - -### Windows (PE) - -**socket_pe_compress** - Compresses Windows binaries using Windows Compression API. - -**Features:** -- 70-73% size reduction (LZMS) -- No antivirus flags (native API) -- Windows 8+ compatible -- Algorithms: LZMS, XPRESS, XPRESS_HUFF - -**Usage:** -```bash -socket_pe_compress.exe input_binary output_binary [--quality=lzms] -socket_pe_decompress.exe compressed_binary [args...] -``` - -## Building - -### macOS - -```bash -make # Build all macOS tools -make compress # Build compressor only -make decompress # Build decompressor only -make clean # Clean build artifacts -sudo make install # Install to /usr/local/bin -``` - -**Requirements:** Xcode Command Line Tools (`xcode-select --install`) - -### Linux - -```bash -make -f Makefile.linux # Build all Linux tools -make -f Makefile.linux compress -make -f Makefile.linux decompress -make -f Makefile.linux clean -sudo make -f Makefile.linux install -``` - -**Requirements:** GCC, liblzma-dev (`apt-get install liblzma-dev` or `yum install xz-devel`) - -### Windows - -```bash -mingw32-make -f Makefile.windows # Build all Windows tools -mingw32-make -f Makefile.windows compress -mingw32-make -f Makefile.windows decompress -mingw32-make -f Makefile.windows clean -``` - -**Requirements:** MinGW-w64, Windows 8+ SDK - -### Cross-Platform (CMake) - -```bash -mkdir build && cd build -cmake .. -cmake --build . 
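-# Optional (standard CMake flags, shown as a sketch):
-#   cmake .. -DCMAKE_BUILD_TYPE=Release
-#   cmake --build . --parallel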
-``` - -**Requirements:** CMake 3.15+, platform-specific compilers and libraries - -## Quick Start - -### Using Platform-Specific Tools - -```bash -# macOS -./socket_macho_compress /usr/local/bin/node ./node.compressed --quality=lzma -./socket_macho_decompress ./node.compressed --version -codesign --sign - --force ./node.compressed # Optional: Re-sign after compression - -# Linux -./socket_elf_compress /usr/bin/node ./node.compressed --quality=lzma -./socket_elf_decompress ./node.compressed --version - -# Windows -socket_pe_compress.exe node.exe node.compressed --quality=lzms -socket_pe_decompress.exe node.compressed --version -``` - -### Using Cross-Platform Scripts (Recommended) - -```bash -# Automatically detects platform and uses appropriate tool. -node ../../scripts/compress-binary.mjs ./node ./node.compressed --quality=lzma -node ../../scripts/decompress-binary.mjs ./node.compressed --version - -# Benefits: Automatic platform detection, consistent interface, automatic tool building. -``` - -## Compression Quality - -### macOS Algorithms - -| Algorithm | Ratio | Speed | Use Case | -|-----------|-------|-------|----------| -| lz4 | 50% | Very Fast (~50ms) | Fast startup | -| zlib | 60% | Fast (~100ms) | Compatibility | -| lzfse | 67% | Fast (~120ms) | Balance (Apple default) | -| **lzma** | **76%** | Moderate (~200ms) | **Maximum compression** | - -### Linux Algorithms - -| Algorithm | Ratio | Speed | Use Case | -|-----------|-------|-------|----------| -| **lzma** | **75-80%** | Fast | **Maximum compression (default)** | - -### Windows Algorithms - -| Algorithm | Ratio | Speed | Use Case | -|-----------|-------|-------|----------| -| xpress | 60% | Very Fast | Fast startup | -| xpress_huff | 65% | Fast | Balance | -| **lzms** | **70-73%** | Moderate | **Maximum compression (default)** | - -## Size Comparison - -Real-world results with custom Node.js builds: - -### macOS (44 MB stripped + signed) - -``` -Original: 44 MB -├─ LZFSE compressed: 15 MB (67% reduction) -└─ LZMA compressed: 10 MB (76% reduction) ⭐ - -vs. UPX (if it worked): 22 MB (50% reduction, breaks codesign ✗) -``` - -### Linux (39 MB stripped) - -``` -Original: 39 MB -└─ LZMA compressed: 9 MB (77% reduction) ⭐ - -vs. UPX: 20 MB (50% reduction, AV flags ⚠) -``` - -### Windows (estimated, 44 MB) - -``` -Original: 44 MB -└─ LZMS compressed: 13 MB (70% reduction) ⭐ - -vs. UPX: 22 MB (50% reduction, AV flags ⚠) -``` - -**Key Takeaway:** These tools achieve 20-30% better compression than UPX while avoiding antivirus false positives and maintaining code signing compatibility. - -## Integration with Build Process - -### Via Cross-Platform Node.js Script (Recommended) - -```bash -# Automatically detects platform and uses appropriate tool. -node scripts/compress-binary.mjs \ - build/out/Release/node \ - build/out/Release/node.compressed \ - --quality=lzma - -# Test compressed binary. -node scripts/decompress-binary.mjs \ - build/out/Release/node.compressed \ - --version -``` - -### Via Platform-Specific Scripts - -```bash -# macOS only. -node scripts/compress-macho.mjs \ - build/out/Signed/node \ - build/out/Compressed/node \ - --quality=lzma -``` - -### Via Build Script Integration - -Add to your `scripts/build.mjs`: - -```javascript -import { spawn } from '@socketsecurity/registry/lib/spawn' -import path from 'node:path' - -// After building binary. 
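-// ESM has no __dirname; derive it here (sketch: assumes POSIX-style paths,
-// prefer fileURLToPath from node:url for Windows-safe handling).
-const __dirname = path.dirname(new URL(import.meta.url).pathname)
-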
-async function compressBinary(inputPath, outputPath) { - const scriptPath = path.join(__dirname, 'scripts', 'compress-binary.mjs') - - const result = await spawn('node', [ - scriptPath, - inputPath, - outputPath, - '--quality=lzma' - ]) - - if (result.code !== 0) { - throw new Error(`Compression failed: ${result.code}`) - } - - // macOS: Re-sign after compression. - if (process.platform === 'darwin') { - await spawn('codesign', ['--sign', '-', '--force', outputPath]) - } - - console.log('✓ Binary compressed successfully') -} - -// Usage. -await compressBinary( - 'build/out/Release/node', - 'build/out/Release/node.compressed' -) -``` - -## Distribution - -For end-user distribution, ship these files: - -### Option 1: Direct Tool Distribution - -**macOS:** -- `socket-macos-arm64.compressed` (compressed binary) -- `socket_macho_decompress` (decompressor tool) - -**Linux:** -- `socket-linux-x64.compressed` (compressed binary) -- `socket_elf_decompress` (decompressor tool) - -**Windows:** -- `socket-windows-x64.compressed` (compressed binary) -- `socket_pe_decompress.exe` (decompressor tool) - -### Option 2: Wrapper Script (User-Friendly) - -**Unix/Linux/macOS:** -```bash -#!/bin/bash -# socket (user-friendly wrapper) -DIR="$(dirname "$0")" -PLATFORM="$(uname -s)" - -case "$PLATFORM" in - Darwin) DECOMPRESS="socket_macho_decompress" ;; - Linux) DECOMPRESS="socket_elf_decompress" ;; - *) echo "Unsupported platform: $PLATFORM"; exit 1 ;; -esac - -exec "$DIR/$DECOMPRESS" "$DIR/socket.compressed" "$@" -``` - -**Windows (socket.bat):** -```batch -@echo off -"%~dp0socket_pe_decompress.exe" "%~dp0socket.compressed" %* -``` - -Users can then run: -```bash -./socket --version -./socket scan create -``` - -## Technical Details - -### Compressed Binary Format - -All platforms use a consistent header format with platform-specific magic numbers: - -```c -struct CompressedHeader { - uint32_t magic; // Platform-specific magic number - uint32_t algorithm; // Compression algorithm ID - uint64_t original_size; // Decompressed size in bytes - uint64_t compressed_size;// Compressed payload size in bytes -}; -// Followed by compressed payload -``` - -**Magic Numbers:** -- macOS: `"SCMP"` = `0x504D4353` (Socket Compressed Mach-o Protocol) -- Linux: `"SELF"` = `0x53454C46` (Socket ELF) -- Windows: `"SEPE"` = `0x53455045` (Socket PE) - -### Decompression Process - -**macOS:** -1. Parse header and validate magic (`0x504D4353`) -2. Decompress using `compression_decode_buffer()` (Apple framework) -3. Write to `/tmp/socket_decompress_XXXXXX` -4. Execute via `execv()` with original arguments -5. Temp file cleaned up on exit - -**Linux:** -1. Parse header and validate magic (`0x53454C46`) -2. Decompress using `lzma_stream_buffer_decode()` (liblzma) -3. Write to `/tmp/socket_decompress_XXXXXX` -4. Execute via `execv()` with original arguments -5. Temp file cleaned up on exit - -**Windows:** -1. Parse header and validate magic (`0x53455045`) -2. Decompress using `Decompress()` (Windows Compression API) -3. Write to `%TEMP%\socket_*.exe` -4. Execute via `CreateProcessA()` with original arguments -5. 
Temp file cleaned up after process completes - -### Why This Avoids Antivirus False Positives - -**UPX Problems:** -- Uses self-modifying code (heuristic trigger) -- Packs executable sections (suspicious behavior) -- Well-known packer signature (blacklisted) - -**Our Approach:** -- Uses native OS compression APIs (trusted) -- No self-modifying code (W^X compliant) -- Decompression happens before execution (transparent) -- Native API calls are whitelisted by AV software - -## Troubleshooting - -### macOS Build Errors - -**Error:** `clang++: command not found` -```bash -xcode-select --install -``` - -**Error:** `ld: framework not found Compression` -```bash -# Use system clang, not Homebrew -/usr/bin/clang++ -o socket_macho_compress socket_macho_compress.cc -lcompression -O3 -``` - -### Linux Build Errors - -**Error:** `fatal error: lzma.h: No such file or directory` -```bash -# Debian/Ubuntu -sudo apt-get install liblzma-dev - -# RHEL/CentOS/Fedora -sudo yum install xz-devel -``` - -**Error:** `undefined reference to 'lzma_stream_buffer_encode'` -```bash -# Add -llzma to linker flags -gcc socket_elf_compress.c -o socket_elf_compress -llzma -``` - -### Windows Build Errors - -**Error:** `compressapi.h: No such file or directory` -``` -Install Windows 8+ SDK or use Visual Studio 2015+ -``` - -**Error:** `undefined reference to 'CreateCompressor'` -```bash -# Add Cabinet.lib to linker -gcc socket_pe_compress.c -o socket_pe_compress.exe -lCabinet -``` - -### Compression Errors - -**Error:** `Error: Not a valid Mach-O/ELF/PE binary` -```bash -# Verify binary format matches platform -file your_binary - -# Should show: -# macOS: Mach-O 64-bit executable arm64 -# Linux: ELF 64-bit LSB executable, x86-64 -# Windows: PE32+ executable (console) x86-64 -``` - -### Decompression Errors - -**Error:** `Error: Invalid magic number` - -File was not compressed with Socket tools. Compress it first: -```bash -# Auto-detect platform -node scripts/compress-binary.mjs original_binary compressed_binary -``` - -**Error:** `Error: Size mismatch after decompression` - -Corrupted compressed file. Re-compress from original binary. - -### Runtime Errors - -**Error:** `Failed to execute decompressed binary` (Unix/Linux) -```bash -# Check /tmp permissions -df -h /tmp -ls -ld /tmp -# Should be: drwxrwxrwt (sticky bit set) -``` - -**Error:** `Access denied` (Windows) -```batch -# Check %TEMP% permissions -echo %TEMP% -dir "%TEMP%" -# Ensure write access to temp directory -``` - -## Limitations - -### All Platforms - -1. **Decompression Overhead**: 50-200ms on first run (varies by algorithm) -2. **Memory Usage**: Requires 2x binary size during decompression -3. **Temporary File**: Requires disk space for decompressed binary -4. 
**Single Binary**: Only compresses single executables (not bundles/libraries) - -### Platform-Specific - -**macOS:** -- Requires macOS 10.9+ (Compression framework availability) -- Compressed binary must be re-signed after compression - -**Linux:** -- Requires liblzma runtime (pre-installed on most distros) -- Temp files use `/tmp` (must have execute permission) - -**Windows:** -- Requires Windows 8+ (Compression API availability) -- Temp files use `%TEMP%` directory - -## Performance Characteristics - -| Platform | Algorithm | Compression | Decompression | Memory | -|----------|-----------|-------------|---------------|--------| -| macOS | LZMA | ~5-10s | ~150ms | 2x size | -| macOS | LZFSE | ~3-5s | ~100ms | 2x size | -| Linux | LZMA | ~5-10s | ~120ms | 2x size | -| Windows | LZMS | ~8-12s | ~180ms | 2x size | - -Tested with 44 MB Node.js binary. - -## Future Improvements - -1. **Self-Extracting Stub**: Embed decompressor in binary for single-file distribution -2. **In-Memory Execution**: Execute from memory without temporary file (platform-dependent) -3. **Streaming Decompression**: Reduce memory usage by streaming decompression -4. **Section-Level Compression**: Compress only code sections for better ratios -5. **Multi-threaded Compression**: Use parallel compression for faster builds - -## References - -### Platform Documentation - -**macOS:** -- [Apple Compression Framework](https://developer.apple.com/documentation/compression) -- [Mach-O File Format](https://developer.apple.com/documentation/kernel/mach-o_file_format) -- [Code Signing Guide](https://developer.apple.com/library/archive/documentation/Security/Conceptual/CodeSigningGuide/) - -**Linux:** -- [liblzma Documentation](https://tukaani.org/xz/xz-file-format.txt) -- [ELF Specification](https://refspecs.linuxfoundation.org/elf/elf.pdf) -- [LZMA Algorithm](https://en.wikipedia.org/wiki/Lempel%E2%80%93Ziv%E2%80%93Markov_chain_algorithm) - -**Windows:** -- [Windows Compression API](https://docs.microsoft.com/en-us/windows/win32/cmpapi/-compression-portal) -- [PE Format Specification](https://docs.microsoft.com/en-us/windows/win32/debug/pe-format) -- [Cabinet Compression](https://docs.microsoft.com/en-us/windows/win32/msi/cabinet-files) - -### Socket Documentation - -- [Cross-Platform Compression Guide](../../docs/cross-platform-compression.md) -- [macOS Compression Details](../../docs/macho-compression.md) -- [Build System Documentation](../../docs/wasm-build-guide.md) - -### UPX Alternatives - -- [Why Avoid UPX](https://security.stackexchange.com/questions/195085/why-do-antivirus-programs-often-flag-upx-packed-executables) -- [Native Compression Benefits](https://github.com/upx/upx/issues/332) - -## License - -Copyright © 2024 Socket Security. All rights reserved. diff --git a/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_brotli2c.cc b/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_brotli2c.cc deleted file mode 100644 index 74b773674..000000000 --- a/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_brotli2c.cc +++ /dev/null @@ -1,1030 +0,0 @@ -// -// Socket Brotli2C - JavaScript to C++ converter with Brotli compression. -// Based on Node.js js2c tool with Socket Security enhancements. 
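-// Embeds lib/*.js and deps/*.js sources into the binary as Brotli-compressed
-// byte arrays (see CompressWithBrotli below for the 12-byte header layout).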
-// -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include "embedded_data.h" -#include "executable_wrapper.h" -#include "simdutf.h" -#include "brotli/encode.h" -#include "uv.h" - -#if defined(_WIN32) -#include // _S_IREAD _S_IWRITE -#ifndef S_IRUSR -#define S_IRUSR _S_IREAD -#endif // S_IRUSR -#ifndef S_IWUSR -#define S_IWUSR _S_IWRITE -#endif // S_IWUSR -#endif -namespace node { -namespace js2c { -int Main(int argc, char* argv[]); - -static bool is_verbose = false; - -void Debug(const char* format, ...) { - va_list arguments; - va_start(arguments, format); - if (is_verbose) { - vfprintf(stderr, format, arguments); - } - va_end(arguments); -} - -// Compress data with Brotli. -// Returns empty vector on failure. -std::vector CompressWithBrotli(const std::vector& code, - const std::string& var) { - size_t input_size = code.size(); - size_t max_compressed_size = BrotliEncoderMaxCompressedSize(input_size); - std::vector compressed_temp(max_compressed_size); - size_t compressed_size = max_compressed_size; - - int result = BrotliEncoderCompress( - BROTLI_DEFAULT_QUALITY, - BROTLI_DEFAULT_WINDOW, - BROTLI_MODE_TEXT, - input_size, - reinterpret_cast(code.data()), - &compressed_size, - compressed_temp.data() - ); - - if (!result) { - fprintf(stderr, "Brotli compression failed for %s\n", var.c_str()); - return {}; - } - - // Create result vector with 12-byte header + compressed data. - // Header format: - // Offset 0-3: Magic "BROT" (0x42, 0x52, 0x4F, 0x54) - // Offset 4-11: Decompressed size (uint64_t little-endian) - // Offset 12+: Brotli-compressed data - const size_t header_size = 12; - std::vector with_header(header_size + compressed_size); - - // Write magic marker "BROT". - with_header[0] = 0x42; // 'B' - with_header[1] = 0x52; // 'R' - with_header[2] = 0x4F; // 'O' - with_header[3] = 0x54; // 'T' - - // Write decompressed size as little-endian uint64_t. - uint64_t decompressed_size = static_cast(input_size); - for (size_t i = 0; i < 8; i++) { - with_header[4 + i] = (decompressed_size >> (i * 8)) & 0xFF; - } - - // Copy compressed data after header. - std::memcpy(with_header.data() + header_size, compressed_temp.data(), compressed_size); - - // Log compression stats. 
- double ratio = 100.0 * (1.0 - (double)(header_size + compressed_size) / input_size); - Debug("Compressed %s: %zu → %zu bytes with header (%.1f%% reduction)\n", - var.c_str(), input_size, header_size + compressed_size, ratio); - - return with_header; -} - -void PrintUvError(const char* syscall, const char* filename, int error) { - fprintf(stderr, "[%s] %s: %s\n", syscall, filename, uv_strerror(error)); -} - -int GetStats(const char* path, std::function func) { - uv_fs_t req; - int r = uv_fs_stat(nullptr, &req, path, nullptr); - if (r == 0) { - func(static_cast(req.ptr)); - } - uv_fs_req_cleanup(&req); - return r; -} - -bool IsDirectory(const std::string& filename, int* error) { - bool result = false; - *error = GetStats(filename.c_str(), [&](const uv_stat_t* stats) { - result = !!(stats->st_mode & S_IFDIR); - }); - if (*error != 0) { - PrintUvError("stat", filename.c_str(), *error); - } - return result; -} - -size_t GetFileSize(const std::string& filename, int* error) { - size_t result = 0; - *error = GetStats(filename.c_str(), - [&](const uv_stat_t* stats) { result = stats->st_size; }); - return result; -} - -constexpr bool FilenameIsConfigGypi(const std::string_view path) { - return path == "config.gypi" || path.ends_with("/config.gypi"); -} - -typedef std::vector FileList; -typedef std::map FileMap; - -bool SearchFiles(const std::string& dir, - FileMap* file_map, - std::string_view extension) { - uv_fs_t scan_req; - int result = uv_fs_scandir(nullptr, &scan_req, dir.c_str(), 0, nullptr); - bool errored = false; - if (result < 0) { - PrintUvError("scandir", dir.c_str(), result); - errored = true; - } else { - auto it = file_map->insert({std::string(extension), FileList()}).first; - FileList& files = it->second; - files.reserve(files.size() + result); - uv_dirent_t dent; - while (true) { - result = uv_fs_scandir_next(&scan_req, &dent); - if (result == UV_EOF) { - break; - } - - if (result != 0) { - PrintUvError("scandir_next", dir.c_str(), result); - errored = true; - break; - } - - std::string path = dir + '/' + dent.name; - if (path.ends_with(extension)) { - files.emplace_back(path); - continue; - } - if (!IsDirectory(path, &result)) { - if (result == 0) { // It's a file, no need to search further. - continue; - } else { - errored = true; - break; - } - } - - if (!SearchFiles(path, file_map, extension)) { - errored = true; - break; - } - } - } - - uv_fs_req_cleanup(&scan_req); - return !errored; -} - -constexpr std::string_view kMjsSuffix = ".mjs"; -constexpr std::string_view kJsSuffix = ".js"; -constexpr std::string_view kGypiSuffix = ".gypi"; -constexpr std::string_view depsPrefix = "deps/"; -constexpr std::string_view libPrefix = "lib/"; - -constexpr std::string_view HasAllowedExtensions( - const std::string_view filename) { - for (const auto& ext : {kGypiSuffix, kJsSuffix, kMjsSuffix}) { - if (filename.ends_with(ext)) { - return ext; - } - } - return {}; -} - -using Fragment = std::vector; -using Fragments = std::vector>; - -std::vector Join(const Fragments& fragments, - const std::string& separator) { - size_t length = separator.size() * (fragments.size() - 1); - for (size_t i = 0; i < fragments.size(); ++i) { - length += fragments[i].size(); - } - std::vector buf(length, 0); - size_t cursor = 0; - for (size_t i = 0; i < fragments.size(); ++i) { - const Fragment& fragment = fragments[i]; - // Avoid using snprintf on large chunks of data because it's much slower. - // It's fine to use it on small amount of data though. 
- if (i != 0) { - memcpy(buf.data() + cursor, separator.c_str(), separator.size()); - cursor += separator.size(); - } - memcpy(buf.data() + cursor, fragment.data(), fragment.size()); - cursor += fragment.size(); - } - buf.resize(cursor); - return buf; -} - -const char* kTemplate = R"( -#include "env-inl.h" -#include "node_builtins.h" -#include "node_external_reference.h" -#include "node_internals.h" - -namespace node { - -namespace builtins { - -%.*s -namespace { -const ThreadsafeCopyOnWrite global_source_map { - BuiltinSourceMap { -%.*s - } // BuiltinSourceMap -}; // ThreadsafeCopyOnWrite -} // anonymous namespace - -void BuiltinLoader::LoadJavaScriptSource() { - source_ = global_source_map; -} - -void RegisterExternalReferencesForInternalizedBuiltinCode( - ExternalReferenceRegistry* registry) { -%.*s -} - -UnionBytes BuiltinLoader::GetConfig() { - return UnionBytes(&config_resource); -} - -} // namespace builtins - -} // namespace node -)"; - -Fragment Format(const Fragments& definitions, - const Fragments& initializers, - const Fragments& registrations) { - std::vector def_buf = Join(definitions, "\n"); - size_t def_size = def_buf.size(); - std::vector init_buf = Join(initializers, "\n"); - size_t init_size = init_buf.size(); - std::vector reg_buf = Join(registrations, "\n"); - size_t reg_size = reg_buf.size(); - - size_t result_size = - def_size + init_size + reg_size + strlen(kTemplate) + 100; - std::vector result(result_size, 0); - int r = snprintf(result.data(), - result_size, - kTemplate, - static_cast(def_buf.size()), - def_buf.data(), - static_cast(init_buf.size()), - init_buf.data(), - static_cast(reg_buf.size()), - reg_buf.data()); - result.resize(r); - return result; -} - -std::vector ReadFileSync(const char* path, size_t size, int* error) { - uv_fs_t req; - Debug("ReadFileSync %s with size %zu\n", path, size); - - uv_file file = uv_fs_open(nullptr, &req, path, O_RDONLY, 0, nullptr); - if (req.result < 0) { - uv_fs_req_cleanup(&req); - *error = req.result; - return std::vector(); - } - uv_fs_req_cleanup(&req); - - std::vector contents(size); - size_t offset = 0; - - while (offset < size) { - uv_buf_t buf = uv_buf_init(contents.data() + offset, size - offset); - int bytes_read = uv_fs_read(nullptr, &req, file, &buf, 1, offset, nullptr); - offset += bytes_read; - *error = req.result; - uv_fs_req_cleanup(&req); - if (*error < 0) { - uv_fs_close(nullptr, &req, file, nullptr); - // We can't do anything if uv_fs_close returns error, so just return. - return std::vector(); - } - if (bytes_read <= 0) { - break; - } - } - assert(offset == size); - - *error = uv_fs_close(nullptr, &req, file, nullptr); - return contents; -} - -int WriteFileSync(const std::vector& out, const char* path) { - Debug("WriteFileSync %zu bytes to %s\n", out.size(), path); - uv_fs_t req; - uv_file file = uv_fs_open(nullptr, - &req, - path, - UV_FS_O_CREAT | UV_FS_O_WRONLY | UV_FS_O_TRUNC, - S_IWUSR | S_IRUSR, - nullptr); - int err = req.result; - uv_fs_req_cleanup(&req); - if (err < 0) { - return err; - } - - uv_buf_t buf = uv_buf_init(const_cast(out.data()), out.size()); - err = uv_fs_write(nullptr, &req, file, &buf, 1, 0, nullptr); - uv_fs_req_cleanup(&req); - - int r = uv_fs_close(nullptr, &req, file, nullptr); - uv_fs_req_cleanup(&req); - if (err < 0) { - // We can't do anything if uv_fs_close returns error, so just return. 
- return err; - } - return r; -} - -int WriteIfChanged(const Fragment& out, const std::string& dest) { - Debug("output size %zu\n", out.size()); - - int error = 0; - size_t size = GetFileSize(dest, &error); - if (error != 0 && error != UV_ENOENT) { - return error; - } - Debug("existing size %zu\n", size); - - bool changed = true; - // If it's not the same size, the file is definitely changed so we'll - // just proceed to update. Otherwise check the content before deciding - // whether we want to write it. - if (error != UV_ENOENT && size == out.size()) { - std::vector content = ReadFileSync(dest.c_str(), size, &error); - if (error == 0) { // In case of error, always write the file. - changed = (memcmp(content.data(), out.data(), size) != 0); - } - } - if (!changed) { - Debug("No change, return\n"); - return 0; - } - return WriteFileSync(out, dest.c_str()); -} - -std::string GetFileId(const std::string& filename) { - size_t end = filename.size(); - size_t start = 0; - std::string prefix; - // Strip .mjs and .js suffix - if (filename.ends_with(kMjsSuffix)) { - end -= kMjsSuffix.size(); - } else if (filename.ends_with(kJsSuffix)) { - end -= kJsSuffix.size(); - } - - // deps/acorn/acorn/dist/acorn.js -> internal/deps/acorn/acorn/dist/acorn - if (filename.starts_with(depsPrefix)) { - start = depsPrefix.size(); - prefix = "internal/deps/"; - } else if (filename.starts_with(libPrefix)) { - // lib/internal/url.js -> internal/url - start = libPrefix.size(); - prefix = ""; - } - - return prefix + std::string(filename.begin() + start, filename.begin() + end); -} - -std::string GetVariableName(const std::string& id) { - std::string result = id; - size_t length = result.size(); - - for (size_t i = 0; i < length; ++i) { - if (result[i] == '.' || result[i] == '-' || result[i] == '/') { - result[i] = '_'; - } - } - return result; -} - -// The function returns a string buffer and an array of -// offsets. The string is just "0,1,2,3,...,65535,". -// The second array contain the offsets indicating the -// start of each substring ("0,", "1,", etc.) and the final -// offset points just beyond the end of the string. -// 382106 is the length of the string "0,1,2,3,...,65535,". -// 65537 is 2**16 + 1 -// This function could be constexpr, but it might become too expensive to -// compile. -std::pair, std::array> -precompute_string() { - // the string "0,1,2,3,...,65535,". - std::array str; - // the offsets in the string pointing at the beginning of each substring - std::array off; - off[0] = 0; - char* p = &str[0]; - constexpr auto const_int_to_str = [](uint16_t value, char* s) -> uint32_t { - uint32_t index = 0; - do { - s[index++] = '0' + (value % 10); - value /= 10; - } while (value != 0); - - for (uint32_t i = 0; i < index / 2; ++i) { - char temp = s[i]; - s[i] = s[index - i - 1]; - s[index - i - 1] = temp; - } - s[index] = ','; - return index + 1; - }; - for (int i = 0; i < 65536; ++i) { - size_t offset = const_int_to_str(i, p); - p += offset; - off[i + 1] = off[i] + offset; - } - return {str, off}; -} - -const std::string_view GetCode(uint16_t index) { - // We use about 644254 bytes of memory. An array of 65536 strings might use - // 2097152 bytes so we save 3x the memory. 
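-  // (Arithmetic: 382106 bytes of backing string + 65537 * 4 bytes of
-  // uint32_t offsets = 644254; 65536 std::string objects at 32 bytes each
-  // on common 64-bit ABIs would be 2097152 bytes.)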
- static auto [backing_string, offsets] = precompute_string(); - return std::string_view(&backing_string[offsets[index]], - offsets[index + 1] - offsets[index]); -} - -#ifdef NODE_JS2C_USE_STRING_LITERALS -const char* string_literal_def_template = "static const %s *%s_raw = "; -constexpr std::string_view latin1_string_literal_start = - "reinterpret_cast(\""; -constexpr std::string_view ascii_string_literal_start = - "reinterpret_cast(R\"JS2C1b732aee("; -constexpr std::string_view utf16_string_literal_start = - "reinterpret_cast(uR\"JS2C1b732aee("; -constexpr std::string_view latin1_string_literal_end = "\");"; -constexpr std::string_view utf_string_literal_end = ")JS2C1b732aee\");"; -#else -const char* array_literal_def_template = "static const %s %s_raw[] = "; -constexpr std::string_view array_literal_start = "{\n"; -constexpr std::string_view array_literal_end = "\n};\n\n"; -#endif - -enum class CodeType { - kAscii, // Code points are all within 0-127. - kLatin1, // Code points are all within 0-255. - kTwoByte, -}; -template -Fragment GetDefinitionImpl(const std::vector& code, - const std::string& var, - CodeType type) { - constexpr bool is_two_byte = std::is_same_v; - static_assert(is_two_byte || std::is_same_v); - - size_t count = is_two_byte - ? simdutf::utf16_length_from_utf8(code.data(), code.size()) - : code.size(); - constexpr const char* arr_type = is_two_byte ? "uint16_t" : "uint8_t"; - constexpr const char* resource_type = is_two_byte - ? "StaticExternalTwoByteResource" - : "StaticExternalOneByteResource"; - -#ifdef NODE_JS2C_USE_STRING_LITERALS - const char* literal_def_template = string_literal_def_template; - size_t unit = type == CodeType::kLatin1 ? 4 : 1; - size_t def_size = 512 + code.size() * unit; -#else - const char* literal_def_template = array_literal_def_template; - constexpr size_t unit = (is_two_byte ? 5 : 3) + 1; - size_t def_size = 512 + count * unit; -#endif - - Fragment result(def_size, 0); - - int cur = snprintf( - result.data(), def_size, literal_def_template, arr_type, var.c_str()); - - assert(cur != 0); - -#ifdef NODE_JS2C_USE_STRING_LITERALS - std::string_view start_string_view; - switch (type) { - case CodeType::kAscii: - start_string_view = ascii_string_literal_start; - break; - case CodeType::kLatin1: - start_string_view = latin1_string_literal_start; - break; - case CodeType::kTwoByte: - start_string_view = utf16_string_literal_start; - break; - } - - memcpy( - result.data() + cur, start_string_view.data(), start_string_view.size()); - cur += start_string_view.size(); - - if (type != CodeType::kLatin1) { - memcpy(result.data() + cur, code.data(), code.size()); - cur += code.size(); - } else { - const uint8_t* ptr = reinterpret_cast(code.data()); - for (size_t i = 0; i < count; ++i) { - // Avoid using snprintf on large chunks of data because it's much slower. - // It's fine to use it on small amount of data though. 
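-      // Bytes above 127 are logged, then every byte is emitted as an octal
-      // escape via GetOctalCode so Latin-1 data survives in a narrow literal.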
- uint8_t ch = ptr[i]; - if (ch > 127) { - Debug("In %s, found non-ASCII Latin-1 character at %zu: %d\n", - var.c_str(), - i, - ch); - } - const std::string& str = GetOctalCode(ch); - memcpy(result.data() + cur, str.c_str(), str.size()); - cur += str.size(); - } - } - - std::string_view string_literal_end; - switch (type) { - case CodeType::kAscii: - string_literal_end = utf_string_literal_end; - break; - case CodeType::kLatin1: - string_literal_end = latin1_string_literal_end; - break; - case CodeType::kTwoByte: - string_literal_end = utf_string_literal_end; - break; - } - memcpy(result.data() + cur, - string_literal_end.data(), - string_literal_end.size()); - cur += string_literal_end.size(); -#else - memcpy(result.data() + cur, - array_literal_start.data(), - array_literal_start.size()); - cur += array_literal_start.size(); - - // Avoid using snprintf on large chunks of data because it's much slower. - // It's fine to use it on small amount of data though. - if constexpr (is_two_byte) { - std::vector utf16_codepoints(count); - size_t utf16_count = simdutf::convert_utf8_to_utf16( - code.data(), - code.size(), - reinterpret_cast(utf16_codepoints.data())); - assert(utf16_count != 0); - utf16_codepoints.resize(utf16_count); - Debug("static size %zu\n", utf16_count); - for (size_t i = 0; i < utf16_count; ++i) { - std::string_view str = GetCode(utf16_codepoints[i]); - memcpy(result.data() + cur, str.data(), str.size()); - cur += str.size(); - } - } else { - const uint8_t* ptr = reinterpret_cast(code.data()); - for (size_t i = 0; i < count; ++i) { - uint16_t ch = static_cast(ptr[i]); - if (ch > 127) { - Debug("In %s, found non-ASCII Latin-1 character at %zu: %d\n", - var.c_str(), - i, - ch); - } - std::string_view str = GetCode(ch); - memcpy(result.data() + cur, str.data(), str.size()); - cur += str.size(); - } - } - - memcpy( - result.data() + cur, array_literal_end.data(), array_literal_end.size()); - cur += array_literal_end.size(); -#endif - - int end_size = snprintf(result.data() + cur, - result.size() - cur, - "static %s %s_resource(%s_raw, %zu, nullptr);\n", - resource_type, - var.c_str(), - var.c_str(), - count); - cur += end_size; - result.resize(cur); - return result; -} - -bool Simplify(const std::vector& code, - const std::string& var, - std::vector* simplified) { - // Allowlist files to avoid false positives. - // TODO(joyeecheung): this could be removed if undici updates itself - // to replace "’" with "'" though we could still keep this skeleton in - // place for future hot fixes that are verified by humans. - if (var != "internal_deps_undici_undici") { - return false; - } - - size_t code_size = code.size(); - simplified->reserve(code_size); - const uint8_t* ptr = reinterpret_cast(code.data()); - size_t simplified_count = 0; - for (size_t i = 0; i < code_size; ++i) { - switch (ptr[i]) { - case 226: { // ’ [ 226, 128, 153 ] -> ' - if (i + 2 < code_size && ptr[i + 1] == 128 && ptr[i + 2] == 153) { - simplified->push_back('\''); - i += 2; - simplified_count++; - break; - } - [[fallthrough]]; - } - default: { - simplified->push_back(code[i]); - break; - } - } - } - - if (simplified_count > 0) { - Debug("Simplified %lu characters, ", simplified_count); - Debug("old size %lu, new size %lu\n", code_size, simplified->size()); - return true; - } - return false; -} - -// SOCKET MODIFICATION: Direct byte array generation for Brotli-compressed binary data. -// Solution 2: Bypass GetDefinitionImpl entirely and generate byte arrays directly. 
-// This ensures Brotli-compressed binary data is ALWAYS output as byte arrays, -// never as string literals (which break on binary data like "BR" magic bytes). -// -// This function generates C++ byte array definitions directly without relying on -// the GetDefinitionImpl template or NODE_JS2C_USE_STRING_LITERALS preprocessor flag. -Fragment GenerateByteArrayDefinition(const std::vector& data, const std::string& var) { - // Generate the array declaration: static const uint8_t var_raw[] = {...}; - std::stringstream ss; - ss << "static const uint8_t " << var << "_raw[] = {"; - - // Generate the byte array initializer. - for (size_t i = 0; i < data.size(); i++) { - if (i > 0) { - ss << ","; - } - // Add newline every 16 bytes for readability. - if (i % 16 == 0) { - ss << "\n "; - } - // Output byte as hex: 0x00 to 0xFF. - ss << "0x" << std::hex << std::setw(2) << std::setfill('0') - << (static_cast(static_cast(data[i]))); - } - ss << "\n};\n"; - - // Generate the size declaration. - ss << "static const size_t " << var << "_raw_len = " << std::dec << data.size() << ";\n"; - - // Generate the resource struct (required by Node.js builtin loading). - ss << "static StaticExternalOneByteResource " - << var << "_resource(" << var << "_raw, " << data.size() << ", nullptr);\n"; - - // Convert stringstream to string, then to vector (Fragment). - std::string result_str = ss.str(); - return Fragment(result_str.begin(), result_str.end()); -} - -// SOCKET MODIFICATION: GetDefinition now compresses with Brotli and outputs as byte arrays. -Fragment GetDefinition(const std::string& var, const std::vector& code) { - Debug("GetDefinition %s, code size %zu\n", var.c_str(), code.size()); - - // Compress with Brotli. - std::vector compressed = CompressWithBrotli(code, var); - - if (compressed.empty()) { - fprintf(stderr, "Warning: Brotli compression failed for %s, using uncompressed\n", var.c_str()); - // Fallback to uncompressed byte arrays. - return GenerateByteArrayDefinition(code, var); - } - - // Convert compressed bytes to char vector. - std::vector compressed_char(compressed.begin(), compressed.end()); - - // Binary compressed data MUST use byte arrays. - // Use our direct byte array generator. 
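-  // (The emitted array keeps the 12-byte BROT header from CompressWithBrotli,
-  // letting the loader validate the magic and read the decompressed size.)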
- return GenerateByteArrayDefinition(compressed_char, var); -} - -int AddModule(const std::string& filename, - Fragments* definitions, - Fragments* initializers, - Fragments* registrations) { - Debug("AddModule %s start\n", filename.c_str()); - - int error = 0; - size_t file_size = GetFileSize(filename, &error); - if (error != 0) { - return error; - } - std::vector code = ReadFileSync(filename.c_str(), file_size, &error); - if (error != 0) { - return error; - } - std::string file_id = GetFileId(filename); - std::string var = GetVariableName(file_id); - - definitions->emplace_back(GetDefinition(var, code)); - - // Initializers of the BuiltinSourceMap: - // {"fs", UnionBytes{&fs_resource}}, - Fragment& init_buf = initializers->emplace_back(Fragment(256, 0)); - int init_size = snprintf(init_buf.data(), - init_buf.size(), - " {\"%s\", UnionBytes(&%s_resource) },", - file_id.c_str(), - var.c_str()); - init_buf.resize(init_size); - - // Registrations: - // registry->Register(&fs_resource); - Fragment& reg_buf = registrations->emplace_back(Fragment(256, 0)); - int reg_size = snprintf(reg_buf.data(), - reg_buf.size(), - " registry->Register(&%s_resource);", - var.c_str()); - reg_buf.resize(reg_size); - return 0; -} - -std::vector ReplaceAll(const std::vector& data, - const std::string& search, - const std::string& replacement) { - auto cur = data.begin(); - auto last = data.begin(); - std::vector result; - result.reserve(data.size()); - while ((cur = std::search(last, data.end(), search.begin(), search.end())) != - data.end()) { - result.insert(result.end(), last, cur); - result.insert(result.end(), - replacement.c_str(), - replacement.c_str() + replacement.size()); - last = cur + search.size(); - } - result.insert(result.end(), last, data.end()); - return result; -} - -std::vector StripComments(const std::vector& input) { - std::vector result; - result.reserve(input.size()); - - auto last_hash = input.cbegin(); - auto line_begin = input.cbegin(); - auto end = input.cend(); - while ((last_hash = std::find(line_begin, end, '#')) != end) { - result.insert(result.end(), line_begin, last_hash); - line_begin = std::find(last_hash, end, '\n'); - if (line_begin != end) { - line_begin += 1; - } - } - result.insert(result.end(), line_begin, end); - return result; -} - -// This is technically unused for our config.gypi, but just porting it here to -// mimic js2c.py. -std::vector JoinMultilineString(const std::vector& input) { - std::vector result; - result.reserve(input.size()); - - auto closing_quote = input.cbegin(); - auto last_inserted = input.cbegin(); - auto end = input.cend(); - std::string search = "'\n"; - while ((closing_quote = std::search( - last_inserted, end, search.begin(), search.end())) != end) { - if (closing_quote != last_inserted) { - result.insert(result.end(), last_inserted, closing_quote - 1); - last_inserted = closing_quote - 1; - } - auto opening_quote = closing_quote + 2; - while (opening_quote != end && isspace(*opening_quote)) { - opening_quote++; - } - if (opening_quote == end) { - break; - } - if (*opening_quote == '\'') { - last_inserted = opening_quote + 1; - } else { - result.insert(result.end(), last_inserted, opening_quote); - last_inserted = opening_quote; - } - } - result.insert(result.end(), last_inserted, end); - return result; -} - -std::vector JSONify(const std::vector& code) { - // 1. Remove string comments - std::vector stripped = StripComments(code); - - // 2. join multiline strings - std::vector joined = JoinMultilineString(stripped); - - // 3. 
normalize string literals from ' into " - for (size_t i = 0; i < joined.size(); ++i) { - if (joined[i] == '\'') { - joined[i] = '"'; - } - } - - // 4. turn pseudo-booleans strings into Booleans - std::vector result3 = ReplaceAll(joined, R"("true")", "true"); - std::vector result4 = ReplaceAll(result3, R"("false")", "false"); - - return result4; -} - -int AddGypi(const std::string& var, - const std::string& filename, - Fragments* definitions) { - Debug("AddGypi %s start\n", filename.c_str()); - - int error = 0; - size_t file_size = GetFileSize(filename, &error); - if (error != 0) { - return error; - } - std::vector code = ReadFileSync(filename.c_str(), file_size, &error); - if (error != 0) { - return error; - } - assert(var == "config"); - - std::vector transformed = JSONify(code); - // Socket: Do NOT compress config.gypi - it's parsed as JSON, not loaded as JS. - // Use direct byte array generation instead of GetDefinition (which compresses). - definitions->emplace_back(GenerateByteArrayDefinition(transformed, var)); - return 0; -} - -int JS2C(const FileList& js_files, - const FileList& mjs_files, - const std::string& config, - const std::string& dest) { - Fragments definitions; - definitions.reserve(js_files.size() + mjs_files.size() + 1); - Fragments initializers; - initializers.reserve(js_files.size() + mjs_files.size()); - Fragments registrations; - registrations.reserve(js_files.size() + mjs_files.size() + 1); - - for (const auto& filename : js_files) { - int r = AddModule(filename, &definitions, &initializers, ®istrations); - if (r != 0) { - return r; - } - } - for (const auto& filename : mjs_files) { - int r = AddModule(filename, &definitions, &initializers, ®istrations); - if (r != 0) { - return r; - } - } - - assert(FilenameIsConfigGypi(config)); - // "config.gypi" -> config_raw. - int r = AddGypi("config", config, &definitions); - if (r != 0) { - return r; - } - Fragment out = Format(definitions, initializers, registrations); - return WriteIfChanged(out, dest); -} - -int PrintUsage(const char* argv0) { - fprintf(stderr, - "Usage: %s [--verbose] [--root /path/to/project/root] " - "path/to/output.cc path/to/directory " - "[extra-files ...]\n", - argv0); - return 1; -} - -int Main(int argc, char* argv[]) { - if (argc < 3) { - return PrintUsage(argv[0]); - } - - std::vector args; - args.reserve(argc); - std::string root_dir; - for (int i = 1; i < argc; ++i) { - std::string arg(argv[i]); - if (arg == "--verbose") { - is_verbose = true; - } else if (arg == "--root") { - if (i == argc - 1) { - fprintf(stderr, "--root must be followed by a path\n"); - return 1; - } - root_dir = argv[++i]; - } else { - args.emplace_back(argv[i]); - } - } - - if (args.size() < 2) { - return PrintUsage(argv[0]); - } - - if (!root_dir.empty()) { - int r = uv_chdir(root_dir.c_str()); - if (r != 0) { - fprintf(stderr, "Cannot switch to the directory specified by --root\n"); - PrintUvError("chdir", root_dir.c_str(), r); - return 1; - } - } - std::string output = args[0]; - - FileMap file_map; - for (size_t i = 1; i < args.size(); ++i) { - int error = 0; - const std::string& file = args[i]; - if (IsDirectory(file, &error)) { - if (!SearchFiles(file, &file_map, kJsSuffix) || - !SearchFiles(file, &file_map, kMjsSuffix)) { - return 1; - } - } else if (error != 0) { - return 1; - } else { // It's a file. 
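-      // Only .js, .mjs and .gypi files are accepted; anything else aborts.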
- std::string_view extension = HasAllowedExtensions(file); - if (extension.size() != 0) { - auto it = file_map.insert({std::string(extension), FileList()}).first; - it->second.push_back(file); - } else { - fprintf(stderr, "Unsupported file: %s\n", file.c_str()); - return 1; - } - } - } - - // Should have exactly 3 types: `.js`, `.mjs` and `.gypi`. - assert(file_map.size() == 3); - auto gypi_it = file_map.find(".gypi"); - // Currently config.gypi is the only `.gypi` file allowed - if (gypi_it == file_map.end() || gypi_it->second.size() != 1 || - !FilenameIsConfigGypi(gypi_it->second[0])) { - fprintf( - stderr, - "Arguments should contain one and only one .gypi file: config.gypi\n"); - return 1; - } - auto js_it = file_map.find(".js"); - auto mjs_it = file_map.find(".mjs"); - assert(js_it != file_map.end() && mjs_it != file_map.end()); - - auto it = std::find(mjs_it->second.begin(), - mjs_it->second.end(), - "lib/eslint.config_partial.mjs"); - if (it != mjs_it->second.end()) { - mjs_it->second.erase(it); - } - - std::sort(js_it->second.begin(), js_it->second.end()); - std::sort(mjs_it->second.begin(), mjs_it->second.end()); - - return JS2C(js_it->second, mjs_it->second, gypi_it->second[0], output); -} -} // namespace js2c -} // namespace node - -NODE_MAIN(int argc, node::argv_type raw_argv[]) { - char** argv; - node::FixupMain(argc, raw_argv, &argv); - return node::js2c::Main(argc, argv); -} diff --git a/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_elf_compress.c b/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_elf_compress.c deleted file mode 100644 index 83d3680c4..000000000 --- a/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_elf_compress.c +++ /dev/null @@ -1,341 +0,0 @@ -/* - * Socket ELF Compressor - Binary compression for Linux using liblzma - * Compresses ELF binaries while maintaining functionality and avoiding AV flags - * - * Usage: - * socket_elf_compress input_binary output_binary [--quality=lzma|zstd|lz4] - * - * Features: - * - Uses native liblzma (no AV flags) - * - ~75-77% compression with LZMA - * - Creates self-contained compressed binary - * - Compatible with all Linux distributions - */ - -#include -#include -#include -#include -#include -#include -#include -#include -#include - -/* Compression quality settings */ -typedef enum { - QUALITY_LZ4, /* Fast decompression, lower ratio (~50%) */ - QUALITY_ZSTD, /* Balanced, good performance (~65%) */ - QUALITY_LZMA, /* Maximum compression (~75%) */ - QUALITY_DEFAULT = QUALITY_LZMA -} CompressionQuality; - -/* Compressed binary header format */ -struct CompressedHeader { - uint32_t magic; /* "SELF" = Socket ELF = 0x53454C46 */ - uint32_t algorithm; /* Compression algorithm ID */ - uint64_t original_size; /* Decompressed size in bytes */ - uint64_t compressed_size;/* Compressed payload size in bytes */ -}; - -#define MAGIC_SELF 0x53454C46 /* "SELF" */ -#define ALGO_LZMA 1 -#define ALGO_ZSTD 2 -#define ALGO_LZ4 3 - -/* Get algorithm name for display */ -const char* get_algorithm_name(CompressionQuality quality) { - switch (quality) { - case QUALITY_LZ4: return "LZ4"; - case QUALITY_ZSTD: return "ZSTD"; - case QUALITY_LZMA: return "LZMA"; - default: return "LZMA"; - } -} - -/* Get algorithm ID for header */ -uint32_t get_algorithm_id(CompressionQuality quality) { - switch (quality) { - case QUALITY_LZ4: return ALGO_LZ4; - case QUALITY_ZSTD: return ALGO_ZSTD; - case QUALITY_LZMA: return ALGO_LZMA; - default: return ALGO_LZMA; - } -} - -/* Read entire 
file into memory */ -uint8_t* read_file(const char* path, size_t* size) { - FILE* file = fopen(path, "rb"); - if (!file) { - fprintf(stderr, "Error: Cannot open file: %s\n", path); - return NULL; - } - - fseek(file, 0, SEEK_END); - *size = ftell(file); - fseek(file, 0, SEEK_SET); - - uint8_t* buffer = malloc(*size); - if (!buffer) { - fprintf(stderr, "Error: Cannot allocate %zu bytes\n", *size); - fclose(file); - return NULL; - } - - if (fread(buffer, 1, *size, file) != *size) { - fprintf(stderr, "Error: Cannot read file: %s\n", path); - free(buffer); - fclose(file); - return NULL; - } - - fclose(file); - return buffer; -} - -/* Write buffer to file */ -int write_file(const char* path, const uint8_t* data, size_t size) { - FILE* file = fopen(path, "wb"); - if (!file) { - fprintf(stderr, "Error: Cannot create file: %s\n", path); - return 0; - } - - if (fwrite(data, 1, size, file) != size) { - fprintf(stderr, "Error: Cannot write file: %s\n", path); - fclose(file); - return 0; - } - - fclose(file); - - /* Set executable permissions */ - chmod(path, 0755); - return 1; -} - -/* Compress data using LZMA */ -uint8_t* compress_lzma(const uint8_t* input, size_t input_size, - size_t* compressed_size) { - /* Allocate output buffer (worst case: input size + 5% + 4KB) */ - size_t output_capacity = input_size + (input_size / 20) + 4096; - uint8_t* output = malloc(output_capacity); - if (!output) { - fprintf(stderr, "Error: Cannot allocate compression buffer\n"); - return NULL; - } - - /* Configure LZMA for maximum compression */ - lzma_options_lzma opt_lzma; - if (lzma_lzma_preset(&opt_lzma, LZMA_PRESET_DEFAULT | LZMA_PRESET_EXTREME)) { - fprintf(stderr, "Error: LZMA preset initialization failed\n"); - free(output); - return NULL; - } - - /* Set up filters */ - lzma_filter filters[] = { - { .id = LZMA_FILTER_LZMA2, .options = &opt_lzma }, - { .id = LZMA_VLI_UNKNOWN, .options = NULL } - }; - - /* Compress */ - *compressed_size = output_capacity; - lzma_ret ret = lzma_stream_buffer_encode( - filters, - LZMA_CHECK_CRC64, - NULL, - input, - input_size, - output, - compressed_size, - output_capacity - ); - - if (ret != LZMA_OK) { - fprintf(stderr, "Error: LZMA compression failed (code: %d)\n", ret); - free(output); - return NULL; - } - - double ratio = 100.0 * (1.0 - (double)*compressed_size / input_size); - printf(" Compressed %zu → %zu bytes (%.1f%% reduction) using LZMA\n", - input_size, *compressed_size, ratio); - - return output; -} - -/* Parse ELF header to verify it's a valid binary */ -int verify_elf(const uint8_t* data, size_t size) { - if (size < sizeof(Elf64_Ehdr)) { - fprintf(stderr, "Error: File too small to be an ELF binary\n"); - return 0; - } - - const Elf64_Ehdr* ehdr = (const Elf64_Ehdr*)data; - - /* Check ELF magic */ - if (ehdr->e_ident[EI_MAG0] != ELFMAG0 || - ehdr->e_ident[EI_MAG1] != ELFMAG1 || - ehdr->e_ident[EI_MAG2] != ELFMAG2 || - ehdr->e_ident[EI_MAG3] != ELFMAG3) { - fprintf(stderr, "Error: Not a valid ELF binary\n"); - return 0; - } - - /* Display ELF info */ - printf("ELF Info:\n"); - printf(" Architecture: %s\n", - ehdr->e_ident[EI_CLASS] == ELFCLASS64 ? "64-bit" : "32-bit"); - printf(" Type: %s\n", - ehdr->e_type == ET_EXEC ? "Executable" : - ehdr->e_type == ET_DYN ? "Shared Object" : "Other"); - printf(" Machine: %s\n", - ehdr->e_machine == EM_X86_64 ? "x86_64" : - ehdr->e_machine == EM_AARCH64 ? "aarch64" : - ehdr->e_machine == EM_386 ? 
"i386" : "Other"); - - return 1; -} - -/* Main compression function */ -int compress_elf(const char* input_path, const char* output_path, - CompressionQuality quality) { - - printf("Socket ELF Compressor\n"); - printf("=====================\n"); - printf("Input: %s\n", input_path); - printf("Output: %s\n", output_path); - printf("Algorithm: %s\n\n", get_algorithm_name(quality)); - - /* Read input binary */ - printf("Reading input binary...\n"); - size_t input_size; - uint8_t* input_data = read_file(input_path, &input_size); - if (!input_data) { - return 1; - } - - printf(" Original size: %zu bytes (%.2f MB)\n\n", - input_size, input_size / 1024.0 / 1024.0); - - /* Verify ELF format */ - printf("Verifying ELF binary...\n"); - if (!verify_elf(input_data, input_size)) { - free(input_data); - return 1; - } - printf("\n"); - - /* Compress binary */ - printf("Compressing binary...\n"); - size_t compressed_size; - uint8_t* compressed_data = NULL; - - if (quality == QUALITY_LZMA) { - compressed_data = compress_lzma(input_data, input_size, &compressed_size); - } else { - fprintf(stderr, "Error: Only LZMA is currently supported\n"); - free(input_data); - return 1; - } - - if (!compressed_data) { - free(input_data); - return 1; - } - printf("\n"); - - /* Build output file */ - printf("Creating output binary...\n"); - - /* Create header */ - struct CompressedHeader header = { - .magic = MAGIC_SELF, - .algorithm = get_algorithm_id(quality), - .original_size = input_size, - .compressed_size = compressed_size - }; - - /* Allocate output buffer: header + compressed data */ - size_t output_size = sizeof(header) + compressed_size; - uint8_t* output = malloc(output_size); - if (!output) { - fprintf(stderr, "Error: Cannot allocate output buffer\n"); - free(input_data); - free(compressed_data); - return 1; - } - - /* Copy header and compressed data */ - memcpy(output, &header, sizeof(header)); - memcpy(output + sizeof(header), compressed_data, compressed_size); - - /* Write output file */ - if (!write_file(output_path, output, output_size)) { - free(input_data); - free(compressed_data); - free(output); - return 1; - } - - /* Calculate statistics */ - double total_ratio = 100.0 * (1.0 - (double)output_size / input_size); - printf(" Output size: %zu bytes (%.2f MB)\n", - output_size, output_size / 1024.0 / 1024.0); - printf(" Total savings: %.1f%%\n", total_ratio); - printf(" Saved: %.2f MB\n", - (input_size - output_size) / 1024.0 / 1024.0); - printf("\n"); - - printf("✅ Compression complete!\n"); - printf("\n"); - printf("Note: Use socket_elf_decompress to run the binary.\n"); - printf("Example: socket_elf_decompress %s [args...]\n", output_path); - - free(input_data); - free(compressed_data); - free(output); - return 0; -} - -int main(int argc, char* argv[]) { - if (argc < 3) { - fprintf(stderr, "Usage: %s input_binary output_binary [--quality=lzma]\n", - argv[0]); - fprintf(stderr, "\n"); - fprintf(stderr, "Compresses Linux ELF binaries using native liblzma.\n"); - fprintf(stderr, "\n"); - fprintf(stderr, "Quality options:\n"); - fprintf(stderr, " lzma - Maximum compression (~75%%, default)\n"); - fprintf(stderr, "\n"); - fprintf(stderr, "Example:\n"); - fprintf(stderr, " %s ./node ./node.compressed --quality=lzma\n", - argv[0]); - return 1; - } - - const char* input_path = argv[1]; - const char* output_path = argv[2]; - CompressionQuality quality = QUALITY_DEFAULT; - - /* Parse quality argument */ - if (argc >= 4) { - const char* quality_arg = argv[3]; - if (strncmp(quality_arg, "--quality=", 10) == 0) { - 
const char* quality_str = quality_arg + 10; - if (strcmp(quality_str, "lzma") == 0) { - quality = QUALITY_LZMA; - } else if (strcmp(quality_str, "zstd") == 0) { - quality = QUALITY_ZSTD; - } else if (strcmp(quality_str, "lz4") == 0) { - quality = QUALITY_LZ4; - } else { - fprintf(stderr, "Warning: Unknown quality '%s', using default (lzma)\n", - quality_str); - } - } - } - - return compress_elf(input_path, output_path, quality); -} diff --git a/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_elf_decompress.c b/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_elf_decompress.c deleted file mode 100644 index 1e4685c12..000000000 --- a/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_elf_decompress.c +++ /dev/null @@ -1,237 +0,0 @@ -/* - * Socket ELF Decompressor - Runtime decompression and execution for Linux - * Decompresses binaries created by socket_elf_compress and executes them - * - * Usage: - * socket_elf_decompress compressed_binary [args...] - * - * This tool: - * 1. Reads the compressed binary - * 2. Decompresses it using liblzma - * 3. Executes the decompressed binary with original arguments - */ - -#include -#include -#include -#include -#include -#include -#include -#include - -/* Compressed binary header format (must match compressor) */ -struct CompressedHeader { - uint32_t magic; /* "SELF" = 0x53454C46 */ - uint32_t algorithm; /* Compression algorithm ID */ - uint64_t original_size; /* Decompressed size in bytes */ - uint64_t compressed_size;/* Compressed payload size in bytes */ -}; - -#define MAGIC_SELF 0x53454C46 /* "SELF" */ -#define ALGO_LZMA 1 -#define ALGO_ZSTD 2 -#define ALGO_LZ4 3 - -/* Read entire file into memory */ -uint8_t* read_file(const char* path, size_t* size) { - FILE* file = fopen(path, "rb"); - if (!file) { - fprintf(stderr, "Error: Cannot open file: %s\n", path); - return NULL; - } - - fseek(file, 0, SEEK_END); - *size = ftell(file); - fseek(file, 0, SEEK_SET); - - uint8_t* buffer = malloc(*size); - if (!buffer) { - fprintf(stderr, "Error: Cannot allocate %zu bytes\n", *size); - fclose(file); - return NULL; - } - - if (fread(buffer, 1, *size, file) != *size) { - fprintf(stderr, "Error: Cannot read file: %s\n", path); - free(buffer); - fclose(file); - return NULL; - } - - fclose(file); - return buffer; -} - -/* Decompress LZMA data */ -uint8_t* decompress_lzma(const uint8_t* input, size_t input_size, - size_t output_size) { - /* Allocate output buffer */ - uint8_t* output = malloc(output_size); - if (!output) { - fprintf(stderr, "Error: Cannot allocate %zu bytes for decompression\n", - output_size); - return NULL; - } - - /* Decompress */ - size_t in_pos = 0; - size_t out_pos = 0; - uint64_t memlimit = UINT64_MAX; - - lzma_ret ret = lzma_stream_buffer_decode( - &memlimit, - 0, - NULL, - input, - &in_pos, - input_size, - output, - &out_pos, - output_size - ); - - if (ret != LZMA_OK) { - fprintf(stderr, "Error: LZMA decompression failed (code: %d)\n", ret); - free(output); - return NULL; - } - - if (out_pos != output_size) { - fprintf(stderr, "Error: Size mismatch (expected %zu, got %zu)\n", - output_size, out_pos); - free(output); - return NULL; - } - - return output; -} - -/* Decompress and execute binary */ -int decompress_and_execute(const char* compressed_path, int argc, char* argv[]) { - printf("Socket ELF Decompressor\n"); - printf("=======================\n\n"); - - /* Read compressed binary */ - printf("Reading compressed binary: %s\n", compressed_path); - size_t file_size; 
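-  /* read_file allocates with malloc; the caller owns and frees the buffer. */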
- uint8_t* file_data = read_file(compressed_path, &file_size); - if (!file_data) { - return 1; - } - - /* Parse header */ - if (file_size < sizeof(struct CompressedHeader)) { - fprintf(stderr, "Error: File too small to contain header\n"); - free(file_data); - return 1; - } - - struct CompressedHeader* header = (struct CompressedHeader*)file_data; - - /* Validate magic */ - if (header->magic != MAGIC_SELF) { - fprintf(stderr, "Error: Invalid magic number (not a compressed Socket binary)\n"); - fprintf(stderr, "Expected: 0x%08x, Got: 0x%08x\n", MAGIC_SELF, header->magic); - free(file_data); - return 1; - } - - printf(" Compressed size: %lu bytes (%.2f MB)\n", - header->compressed_size, - header->compressed_size / 1024.0 / 1024.0); - printf(" Decompressed size: %lu bytes (%.2f MB)\n", - header->original_size, - header->original_size / 1024.0 / 1024.0); - printf(" Algorithm: %u\n", header->algorithm); - printf("\n"); - - /* Decompress */ - printf("Decompressing...\n"); - const uint8_t* compressed_payload = file_data + sizeof(struct CompressedHeader); - - uint8_t* decompressed = NULL; - if (header->algorithm == ALGO_LZMA) { - decompressed = decompress_lzma( - compressed_payload, - header->compressed_size, - header->original_size - ); - } else { - fprintf(stderr, "Error: Unsupported algorithm: %u\n", header->algorithm); - free(file_data); - return 1; - } - - if (!decompressed) { - free(file_data); - return 1; - } - - printf(" ✅ Decompressed successfully\n\n"); - - /* Write decompressed binary to temporary file */ - printf("Writing temporary executable...\n"); - - char temp_path[] = "/tmp/socket_decompress_XXXXXX"; - int temp_fd = mkstemp(temp_path); - if (temp_fd == -1) { - fprintf(stderr, "Error: Failed to create temporary file\n"); - free(file_data); - free(decompressed); - return 1; - } - - ssize_t written = write(temp_fd, decompressed, header->original_size); - close(temp_fd); - - if (written != (ssize_t)header->original_size) { - fprintf(stderr, "Error: Failed to write temporary file\n"); - unlink(temp_path); - free(file_data); - free(decompressed); - return 1; - } - - /* Make temporary file executable */ - chmod(temp_path, 0755); - - printf(" Temporary file: %s\n\n", temp_path); - - /* Execute decompressed binary */ - printf("Executing decompressed binary...\n"); - printf("─────────────────────────────────\n\n"); - - /* Build new argv with temp_path as argv[0] */ - char** new_argv = malloc((argc) * sizeof(char*)); - new_argv[0] = temp_path; - for (int i = 2; i < argc; i++) { - new_argv[i - 1] = argv[i]; - } - new_argv[argc - 1] = NULL; - - /* Execute */ - execv(temp_path, new_argv); - - /* If we get here, execv failed */ - fprintf(stderr, "Error: Failed to execute decompressed binary\n"); - unlink(temp_path); - free(file_data); - free(decompressed); - free(new_argv); - return 1; -} - -int main(int argc, char* argv[]) { - if (argc < 2) { - fprintf(stderr, "Usage: %s compressed_binary [args...]\n", argv[0]); - fprintf(stderr, "\n"); - fprintf(stderr, "Decompresses and executes a binary created by socket_elf_compress.\n"); - fprintf(stderr, "\n"); - fprintf(stderr, "Example:\n"); - fprintf(stderr, " %s ./node.compressed --version\n", argv[0]); - return 1; - } - - return decompress_and_execute(argv[1], argc, argv); -} diff --git a/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_macho_compress b/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_macho_compress deleted file mode 100755 index 6702044bd..000000000 Binary files 
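This compressor and its macOS decompressor both sit on two libcompression entry points, `compression_encode_buffer` and `compression_decode_buffer`, which the `CompressData` helper below wraps. A self-contained round-trip sketch (macOS only; build with `clang demo.c -lcompression`; error handling trimmed):

```c
#include <compression.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

int main(void) {
  uint8_t src[4096];
  memset(src, 'A', sizeof(src)); /* Highly compressible input. */

  /* Worst case, compressed output can exceed the input; leave slack,
   * as CompressData does with its input_size + 4096 buffer. */
  uint8_t packed[8192];
  size_t packed_size = compression_encode_buffer(
      packed, sizeof(packed), src, sizeof(src),
      NULL /* scratch buffer: let the library allocate */, COMPRESSION_LZFSE);
  if (packed_size == 0) { fprintf(stderr, "encode failed\n"); return 1; }

  uint8_t restored[4096];
  size_t restored_size = compression_decode_buffer(
      restored, sizeof(restored), packed, packed_size,
      NULL, COMPRESSION_LZFSE);
  if (restored_size != sizeof(src) || memcmp(src, restored, sizeof(src))) {
    fprintf(stderr, "round-trip mismatch\n");
    return 1;
  }

  printf("%zu -> %zu bytes\n", sizeof(src), packed_size);
  return 0;
}
```

Both calls return 0 on failure, which is why the surrounding code treats a zero result as an error.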
a/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_macho_compress and /dev/null differ diff --git a/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_macho_compress.cc b/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_macho_compress.cc deleted file mode 100644 index 3d45d4d9a..000000000 --- a/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_macho_compress.cc +++ /dev/null @@ -1,492 +0,0 @@ -// -// Socket Mach-O Compressor - Binary compression using Apple's Compression framework. -// Compresses specific Mach-O sections while preserving code signature compatibility. -// -// Usage: -// socket_macho_compress input_binary output_binary [--quality=default|lzfse|lz4|lzma|zlib] -// -// Features: -// - Compresses __TEXT section (executable code) -// - Uses Apple's native compression framework -// - Preserves Mach-O structure for code signing -// - Creates self-extracting stub for runtime decompression -// - ~20-30% size reduction on top of stripping -// - -#include -#include -#include -#include -#include -#include -#include -#include - -#if defined(__APPLE__) -#include -#include -#include -#include -#include -#include -#endif - -namespace socket { -namespace macho { - -// Compression quality settings. -enum class CompressionQuality { - LZ4, // Fast decompression, lower ratio (~20-30%). - ZLIB, // Balanced, good compatibility (~30-40%). - LZFSE, // Apple's algorithm, best for binaries (~35-45%). - LZMA, // Maximum compression, slower (~40-50%). - DEFAULT = LZFSE, -}; - -#if defined(__APPLE__) - -// Convert quality enum to compression_algorithm. -compression_algorithm GetCompressionAlgorithm(CompressionQuality quality) { - switch (quality) { - case CompressionQuality::LZ4: - return COMPRESSION_LZ4; - case CompressionQuality::ZLIB: - return COMPRESSION_ZLIB; - case CompressionQuality::LZFSE: - return COMPRESSION_LZFSE; - case CompressionQuality::LZMA: - return COMPRESSION_LZMA; - default: - return COMPRESSION_LZFSE; - } -} - -// Get algorithm name for logging. -const char* GetAlgorithmName(CompressionQuality quality) { - switch (quality) { - case CompressionQuality::LZ4: - return "LZ4"; - case CompressionQuality::ZLIB: - return "ZLIB"; - case CompressionQuality::LZFSE: - return "LZFSE"; - case CompressionQuality::LZMA: - return "LZMA"; - default: - return "LZFSE"; - } -} - -// Read entire file into memory. -std::vector ReadFile(const std::string& path) { - std::ifstream file(path, std::ios::binary | std::ios::ate); - if (!file) { - fprintf(stderr, "Error: Cannot open file: %s\n", path.c_str()); - return {}; - } - - std::streamsize size = file.tellg(); - file.seekg(0, std::ios::beg); - - std::vector buffer(size); - if (!file.read(reinterpret_cast(buffer.data()), size)) { - fprintf(stderr, "Error: Cannot read file: %s\n", path.c_str()); - return {}; - } - - return buffer; -} - -// Write buffer to file. -bool WriteFile(const std::string& path, const std::vector& data) { - std::ofstream file(path, std::ios::binary); - if (!file) { - fprintf(stderr, "Error: Cannot create file: %s\n", path.c_str()); - return false; - } - - if (!file.write(reinterpret_cast(data.data()), data.size())) { - fprintf(stderr, "Error: Cannot write file: %s\n", path.c_str()); - return false; - } - - // Set executable permissions. - chmod(path.c_str(), 0755); - return true; -} - -// Compress data using Apple's Compression framework. 
-std::vector CompressData( - const uint8_t* input, - size_t input_size, - CompressionQuality quality) { - - compression_algorithm algorithm = GetCompressionAlgorithm(quality); - - // Allocate worst-case size for compressed output. - size_t max_compressed_size = input_size + 4096; - std::vector compressed(max_compressed_size); - - size_t compressed_size = compression_encode_buffer( - compressed.data(), - compressed.capacity(), - input, - input_size, - nullptr, - algorithm); - - if (compressed_size == 0) { - fprintf(stderr, "Error: Compression failed\n"); - return {}; - } - - compressed.resize(compressed_size); - - double ratio = 100.0 * (1.0 - static_cast(compressed_size) / input_size); - printf(" Compressed %zu → %zu bytes (%.1f%% reduction) using %s\n", - input_size, compressed_size, ratio, GetAlgorithmName(quality)); - - return compressed; -} - -// Mach-O header structure for 64-bit binaries. -struct MachOInfo { - bool is_64bit = false; - bool is_big_endian = false; - size_t header_size = 0; - uint32_t ncmds = 0; - std::vector header_data; -}; - -// Parse Mach-O header. -MachOInfo ParseMachOHeader(const std::vector& binary) { - MachOInfo info; - - if (binary.size() < sizeof(mach_header_64)) { - fprintf(stderr, "Error: Binary too small to be valid Mach-O\n"); - return info; - } - - const uint32_t magic = *reinterpret_cast(binary.data()); - - if (magic == MH_MAGIC_64 || magic == MH_CIGAM_64) { - info.is_64bit = true; - info.is_big_endian = (magic == MH_CIGAM_64); - const auto* header = reinterpret_cast(binary.data()); - info.header_size = sizeof(mach_header_64); - info.ncmds = header->ncmds; - } else if (magic == MH_MAGIC || magic == MH_CIGAM) { - info.is_64bit = false; - info.is_big_endian = (magic == MH_CIGAM); - const auto* header = reinterpret_cast(binary.data()); - info.header_size = sizeof(mach_header); - info.ncmds = header->ncmds; - } else { - fprintf(stderr, "Error: Not a valid Mach-O binary (magic: 0x%08x)\n", magic); - return info; - } - - // Copy header for modification. - info.header_data.assign(binary.begin(), binary.begin() + info.header_size); - - printf("Mach-O Info:\n"); - printf(" Architecture: %s\n", info.is_64bit ? "64-bit" : "32-bit"); - printf(" Load commands: %u\n", info.ncmds); - - return info; -} - -// Find __TEXT segment in Mach-O binary. 
-struct SegmentInfo { - bool found = false; - size_t file_offset = 0; - size_t file_size = 0; - size_t vm_size = 0; -}; - -SegmentInfo FindTextSegment(const std::vector& binary, const MachOInfo& info) { - SegmentInfo segment; - - size_t offset = info.header_size; - - for (uint32_t i = 0; i < info.ncmds && offset < binary.size(); ++i) { - const auto* cmd = reinterpret_cast(binary.data() + offset); - - if (cmd->cmd == LC_SEGMENT_64 && info.is_64bit) { - const auto* seg_cmd = reinterpret_cast(cmd); - - if (strncmp(seg_cmd->segname, "__TEXT", 16) == 0) { - segment.found = true; - segment.file_offset = seg_cmd->fileoff; - segment.file_size = seg_cmd->filesize; - segment.vm_size = seg_cmd->vmsize; - - printf("Found __TEXT segment:\n"); - printf(" File offset: 0x%zx\n", segment.file_offset); - printf(" File size: %zu bytes (%.2f MB)\n", - segment.file_size, - segment.file_size / 1024.0 / 1024.0); - break; - } - } else if (cmd->cmd == LC_SEGMENT && !info.is_64bit) { - const auto* seg_cmd = reinterpret_cast(cmd); - - if (strncmp(seg_cmd->segname, "__TEXT", 16) == 0) { - segment.found = true; - segment.file_offset = seg_cmd->fileoff; - segment.file_size = seg_cmd->filesize; - segment.vm_size = seg_cmd->vmsize; - - printf("Found __TEXT segment:\n"); - printf(" File offset: 0x%zx\n", segment.file_offset); - printf(" File size: %zu bytes (%.2f MB)\n", - segment.file_size, - segment.file_size / 1024.0 / 1024.0); - break; - } - } - - offset += cmd->cmdsize; - } - - if (!segment.found) { - fprintf(stderr, "Warning: __TEXT segment not found\n"); - } - - return segment; -} - -// Decompression stub that will be prepended to compressed binary. -// This code runs first, decompresses the main binary to memory, and executes it. -const char* kDecompressionStubSource = R"STUB( -#include -#include -#include -#include -#include -#include -#include - -// Compressed binary data embedded after this stub. -extern const unsigned char compressed_binary[]; -extern const unsigned long compressed_size; -extern const unsigned long decompressed_size; -extern const int compression_algorithm; - -// Decompression stub entry point. -// This runs before main(), decompresses the embedded binary, and executes it. -__attribute__((constructor)) -static void decompress_and_execute() { - // Allocate memory for decompressed binary. - void* decompressed = mmap( - NULL, - decompressed_size, - PROT_READ | PROT_WRITE | PROT_EXEC, - MAP_PRIVATE | MAP_ANONYMOUS, - -1, - 0); - - if (decompressed == MAP_FAILED) { - fprintf(stderr, "Error: Failed to allocate memory for decompression\n"); - exit(1); - } - - // Decompress embedded binary. - size_t result = compression_decode_buffer( - (uint8_t*)decompressed, - decompressed_size, - compressed_binary, - compressed_size, - NULL, - (compression_algorithm)compression_algorithm); - - if (result != decompressed_size) { - fprintf(stderr, "Error: Decompression failed (expected %lu, got %zu)\n", - decompressed_size, result); - exit(1); - } - - // Make decompressed memory executable. - if (mprotect(decompressed, decompressed_size, PROT_READ | PROT_EXEC) != 0) { - fprintf(stderr, "Error: Failed to set executable permissions\n"); - exit(1); - } - - // Execute decompressed binary via function pointer. - // This effectively transfers control to the decompressed main(). - typedef int (*main_func_t)(int argc, char** argv, char** envp); - main_func_t main_func = (main_func_t)decompressed; - - // Get original argc/argv from dyld. 
- int argc = *_NSGetArgc(); - char** argv = *_NSGetArgv(); - char** envp = *_NSGetEnviron(); - - // Execute decompressed binary's main(). - int exit_code = main_func(argc, argv, envp); - - // Cleanup. - munmap(decompressed, decompressed_size); - exit(exit_code); -} - -int main(int argc, char** argv) { - // This should never execute because constructor runs first. - fprintf(stderr, "Error: Decompression stub failed\n"); - return 1; -} -)STUB"; - -// Main compression function. -bool CompressMachO( - const std::string& input_path, - const std::string& output_path, - CompressionQuality quality) { - - printf("Socket Mach-O Compressor\n"); - printf("========================\n"); - printf("Input: %s\n", input_path.c_str()); - printf("Output: %s\n", output_path.c_str()); - printf("Algorithm: %s\n\n", GetAlgorithmName(quality)); - - // Read input binary. - printf("Reading input binary...\n"); - std::vector binary = ReadFile(input_path); - if (binary.empty()) { - return false; - } - - size_t original_size = binary.size(); - printf(" Original size: %zu bytes (%.2f MB)\n\n", - original_size, original_size / 1024.0 / 1024.0); - - // Parse Mach-O header. - printf("Parsing Mach-O structure...\n"); - MachOInfo info = ParseMachOHeader(binary); - if (info.header_size == 0) { - return false; - } - printf("\n"); - - // Find __TEXT segment. - printf("Locating __TEXT segment...\n"); - SegmentInfo text_segment = FindTextSegment(binary, info); - if (!text_segment.found) { - return false; - } - printf("\n"); - - // Compress the entire binary (simpler approach). - // A more sophisticated version would compress only __TEXT segment. - printf("Compressing binary...\n"); - std::vector compressed = CompressData( - binary.data(), - binary.size(), - quality); - - if (compressed.empty()) { - return false; - } - - size_t compressed_size = compressed.size(); - printf("\n"); - - // For now, just write the compressed data with a simple header. - // A full implementation would create a decompression stub. - printf("Creating output binary...\n"); - - // Header: magic + algorithm + original_size + compressed_size. - struct CompressedHeader { - uint32_t magic; // "SCMP" = Socket Compressed. - uint32_t algorithm; - uint64_t original_size; - uint64_t compressed_size; - } header; - - header.magic = 0x504D4353; // "SCMP". - header.algorithm = static_cast(GetCompressionAlgorithm(quality)); - header.original_size = original_size; - header.compressed_size = compressed_size; - - // Build output: header + compressed data. - std::vector output; - output.reserve(sizeof(header) + compressed_size); - - // Write header. - const uint8_t* header_bytes = reinterpret_cast(&header); - output.insert(output.end(), header_bytes, header_bytes + sizeof(header)); - - // Write compressed data. - output.insert(output.end(), compressed.begin(), compressed.end()); - - // Write output file. 
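A note on the magic constant just above: 0x504D4353 spells "SCMP" only once byte order is taken into account. On the little-endian targets these tools run on (x64, arm64), the least-significant byte is stored first, so the output file actually begins with the ASCII bytes 'S' 'C' 'M' 'P'. A tiny demonstration:

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

int main(void) {
  const uint32_t magic = 0x504D4353; /* same constant as the compressor */
  char bytes[5] = {0};
  memcpy(bytes, &magic, sizeof(magic)); /* raw in-memory byte order */
  puts(bytes); /* prints "SCMP" on a little-endian machine */
  return 0;
}
```

The ELF tool's 0x53454C46 takes the opposite convention: the 32-bit value reads "SELF", but the bytes on disk are "FLES". Each compressor/decompressor pair is self-consistent, so this only matters when inspecting the files by hand.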
- if (!WriteFile(output_path, output)) { - return false; - } - - size_t final_size = output.size(); - double total_ratio = 100.0 * (1.0 - static_cast(final_size) / original_size); - - printf(" Output size: %zu bytes (%.2f MB)\n", - final_size, final_size / 1024.0 / 1024.0); - printf(" Total savings: %.1f%%\n", total_ratio); - printf(" Saved: %.2f MB\n", - (original_size - final_size) / 1024.0 / 1024.0); - printf("\n"); - - printf("✅ Compression complete!\n"); - printf("\nNote: This is a proof-of-concept.\n"); - printf("The output requires a decompression stub to execute.\n"); - printf("Use the companion decompressor tool to run the binary.\n"); - - return true; -} - -#else // !defined(__APPLE__) - -bool CompressMachO( - const std::string& input_path, - const std::string& output_path, - CompressionQuality quality) { - fprintf(stderr, "Error: This tool only works on macOS\n"); - return false; -} - -#endif // defined(__APPLE__) - -} // namespace macho -} // namespace socket - -int main(int argc, char* argv[]) { - if (argc < 3) { - fprintf(stderr, "Usage: %s input_binary output_binary [--quality=lzfse|lz4|lzma|zlib]\n", argv[0]); - return 1; - } - - std::string input_path = argv[1]; - std::string output_path = argv[2]; - socket::macho::CompressionQuality quality = socket::macho::CompressionQuality::DEFAULT; - - // Parse optional quality argument. - if (argc >= 4) { - std::string quality_arg = argv[3]; - if (quality_arg.find("--quality=") == 0) { - std::string quality_str = quality_arg.substr(10); - if (quality_str == "lz4") { - quality = socket::macho::CompressionQuality::LZ4; - } else if (quality_str == "zlib") { - quality = socket::macho::CompressionQuality::ZLIB; - } else if (quality_str == "lzfse") { - quality = socket::macho::CompressionQuality::LZFSE; - } else if (quality_str == "lzma") { - quality = socket::macho::CompressionQuality::LZMA; - } else { - fprintf(stderr, "Warning: Unknown quality '%s', using default (lzfse)\n", - quality_str.c_str()); - } - } - } - - bool success = socket::macho::CompressMachO(input_path, output_path, quality); - return success ? 0 : 1; -} diff --git a/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_macho_decompress b/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_macho_decompress deleted file mode 100755 index e65abfdeb..000000000 Binary files a/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_macho_decompress and /dev/null differ diff --git a/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_macho_decompress.cc b/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_macho_decompress.cc deleted file mode 100644 index 8b4062ccc..000000000 --- a/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_macho_decompress.cc +++ /dev/null @@ -1,552 +0,0 @@ -// -// -// Socket Mach-O Decompressor - Runtime decompression with caching. -// Decompresses binaries created by socket_macho_compress and executes them. 
-// -// Caching Strategy (follows npm/npx/socket-lib pattern): -// Reference: https://github.com/npm/cli/blob/v11.6.2/workspaces/libnpmexec/lib/index.js#L233-L244 -// Reference: @socketsecurity/lib/src/dlx.ts generateCacheKey() -// -// - Cache key (directory name): First 16 chars of SHA-512 hash of compressed file -// (matches npm/npx: SHA-512 truncated to 16 chars for shorter paths) -// (matches socket-lib: generateCacheKey() uses sha512().substring(0,16)) -// - Content verification: Full SHA-512 of decompressed binary -// (npm uses SHA-512 for content hashes via cacache put.js algorithms: ['sha512']) -// - First run: Decompress to ~/.socket/_dlx//node -// - Subsequent runs: Execute cached binary directly (zero overhead) -// -// Usage: -// socket_macho_decompress compressed_binary [args...] -// - -#include -#include -#include -#include -#include -#include -#include -#include - -#if defined(__APPLE__) -#include -#include -#include -#include -#include -#include -#include -#include -#endif - -namespace socket { -namespace macho { - -#if defined(__APPLE__) - -// Compressed binary header format. -struct CompressedHeader { - uint32_t magic; // "SCMP" = 0x504D4353. - uint32_t algorithm; - uint64_t original_size; - uint64_t compressed_size; -}; - -// Get home directory. -std::string GetHomeDirectory() { - const char* home = getenv("HOME"); - if (home) { - return std::string(home); - } - - struct passwd* pw = getpwuid(getuid()); - if (pw && pw->pw_dir) { - return std::string(pw->pw_dir); - } - - return ""; -} - -// Calculate first 16 chars of SHA-512 hash (for cache keys, matching socket-lib). -// This matches npm/npx behavior and socket-lib's generateCacheKey() function. -// Reference: @socketsecurity/lib/src/dlx.ts line 36 -// Implementation: createHash('sha512').update(spec).digest('hex').substring(0, 16) -std::string CalculateCacheKey(const std::vector& data) { - unsigned char hash[CC_SHA512_DIGEST_LENGTH]; - CC_SHA512(data.data(), static_cast(data.size()), hash); - - std::ostringstream ss; - ss << std::hex << std::setfill('0'); - // Only output first 8 bytes (16 hex chars) to match socket-lib. - for (int i = 0; i < 8; ++i) { - ss << std::setw(2) << static_cast(hash[i]); - } - - return ss.str(); -} - -// Calculate SHA-512 of data (for content verification, like npm/cacache). -std::string CalculateSHA512(const std::vector& data) { - unsigned char hash[CC_SHA512_DIGEST_LENGTH]; - CC_SHA512(data.data(), static_cast(data.size()), hash); - - std::ostringstream ss; - ss << std::hex << std::setfill('0'); - for (int i = 0; i < CC_SHA512_DIGEST_LENGTH; ++i) { - ss << std::setw(2) << static_cast(hash[i]); - } - - return ss.str(); -} - -// Calculate first 16 chars of SHA-512 hash of a file (for cache keys, matching socket-lib). -std::string CalculateFileCacheKey(const std::string& path) { - std::ifstream file(path, std::ios::binary); - if (!file) { - return ""; - } - - CC_SHA512_CTX ctx; - CC_SHA512_Init(&ctx); - - char buffer[8192]; - while (file.read(buffer, sizeof(buffer))) { - CC_SHA512_Update(&ctx, buffer, file.gcount()); - } - if (file.gcount() > 0) { - CC_SHA512_Update(&ctx, buffer, file.gcount()); - } - - unsigned char hash[CC_SHA512_DIGEST_LENGTH]; - CC_SHA512_Final(hash, &ctx); - - std::ostringstream ss; - ss << std::hex << std::setfill('0'); - // Only output first 8 bytes (16 hex chars) to match socket-lib. - for (int i = 0; i < 8; ++i) { - ss << std::setw(2) << static_cast(hash[i]); - } - - return ss.str(); -} - -// Calculate SHA-512 of a file (for content verification, like npm/cacache). 
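Putting the comments above together, the cache lookup path is fully determined by the spec (or file) hash. A sketch of the same derivation as a standalone C program, mirroring `CalculateCacheKey` and the `~/.socket/_dlx/<key>/node` layout described above (macOS CommonCrypto; the spec value here is illustrative, not a real package):

```c
#include <CommonCrypto/CommonDigest.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

int main(void) {
  const char *spec = "example-package@1.0.0"; /* hypothetical spec string */

  unsigned char hash[CC_SHA512_DIGEST_LENGTH];
  CC_SHA512(spec, (CC_LONG)strlen(spec), hash);

  /* First 8 bytes -> 16 hex chars, matching socket-lib's generateCacheKey(). */
  char key[17];
  for (int i = 0; i < 8; ++i) {
    snprintf(key + i * 2, 3, "%02x", hash[i]);
  }

  const char *home = getenv("HOME");
  printf("%s/.socket/_dlx/%s/node\n", home != NULL ? home : "~", key);
  return 0;
}
```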
-std::string CalculateFileSHA512(const std::string& path) { - std::ifstream file(path, std::ios::binary); - if (!file) { - return ""; - } - - CC_SHA512_CTX ctx; - CC_SHA512_Init(&ctx); - - char buffer[8192]; - while (file.read(buffer, sizeof(buffer))) { - CC_SHA512_Update(&ctx, buffer, file.gcount()); - } - if (file.gcount() > 0) { - CC_SHA512_Update(&ctx, buffer, file.gcount()); - } - - unsigned char hash[CC_SHA512_DIGEST_LENGTH]; - CC_SHA512_Final(hash, &ctx); - - std::ostringstream ss; - ss << std::hex << std::setfill('0'); - for (int i = 0; i < CC_SHA512_DIGEST_LENGTH; ++i) { - ss << std::setw(2) << static_cast(hash[i]); - } - - return ss.str(); -} - -// Create directory recursively. -bool CreateDirectory(const std::string& path) { - std::string current; - for (size_t i = 0; i < path.size(); ++i) { - if (path[i] == '/' || i == path.size() - 1) { - if (i == path.size() - 1 && path[i] != '/') { - current += path[i]; - } - - if (!current.empty() && current != "/") { - struct stat st; - if (stat(current.c_str(), &st) != 0) { - if (mkdir(current.c_str(), 0755) != 0) { - return false; - } - } - } - - if (i < path.size() - 1) { - current += path[i]; - } - } else { - current += path[i]; - } - } - - return true; -} - -// Check if file exists. -bool FileExists(const std::string& path) { - struct stat st; - return stat(path.c_str(), &st) == 0 && S_ISREG(st.st_mode); -} - -// Read file into memory. -std::vector ReadFile(const std::string& path) { - std::ifstream file(path, std::ios::binary | std::ios::ate); - if (!file) { - fprintf(stderr, "Error: Cannot open file: %s\n", path.c_str()); - return {}; - } - - std::streamsize size = file.tellg(); - file.seekg(0, std::ios::beg); - - std::vector buffer(size); - if (!file.read(reinterpret_cast(buffer.data()), size)) { - fprintf(stderr, "Error: Cannot read file: %s\n", path.c_str()); - return {}; - } - - return buffer; -} - -// Write data to file. -bool WriteFile(const std::string& path, const void* data, size_t size) { - std::ofstream file(path, std::ios::binary); - if (!file) { - return false; - } - - file.write(static_cast(data), size); - return file.good(); -} - -// Extract spec string from self-extracting binary if embedded. -// Format: "SOCKET_SPEC:package@version\n" appears after the decompressor stub. -std::string ExtractEmbeddedSpec(const std::string& binary_path) { - std::ifstream file(binary_path, std::ios::binary); - if (!file) { - return ""; - } - - // Search for "SOCKET_SPEC:" marker in the binary. - std::string marker = "SOCKET_SPEC:"; - std::string buffer; - buffer.resize(4096); - - while (file.read(&buffer[0], buffer.size())) { - size_t pos = buffer.find(marker); - if (pos != std::string::npos) { - // Found marker, read until newline. - size_t start = pos + marker.length(); - size_t end = buffer.find('\n', start); - if (end != std::string::npos) { - return buffer.substr(start, end - start); - } - } - } - - return ""; -} - -// Decompress and execute binary. -int DecompressAndExecute(const std::string& compressed_path, int argc, char* argv[]) { - printf("Socket Mach-O Decompressor\n"); - printf("==========================\n\n"); - - // Try to extract embedded spec string (for socket-lib cache key). 
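One caveat with `ExtractEmbeddedSpec` above: it scans the file in fixed 4096-byte reads and searches each chunk independently, so a `SOCKET_SPEC:` marker that happens to straddle a chunk boundary is never found, and `std::string::find` on binary data also stops at embedded NUL bytes. A hedged sketch of a boundary-safe variant in C, carrying a marker-sized tail between reads (`memmem` is available on macOS and, with `_GNU_SOURCE`, on glibc):

```c
#define _GNU_SOURCE /* memmem on glibc; declared by default on macOS */
#include <stdio.h>
#include <string.h>

/* Find "SOCKET_SPEC:<value>\n" in a binary file without missing matches
 * that straddle read boundaries. Sketch only: returns 1 and fills `out`
 * on success, 0 otherwise. */
int find_embedded_spec(const char *path, char *out, size_t out_size) {
  static const char marker[] = "SOCKET_SPEC:";
  const size_t mlen = sizeof(marker) - 1;
  FILE *f = fopen(path, "rb");
  if (f == NULL || out_size == 0) {
    if (f != NULL) fclose(f);
    return 0;
  }

  unsigned char buf[4096];
  size_t carry = 0;
  int found = 0;
  for (;;) {
    size_t n = fread(buf + carry, 1, sizeof(buf) - carry, f);
    if (n == 0) break;
    size_t total = carry + n;

    unsigned char *hit = memmem(buf, total, marker, mlen); /* binary-safe */
    if (hit != NULL) {
      unsigned char *val = hit + mlen;
      unsigned char *end = memchr(val, '\n', (size_t)(buf + total - val));
      if (end != NULL) {
        size_t len = (size_t)(end - val);
        if (len >= out_size) len = out_size - 1;
        memcpy(out, val, len);
        out[len] = '\0';
        found = 1;
        break;
      }
      /* Marker seen but value incomplete: keep from the marker onward. */
      carry = (size_t)(buf + total - hit);
      if (carry == sizeof(buf)) break; /* value longer than buffer: give up */
      memmove(buf, hit, carry);
      continue;
    }
    /* Keep a (marker-length - 1) tail so straddling matches are caught. */
    carry = total < mlen - 1 ? total : mlen - 1;
    memmove(buf, buf + total - carry, carry);
  }

  fclose(f);
  return found;
}
```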
- std::string spec = ExtractEmbeddedSpec(compressed_path); - std::string cacheKey; - - if (!spec.empty()) { - printf("Found embedded spec: %s\n", spec.c_str()); - printf("Calculating cache key from spec (SHA-512 truncated to 16 chars)...\n"); - std::vector specBytes(spec.begin(), spec.end()); - cacheKey = CalculateCacheKey(specBytes); - printf(" Cache key: %s\n\n", cacheKey.c_str()); - } else { - // Fallback: use compressed file hash for cache key. - printf("No embedded spec found, using file hash for cache key\n"); - printf("Reading compressed binary: %s\n", compressed_path.c_str()); - std::vector compressed_data = ReadFile(compressed_path); - if (compressed_data.empty()) { - return 1; - } - - printf("Calculating cache key (SHA-512 truncated to 16 chars)...\n"); - cacheKey = CalculateCacheKey(compressed_data); - printf(" Cache key: %s\n\n", cacheKey.c_str()); - } - - // Read compressed binary for decompression. - printf("Reading compressed binary: %s\n", compressed_path.c_str()); - std::vector compressed_data = ReadFile(compressed_path); - if (compressed_data.empty()) { - return 1; - } - - // Build cache path. - std::string home = GetHomeDirectory(); - if (home.empty()) { - fprintf(stderr, "Error: Cannot determine home directory\n"); - return 1; - } - - std::string cache_dir = home + "/.socket/_dlx/" + cacheKey; - std::string cached_binary = cache_dir + "/node"; - std::string metadata_file = cache_dir + "/.dlx-metadata.json"; - - // Check if cached binary exists. - if (FileExists(cached_binary)) { - printf("Cache hit! Verifying cached binary...\n"); - printf(" Location: %s\n", cached_binary.c_str()); - - // Verify cached binary integrity using SHA-512 (like npm/cacache). - std::string cached_sha512 = CalculateFileSHA512(cached_binary); - if (cached_sha512.empty()) { - fprintf(stderr, "Warning: Cannot verify cached binary, re-decompressing\n"); - } else { - // Read expected checksum from metadata if it exists. - std::ifstream meta(metadata_file); - if (meta) { - std::string line; - bool verified = false; - while (std::getline(meta, line)) { - if (line.find("\"checksum\"") != std::string::npos) { - size_t start = line.find(": \""); - if (start != std::string::npos) { - start += 3; - size_t end = line.find("\"", start); - if (end != std::string::npos) { - std::string expected = line.substr(start, end - start); - if (expected == cached_sha512) { - verified = true; - printf(" ✓ Integrity verified (SHA-512 match)\n\n"); - break; - } - } - } - } - } - - if (!verified) { - printf(" ✓ Binary exists (integrity check skipped)\n\n"); - } - } else { - printf(" ✓ Binary exists (no metadata to verify)\n\n"); - } - - // Execute cached binary directly. - printf("Executing cached binary (zero decompression overhead)...\n"); - printf("─────────────────────────────────────────────────────────\n\n"); - - // Build argv with cached_binary as argv[0]. - std::vector new_argv; - new_argv.push_back(const_cast(cached_binary.c_str())); - for (int i = 2; i < argc; ++i) { - new_argv.push_back(argv[i]); - } - new_argv.push_back(nullptr); - - // Execute. - execv(cached_binary.c_str(), new_argv.data()); - - // If we get here, execv failed. - fprintf(stderr, "Error: Failed to execute cached binary\n"); - return 1; - } - } - - // Cache miss or verification failed - decompress. - printf("Cache miss. Decompressing to cache...\n"); - - // Parse header. 
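The parse step that follows validates the magic and the minimum file size, but the two size fields in the header are equally untrusted and flow straight into `mmap()` and `compression_decode_buffer()`. A sketch of the extra bounds checks one could add before using them (the 4 GiB cap is an arbitrary assumption, not a value from the source):

```c
#include <stdint.h>
#include <stdio.h>

struct CompressedHeader {
  uint32_t magic;
  uint32_t algorithm;
  uint64_t original_size;
  uint64_t compressed_size;
};

/* file_size is the total size of the compressed file on disk; the caller
 * has already checked file_size >= sizeof(struct CompressedHeader). */
int validate_sizes(const struct CompressedHeader *h, uint64_t file_size) {
  const uint64_t kMaxBinary = 4ULL * 1024 * 1024 * 1024; /* assumed cap */
  if (h->compressed_size != file_size - sizeof(*h)) {
    fprintf(stderr, "Error: payload size does not match file size\n");
    return 0;
  }
  if (h->original_size == 0 || h->original_size > kMaxBinary) {
    fprintf(stderr, "Error: implausible decompressed size\n");
    return 0;
  }
  return 1;
}
```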
- if (compressed_data.size() < sizeof(CompressedHeader)) { - fprintf(stderr, "Error: File too small to contain header\n"); - return 1; - } - - const CompressedHeader* header = - reinterpret_cast(compressed_data.data()); - - // Validate magic. - if (header->magic != 0x504D4353) { - fprintf(stderr, "Error: Invalid magic number (not a compressed Socket binary)\n"); - fprintf(stderr, "Expected: 0x504D4353, Got: 0x%08x\n", header->magic); - return 1; - } - - printf(" Compressed size: %llu bytes (%.2f MB)\n", - header->compressed_size, - header->compressed_size / 1024.0 / 1024.0); - printf(" Decompressed size: %llu bytes (%.2f MB)\n", - header->original_size, - header->original_size / 1024.0 / 1024.0); - printf(" Algorithm: %u\n\n", header->algorithm); - - // Allocate memory for decompressed binary. - printf("Allocating memory...\n"); - void* decompressed = mmap( - nullptr, - header->original_size, - PROT_READ | PROT_WRITE, - MAP_PRIVATE | MAP_ANONYMOUS, - -1, - 0); - - if (decompressed == MAP_FAILED) { - fprintf(stderr, "Error: Failed to allocate %llu bytes\n", header->original_size); - return 1; - } - - // Decompress. - printf("Decompressing...\n"); - const uint8_t* compressed_payload = compressed_data.data() + sizeof(CompressedHeader); - - size_t result = compression_decode_buffer( - static_cast(decompressed), - header->original_size, - compressed_payload, - header->compressed_size, - nullptr, - static_cast(header->algorithm)); - - if (result == 0) { - fprintf(stderr, "Error: Decompression failed\n"); - munmap(decompressed, header->original_size); - return 1; - } - - if (result != header->original_size) { - fprintf(stderr, "Error: Size mismatch (expected %llu, got %zu)\n", - header->original_size, result); - munmap(decompressed, header->original_size); - return 1; - } - - printf(" ✓ Decompressed successfully\n\n"); - - // Calculate checksum of decompressed binary using SHA-512 (like npm/cacache). - printf("Calculating checksum (SHA-512)...\n"); - std::vector decompressed_vec( - static_cast(decompressed), - static_cast(decompressed) + header->original_size); - std::string decompressed_sha512 = CalculateSHA512(decompressed_vec); - printf(" Checksum: %s\n\n", decompressed_sha512.c_str()); - - // Create cache directory. - printf("Creating cache directory...\n"); - if (!CreateDirectory(cache_dir)) { - fprintf(stderr, "Error: Failed to create cache directory: %s\n", cache_dir.c_str()); - munmap(decompressed, header->original_size); - return 1; - } - printf(" Location: %s\n\n", cache_dir.c_str()); - - // Write decompressed binary to cache. - printf("Writing to cache...\n"); - if (!WriteFile(cached_binary, decompressed, header->original_size)) { - fprintf(stderr, "Error: Failed to write cached binary\n"); - munmap(decompressed, header->original_size); - return 1; - } - - // Make cached binary executable. - chmod(cached_binary.c_str(), 0755); - - printf(" ✓ Cached binary: %s\n\n", cached_binary.c_str()); - - // Write metadata (unified schema with TypeScript dlxBinary). 
- // Canonical documentation: @socketsecurity/lib/src/dlx-binary.ts (DlxMetadata interface) - // Also documented in: packages/cli/src/utils/dlx/binary.mts - // Core fields: version, cache_key, timestamp, checksum, checksum_algorithm, platform, arch, size, source - // Extra fields: compressed_size, compression_algorithm, compression_ratio (C++ decompression specific) - std::ostringstream metadata; - metadata << "{\n"; - metadata << " \"version\": \"1.0.0\",\n"; - metadata << " \"cache_key\": \"" << cacheKey << "\",\n"; - metadata << " \"timestamp\": " << (time(nullptr) * 1000LL) << ",\n"; // Milliseconds for JS compat. - metadata << " \"checksum\": \"" << decompressed_sha512 << "\",\n"; - metadata << " \"checksum_algorithm\": \"sha512\",\n"; -#if defined(__APPLE__) - metadata << " \"platform\": \"darwin\",\n"; -#elif defined(__linux__) - metadata << " \"platform\": \"linux\",\n"; -#elif defined(_WIN32) - metadata << " \"platform\": \"win32\",\n"; -#else - metadata << " \"platform\": \"unknown\",\n"; -#endif -#if defined(__x86_64__) || defined(_M_X64) - metadata << " \"arch\": \"x64\",\n"; -#elif defined(__aarch64__) || defined(_M_ARM64) - metadata << " \"arch\": \"arm64\",\n"; -#else - metadata << " \"arch\": \"unknown\",\n"; -#endif - metadata << " \"size\": " << header->original_size << ",\n"; - metadata << " \"source\": {\n"; - metadata << " \"type\": \"decompression\",\n"; - metadata << " \"path\": \"" << compressed_path << "\"\n"; - metadata << " },\n"; - metadata << " \"extra\": {\n"; - metadata << " \"compressed_size\": " << header->compressed_size << ",\n"; - metadata << " \"compression_algorithm\": " << header->algorithm << ",\n"; - metadata << " \"compression_ratio\": " << (double)header->original_size / header->compressed_size << "\n"; - metadata << " }\n"; - metadata << "}\n"; - - WriteFile(metadata_file, metadata.str().c_str(), metadata.str().size()); - - // Execute cached binary. - printf("Executing decompressed binary...\n"); - printf("─────────────────────────────────\n\n"); - - // Build argv with cached_binary as argv[0]. - std::vector new_argv; - new_argv.push_back(const_cast(cached_binary.c_str())); - for (int i = 2; i < argc; ++i) { - new_argv.push_back(argv[i]); - } - new_argv.push_back(nullptr); - - // Execute. - execv(cached_binary.c_str(), new_argv.data()); - - // If we get here, execv failed. 
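For reference, a filled-in instance of the metadata document the block above emits. All values here are illustrative, not taken from a real run; `compression_algorithm` is the raw libcompression constant (for example 2049 = 0x801 = COMPRESSION_LZFSE):

```json
{
  "version": "1.0.0",
  "cache_key": "0123456789abcdef",
  "timestamp": 1700000000000,
  "checksum": "<full sha512 hex digest>",
  "checksum_algorithm": "sha512",
  "platform": "darwin",
  "arch": "arm64",
  "size": 58720256,
  "source": {
    "type": "decompression",
    "path": "/usr/local/bin/node.compressed"
  },
  "extra": {
    "compressed_size": 22020096,
    "compression_algorithm": 2049,
    "compression_ratio": 2.67
  }
}
```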
- fprintf(stderr, "Error: Failed to execute decompressed binary\n"); - munmap(decompressed, header->original_size); - return 1; -} - -#else // !defined(__APPLE__) - -int DecompressAndExecute(const std::string& compressed_path, int argc, char* argv[]) { - fprintf(stderr, "Error: This tool only works on macOS\n"); - return 1; -} - -#endif // defined(__APPLE__) - -} // namespace macho -} // namespace socket - -int main(int argc, char* argv[]) { - if (argc < 2) { - fprintf(stderr, "Usage: %s compressed_binary [args...]\n", argv[0]); - fprintf(stderr, "\n"); - fprintf(stderr, "Decompresses and executes a binary created by socket_macho_compress.\n"); - fprintf(stderr, "Uses ~/.socket/_dlx/ for caching (zero overhead on subsequent runs).\n"); - fprintf(stderr, "\n"); - fprintf(stderr, "Example:\n"); - fprintf(stderr, " %s ./node.compressed --version\n", argv[0]); - return 1; - } - - return socket::macho::DecompressAndExecute(argv[1], argc, argv); -} diff --git a/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_pe_compress.c b/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_pe_compress.c deleted file mode 100644 index 06014b860..000000000 --- a/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_pe_compress.c +++ /dev/null @@ -1,364 +0,0 @@ -/* - * Socket PE Compressor - Binary compression for Windows using native API - * Compresses PE binaries while maintaining functionality and avoiding AV flags - * - * Usage: - * socket_pe_compress.exe input_binary output_binary [--quality=lzms|xpress] - * - * Features: - * - Uses Windows Compression API (no AV flags) - * - ~70-73% compression with LZMS - * - Creates self-contained compressed binary - * - Compatible with Windows 8+ - */ - -#ifdef _WIN32 - -#include -#include -#include -#include -#include -#include - -/* Compression quality settings */ -typedef enum { - QUALITY_XPRESS, /* Fast decompression (~60%) */ - QUALITY_XPRESS_HUFF, /* Balanced (~65%) */ - QUALITY_LZMS, /* Maximum compression (~70-73%) */ - QUALITY_DEFAULT = QUALITY_LZMS -} CompressionQuality; - -/* Compressed binary header format */ -struct CompressedHeader { - uint32_t magic; /* "SEPE" = Socket PE = 0x53455045 */ - uint32_t algorithm; /* Compression algorithm ID */ - uint64_t original_size; /* Decompressed size in bytes */ - uint64_t compressed_size;/* Compressed payload size in bytes */ -}; - -#define MAGIC_SEPE 0x53455045 /* "SEPE" */ - -/* Get algorithm name for display */ -const char* get_algorithm_name(CompressionQuality quality) { - switch (quality) { - case QUALITY_XPRESS: return "XPRESS"; - case QUALITY_XPRESS_HUFF: return "XPRESS_HUFF"; - case QUALITY_LZMS: return "LZMS"; - default: return "LZMS"; - } -} - -/* Get Windows compression algorithm */ -DWORD get_windows_algorithm(CompressionQuality quality) { - switch (quality) { - case QUALITY_XPRESS: return COMPRESS_ALGORITHM_XPRESS; - case QUALITY_XPRESS_HUFF: return COMPRESS_ALGORITHM_XPRESS_HUFF; - case QUALITY_LZMS: return COMPRESS_ALGORITHM_LZMS; - default: return COMPRESS_ALGORITHM_LZMS; - } -} - -/* Read entire file into memory */ -BYTE* read_file(const char* path, SIZE_T* size) { - HANDLE file = CreateFileA(path, GENERIC_READ, FILE_SHARE_READ, NULL, - OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, NULL); - if (file == INVALID_HANDLE_VALUE) { - fprintf(stderr, "Error: Cannot open file: %s (error: %lu)\n", - path, GetLastError()); - return NULL; - } - - LARGE_INTEGER file_size; - if (!GetFileSizeEx(file, &file_size)) { - fprintf(stderr, "Error: Cannot get file 
size (error: %lu)\n", - GetLastError()); - CloseHandle(file); - return NULL; - } - - *size = (SIZE_T)file_size.QuadPart; - - BYTE* buffer = (BYTE*)malloc(*size); - if (!buffer) { - fprintf(stderr, "Error: Cannot allocate %zu bytes\n", *size); - CloseHandle(file); - return NULL; - } - - DWORD bytes_read; - if (!ReadFile(file, buffer, (DWORD)*size, &bytes_read, NULL) || - bytes_read != *size) { - fprintf(stderr, "Error: Cannot read file (error: %lu)\n", - GetLastError()); - free(buffer); - CloseHandle(file); - return NULL; - } - - CloseHandle(file); - return buffer; -} - -/* Write buffer to file */ -BOOL write_file(const char* path, const BYTE* data, SIZE_T size) { - HANDLE file = CreateFileA(path, GENERIC_WRITE, 0, NULL, CREATE_ALWAYS, - FILE_ATTRIBUTE_NORMAL, NULL); - if (file == INVALID_HANDLE_VALUE) { - fprintf(stderr, "Error: Cannot create file: %s (error: %lu)\n", - path, GetLastError()); - return FALSE; - } - - DWORD bytes_written; - if (!WriteFile(file, data, (DWORD)size, &bytes_written, NULL) || - bytes_written != size) { - fprintf(stderr, "Error: Cannot write file (error: %lu)\n", - GetLastError()); - CloseHandle(file); - return FALSE; - } - - CloseHandle(file); - return TRUE; -} - -/* Compress data using Windows Compression API */ -BYTE* compress_data(const BYTE* input, SIZE_T input_size, - SIZE_T* compressed_size, CompressionQuality quality) { - DWORD algorithm = get_windows_algorithm(quality); - - /* Create compressor */ - COMPRESSOR_HANDLE compressor = NULL; - if (!CreateCompressor(algorithm, NULL, &compressor)) { - fprintf(stderr, "Error: Cannot create compressor (error: %lu)\n", - GetLastError()); - return NULL; - } - - /* Query compressed buffer size */ - SIZE_T compressed_buffer_size; - if (!Compress(compressor, input, input_size, NULL, 0, - &compressed_buffer_size)) { - if (GetLastError() != ERROR_INSUFFICIENT_BUFFER) { - fprintf(stderr, "Error: Cannot query buffer size (error: %lu)\n", - GetLastError()); - CloseCompressor(compressor); - return NULL; - } - } - - /* Allocate compressed buffer */ - BYTE* compressed = (BYTE*)malloc(compressed_buffer_size); - if (!compressed) { - fprintf(stderr, "Error: Cannot allocate %zu bytes\n", - compressed_buffer_size); - CloseCompressor(compressor); - return NULL; - } - - /* Compress */ - if (!Compress(compressor, input, input_size, compressed, - compressed_buffer_size, compressed_size)) { - fprintf(stderr, "Error: Compression failed (error: %lu)\n", - GetLastError()); - free(compressed); - CloseCompressor(compressor); - return NULL; - } - - CloseCompressor(compressor); - - double ratio = 100.0 * (1.0 - (double)*compressed_size / input_size); - printf(" Compressed %zu -> %zu bytes (%.1f%% reduction) using %s\n", - input_size, *compressed_size, ratio, get_algorithm_name(quality)); - - return compressed; -} - -/* Verify PE format */ -BOOL verify_pe(const BYTE* data, SIZE_T size) { - if (size < sizeof(IMAGE_DOS_HEADER)) { - fprintf(stderr, "Error: File too small to be a PE binary\n"); - return FALSE; - } - - IMAGE_DOS_HEADER* dos_header = (IMAGE_DOS_HEADER*)data; - - /* Check DOS magic */ - if (dos_header->e_magic != IMAGE_DOS_SIGNATURE) { - fprintf(stderr, "Error: Not a valid PE binary (invalid DOS signature)\n"); - return FALSE; - } - - /* Check PE header */ - if (dos_header->e_lfanew >= size) { - fprintf(stderr, "Error: Invalid PE header offset\n"); - return FALSE; - } - - IMAGE_NT_HEADERS* nt_headers = (IMAGE_NT_HEADERS*)(data + dos_header->e_lfanew); - if (nt_headers->Signature != IMAGE_NT_SIGNATURE) { - fprintf(stderr, "Error: Not a 
valid PE binary (invalid NT signature)\n"); - return FALSE; - } - - /* Display PE info */ - printf("PE Info:\n"); - printf(" Architecture: %s\n", - nt_headers->FileHeader.Machine == IMAGE_FILE_MACHINE_AMD64 ? "x64" : - nt_headers->FileHeader.Machine == IMAGE_FILE_MACHINE_I386 ? "x86" : - nt_headers->FileHeader.Machine == IMAGE_FILE_MACHINE_ARM64 ? "ARM64" : - "Other"); - printf(" Type: %s\n", - nt_headers->OptionalHeader.Subsystem == IMAGE_SUBSYSTEM_WINDOWS_CUI ? - "Console" : "GUI"); - - return TRUE; -} - -/* Main compression function */ -int compress_pe(const char* input_path, const char* output_path, - CompressionQuality quality) { - - printf("Socket PE Compressor\n"); - printf("====================\n"); - printf("Input: %s\n", input_path); - printf("Output: %s\n", output_path); - printf("Algorithm: %s\n\n", get_algorithm_name(quality)); - - /* Read input binary */ - printf("Reading input binary...\n"); - SIZE_T input_size; - BYTE* input_data = read_file(input_path, &input_size); - if (!input_data) { - return 1; - } - - printf(" Original size: %zu bytes (%.2f MB)\n\n", - input_size, input_size / 1024.0 / 1024.0); - - /* Verify PE format */ - printf("Verifying PE binary...\n"); - if (!verify_pe(input_data, input_size)) { - free(input_data); - return 1; - } - printf("\n"); - - /* Compress binary */ - printf("Compressing binary...\n"); - SIZE_T compressed_size; - BYTE* compressed_data = compress_data(input_data, input_size, - &compressed_size, quality); - if (!compressed_data) { - free(input_data); - return 1; - } - printf("\n"); - - /* Build output file */ - printf("Creating output binary...\n"); - - /* Create header */ - struct CompressedHeader header = { - .magic = MAGIC_SEPE, - .algorithm = (uint32_t)get_windows_algorithm(quality), - .original_size = input_size, - .compressed_size = compressed_size - }; - - /* Allocate output buffer: header + compressed data */ - SIZE_T output_size = sizeof(header) + compressed_size; - BYTE* output = (BYTE*)malloc(output_size); - if (!output) { - fprintf(stderr, "Error: Cannot allocate output buffer\n"); - free(input_data); - free(compressed_data); - return 1; - } - - /* Copy header and compressed data */ - memcpy(output, &header, sizeof(header)); - memcpy(output + sizeof(header), compressed_data, compressed_size); - - /* Write output file */ - if (!write_file(output_path, output, output_size)) { - free(input_data); - free(compressed_data); - free(output); - return 1; - } - - /* Calculate statistics */ - double total_ratio = 100.0 * (1.0 - (double)output_size / input_size); - printf(" Output size: %zu bytes (%.2f MB)\n", - output_size, output_size / 1024.0 / 1024.0); - printf(" Total savings: %.1f%%\n", total_ratio); - printf(" Saved: %.2f MB\n", - (input_size - output_size) / 1024.0 / 1024.0); - printf("\n"); - - printf("Success! 
Compression complete.\n"); - printf("\n"); - printf("Note: Use socket_pe_decompress.exe to run the binary.\n"); - printf("Example: socket_pe_decompress.exe %s [args...]\n", output_path); - - free(input_data); - free(compressed_data); - free(output); - return 0; -} - -int main(int argc, char* argv[]) { - if (argc < 3) { - fprintf(stderr, "Usage: %s input_binary output_binary [--quality=lzms|xpress]\n", - argv[0]); - fprintf(stderr, "\n"); - fprintf(stderr, "Compresses Windows PE binaries using native Windows Compression API.\n"); - fprintf(stderr, "\n"); - fprintf(stderr, "Quality options:\n"); - fprintf(stderr, " lzms - Maximum compression (~70-73%%, default)\n"); - fprintf(stderr, " xpress - Fast decompression (~60%%)\n"); - fprintf(stderr, "\n"); - fprintf(stderr, "Example:\n"); - fprintf(stderr, " %s node.exe node.compressed --quality=lzms\n", - argv[0]); - return 1; - } - - const char* input_path = argv[1]; - const char* output_path = argv[2]; - CompressionQuality quality = QUALITY_DEFAULT; - - /* Parse quality argument */ - if (argc >= 4) { - const char* quality_arg = argv[3]; - if (strncmp(quality_arg, "--quality=", 10) == 0) { - const char* quality_str = quality_arg + 10; - if (strcmp(quality_str, "lzms") == 0) { - quality = QUALITY_LZMS; - } else if (strcmp(quality_str, "xpress") == 0) { - quality = QUALITY_XPRESS; - } else if (strcmp(quality_str, "xpress_huff") == 0) { - quality = QUALITY_XPRESS_HUFF; - } else { - fprintf(stderr, "Warning: Unknown quality '%s', using default (lzms)\n", - quality_str); - } - } - } - - return compress_pe(input_path, output_path, quality); -} - -#else /* !_WIN32 */ - -#include - -int main() { - fprintf(stderr, "Error: This tool only works on Windows\n"); - return 1; -} - -#endif /* _WIN32 */ diff --git a/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_pe_decompress.c b/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_pe_decompress.c deleted file mode 100644 index 0f5028fe4..000000000 --- a/packages/node-smol-builder/additions/003-compression-tools/socketsecurity_pe_decompress.c +++ /dev/null @@ -1,301 +0,0 @@ -/* - * Socket PE Decompressor - Runtime decompression and execution for Windows - * Decompresses binaries created by socket_pe_compress and executes them - * - * Usage: - * socket_pe_decompress.exe compressed_binary [args...] - * - * This tool: - * 1. Reads the compressed binary - * 2. Decompresses it using Windows Compression API - * 3. 
Executes the decompressed binary with original arguments - */ - -#ifdef _WIN32 - -#include -#include -#include -#include -#include -#include - -/* Compressed binary header format (must match compressor) */ -struct CompressedHeader { - uint32_t magic; /* "SEPE" = 0x53455045 */ - uint32_t algorithm; /* Compression algorithm ID */ - uint64_t original_size; /* Decompressed size in bytes */ - uint64_t compressed_size;/* Compressed payload size in bytes */ -}; - -#define MAGIC_SEPE 0x53455045 /* "SEPE" */ - -/* Read entire file into memory */ -BYTE* read_file(const char* path, SIZE_T* size) { - HANDLE file = CreateFileA(path, GENERIC_READ, FILE_SHARE_READ, NULL, - OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, NULL); - if (file == INVALID_HANDLE_VALUE) { - fprintf(stderr, "Error: Cannot open file: %s (error: %lu)\n", - path, GetLastError()); - return NULL; - } - - LARGE_INTEGER file_size; - if (!GetFileSizeEx(file, &file_size)) { - fprintf(stderr, "Error: Cannot get file size (error: %lu)\n", - GetLastError()); - CloseHandle(file); - return NULL; - } - - *size = (SIZE_T)file_size.QuadPart; - - BYTE* buffer = (BYTE*)malloc(*size); - if (!buffer) { - fprintf(stderr, "Error: Cannot allocate %zu bytes\n", *size); - CloseHandle(file); - return NULL; - } - - DWORD bytes_read; - if (!ReadFile(file, buffer, (DWORD)*size, &bytes_read, NULL) || - bytes_read != *size) { - fprintf(stderr, "Error: Cannot read file (error: %lu)\n", - GetLastError()); - free(buffer); - CloseHandle(file); - return NULL; - } - - CloseHandle(file); - return buffer; -} - -/* Decompress data using Windows Compression API */ -BYTE* decompress_data(const BYTE* input, SIZE_T input_size, - SIZE_T output_size, DWORD algorithm) { - /* Create decompressor */ - DECOMPRESSOR_HANDLE decompressor = NULL; - if (!CreateDecompressor(algorithm, NULL, &decompressor)) { - fprintf(stderr, "Error: Cannot create decompressor (error: %lu)\n", - GetLastError()); - return NULL; - } - - /* Allocate output buffer */ - BYTE* output = (BYTE*)malloc(output_size); - if (!output) { - fprintf(stderr, "Error: Cannot allocate %zu bytes for decompression\n", - output_size); - CloseDecompressor(decompressor); - return NULL; - } - - /* Decompress */ - SIZE_T decompressed_size; - if (!Decompress(decompressor, input, input_size, output, output_size, - &decompressed_size)) { - fprintf(stderr, "Error: Decompression failed (error: %lu)\n", - GetLastError()); - free(output); - CloseDecompressor(decompressor); - return NULL; - } - - CloseDecompressor(decompressor); - - if (decompressed_size != output_size) { - fprintf(stderr, "Error: Size mismatch (expected %zu, got %zu)\n", - output_size, decompressed_size); - free(output); - return NULL; - } - - return output; -} - -/* Decompress and execute binary */ -int decompress_and_execute(const char* compressed_path, int argc, char* argv[]) { - printf("Socket PE Decompressor\n"); - printf("======================\n\n"); - - /* Read compressed binary */ - printf("Reading compressed binary: %s\n", compressed_path); - SIZE_T file_size; - BYTE* file_data = read_file(compressed_path, &file_size); - if (!file_data) { - return 1; - } - - /* Parse header */ - if (file_size < sizeof(struct CompressedHeader)) { - fprintf(stderr, "Error: File too small to contain header\n"); - free(file_data); - return 1; - } - - struct CompressedHeader* header = (struct CompressedHeader*)file_data; - - /* Validate magic */ - if (header->magic != MAGIC_SEPE) { - fprintf(stderr, "Error: Invalid magic number (not a compressed Socket binary)\n"); - fprintf(stderr, 
"Expected: 0x%08x, Got: 0x%08x\n", MAGIC_SEPE, header->magic); - free(file_data); - return 1; - } - - printf(" Compressed size: %llu bytes (%.2f MB)\n", - header->compressed_size, - header->compressed_size / 1024.0 / 1024.0); - printf(" Decompressed size: %llu bytes (%.2f MB)\n", - header->original_size, - header->original_size / 1024.0 / 1024.0); - printf(" Algorithm: %u\n", header->algorithm); - printf("\n"); - - /* Decompress */ - printf("Decompressing...\n"); - const BYTE* compressed_payload = file_data + sizeof(struct CompressedHeader); - - BYTE* decompressed = decompress_data( - compressed_payload, - (SIZE_T)header->compressed_size, - (SIZE_T)header->original_size, - header->algorithm - ); - - if (!decompressed) { - free(file_data); - return 1; - } - - printf(" Success! Decompressed successfully\n\n"); - - /* Write decompressed binary to temporary file */ - printf("Writing temporary executable...\n"); - - /* Create temp file */ - char temp_dir[MAX_PATH]; - char temp_path[MAX_PATH]; - - if (!GetTempPathA(MAX_PATH, temp_dir)) { - fprintf(stderr, "Error: Cannot get temp directory (error: %lu)\n", - GetLastError()); - free(file_data); - free(decompressed); - return 1; - } - - if (!GetTempFileNameA(temp_dir, "socket_", 0, temp_path)) { - fprintf(stderr, "Error: Cannot create temp file name (error: %lu)\n", - GetLastError()); - free(file_data); - free(decompressed); - return 1; - } - - /* Add .exe extension */ - strcat_s(temp_path, MAX_PATH, ".exe"); - - /* Write decompressed binary */ - HANDLE file = CreateFileA(temp_path, GENERIC_WRITE, 0, NULL, CREATE_ALWAYS, - FILE_ATTRIBUTE_NORMAL, NULL); - if (file == INVALID_HANDLE_VALUE) { - fprintf(stderr, "Error: Cannot create temp file: %s (error: %lu)\n", - temp_path, GetLastError()); - free(file_data); - free(decompressed); - return 1; - } - - DWORD bytes_written; - BOOL write_success = WriteFile(file, decompressed, (DWORD)header->original_size, - &bytes_written, NULL); - CloseHandle(file); - - if (!write_success || bytes_written != header->original_size) { - fprintf(stderr, "Error: Cannot write temp file (error: %lu)\n", - GetLastError()); - DeleteFileA(temp_path); - free(file_data); - free(decompressed); - return 1; - } - - printf(" Temporary file: %s\n\n", temp_path); - - /* Execute decompressed binary */ - printf("Executing decompressed binary...\n"); - printf("-------------------------------------\n\n"); - - /* Build command line */ - char cmdline[32768]; - cmdline[0] = '\0'; - - /* Add executable path */ - strcat_s(cmdline, sizeof(cmdline), "\""); - strcat_s(cmdline, sizeof(cmdline), temp_path); - strcat_s(cmdline, sizeof(cmdline), "\""); - - /* Add arguments */ - for (int i = 2; i < argc; i++) { - strcat_s(cmdline, sizeof(cmdline), " "); - strcat_s(cmdline, sizeof(cmdline), argv[i]); - } - - /* Execute */ - STARTUPINFOA si = { sizeof(si) }; - PROCESS_INFORMATION pi; - - if (!CreateProcessA(temp_path, cmdline, NULL, NULL, FALSE, 0, - NULL, NULL, &si, &pi)) { - fprintf(stderr, "Error: Failed to execute (error: %lu)\n", - GetLastError()); - DeleteFileA(temp_path); - free(file_data); - free(decompressed); - return 1; - } - - /* Wait for process to complete */ - WaitForSingleObject(pi.hProcess, INFINITE); - - /* Get exit code */ - DWORD exit_code = 0; - GetExitCodeProcess(pi.hProcess, &exit_code); - - CloseHandle(pi.hProcess); - CloseHandle(pi.hThread); - - /* Clean up */ - DeleteFileA(temp_path); - free(file_data); - free(decompressed); - - return (int)exit_code; -} - -int main(int argc, char* argv[]) { - if (argc < 2) { - fprintf(stderr, 
"Usage: %s compressed_binary [args...]\n", argv[0]); - fprintf(stderr, "\n"); - fprintf(stderr, "Decompresses and executes a binary created by socket_pe_compress.\n"); - fprintf(stderr, "\n"); - fprintf(stderr, "Example:\n"); - fprintf(stderr, " %s node.compressed --version\n", argv[0]); - return 1; - } - - return decompress_and_execute(argv[1], argc, argv); -} - -#else /* !_WIN32 */ - -#include - -int main() { - fprintf(stderr, "Error: This tool only works on Windows\n"); - return 1; -} - -#endif /* _WIN32 */ diff --git a/packages/node-smol-builder/additions/004-polyfills/README.md b/packages/node-smol-builder/additions/004-polyfills/README.md deleted file mode 100644 index 9b35fe7f1..000000000 --- a/packages/node-smol-builder/additions/004-polyfills/README.md +++ /dev/null @@ -1,53 +0,0 @@ -# Socket CLI Polyfills for small-icu - -This directory contains polyfill modules that provide safety fallbacks for Node.js built with `--with-intl=small-icu` (English-only ICU data). - -## Why These Polyfills? - -When Node.js is built with `--with-intl=small-icu`: -- Binary size reduced by ~5 MB (English-only ICU vs full ICU) -- Most internationalization features work correctly -- Some edge cases may throw errors or behave unexpectedly - -These polyfills act as safety layers, providing basic fallback implementations when native ICU methods fail. - -## Polyfills Included - -### 1. `localeCompare.js` -- **Target**: `String.prototype.localeCompare()` -- **Purpose**: Provides basic alphabetical comparison fallback -- **Applied to**: `lib/internal/per_context/primordials.js` - -### 2. `normalize.js` -- **Target**: `String.prototype.normalize()` -- **Purpose**: Provides basic Unicode normalization fallback -- **Applied to**: `lib/internal/bootstrap/node.js` - -## Design: External Injection (Option A) - -These polyfills use the external module injection pattern: - -1. **Standalone files**: Each polyfill is a self-contained module -2. **Minimal patches**: Node.js core files only load these external modules -3. **Clean separation**: Polyfill logic lives here, not in Node.js source -4. **Easy maintenance**: Update polyfills without regenerating patches - -## Patch Structure - -Each polyfill is loaded via a minimal patch that: -1. Checks if the native implementation works -2. Falls back to the polyfill if native throws an error -3. Adds a `Socket CLI: Polyfill ` marker for verification - -## Testing - -Polyfills are verified during build via `scripts/build.mjs`: -- Checks for polyfill markers in target files -- Warns if not applied (but doesn't fail build) -- Polyfills are optional safety layers, not required - -## Binary Size Impact - -- Polyfill code: ~2-4 KB uncompressed -- Brotli compressed: ~1 KB -- Negligible impact on final binary size diff --git a/packages/node-smol-builder/additions/004-polyfills/localeCompare.js b/packages/node-smol-builder/additions/004-polyfills/localeCompare.js deleted file mode 100644 index dfb643e4d..000000000 --- a/packages/node-smol-builder/additions/004-polyfills/localeCompare.js +++ /dev/null @@ -1,51 +0,0 @@ -/** - * Socket CLI: Polyfill localeCompare - * - * Safety fallback for String.prototype.localeCompare() in small-icu builds. 
- *
- * WHY THIS EXISTS:
- * - Node.js built with --with-intl=small-icu has English-only ICU data
- * - localeCompare() may throw errors or behave unexpectedly for non-English locales
- * - This polyfill provides basic alphabetical comparison as a fallback
- *
- * WHEN IT ACTIVATES:
- * - Only when native localeCompare() throws an error
- * - Preserves native behavior whenever possible
- *
- * LIMITATIONS:
- * - Fallback uses basic character code comparison (no locale awareness)
- * - Does not support advanced collation rules
- * - Sufficient for Socket CLI's internal use cases
- */
-
-'use strict';
-
-// Save reference to original implementation.
-const originalLocaleCompare = String.prototype.localeCompare;
-
-// Basic fallback: compare character codes. Returns a negative value when
-// `this` sorts before `that`, matching the sign convention of the native
-// method ('a'.localeCompare('b') < 0).
-function fallbackLocaleCompare(that, locales, options) {
-  // Basic comparison using character codes.
-  if (this < that) return -1;
-  if (this > that) return 1;
-  return 0;
-}
-
-// Wrapper that tries native first, falls back if it throws.
-function polyfillLocaleCompare(that, locales, options) {
-  try {
-    // Try native implementation first.
-    return originalLocaleCompare.call(this, that, locales, options);
-  } catch (e) {
-    // If native throws, use fallback.
-    return fallbackLocaleCompare.call(this, that);
-  }
-}
-
-// Replace String.prototype.localeCompare with wrapped version.
-Object.defineProperty(String.prototype, 'localeCompare', {
-  value: polyfillLocaleCompare,
-  writable: true,
-  enumerable: false,
-  configurable: true,
-});
diff --git a/packages/node-smol-builder/additions/004-polyfills/normalize.js b/packages/node-smol-builder/additions/004-polyfills/normalize.js
deleted file mode 100644
index f0849b173..000000000
--- a/packages/node-smol-builder/additions/004-polyfills/normalize.js
+++ /dev/null
@@ -1,58 +0,0 @@
-/**
- * Socket CLI: Polyfill String.prototype.normalize
- *
- * Safety fallback for String.prototype.normalize() in small-icu builds.
- *
- * WHY THIS EXISTS:
- * - Node.js built with --with-intl=small-icu may have limited normalization support
- * - normalize() may throw errors for certain forms or edge cases
- * - This polyfill provides a basic fallback when native implementation fails
- *
- * WHEN IT ACTIVATES:
- * - Only when native normalize() throws an error
- * - Preserves native behavior whenever possible
- *
- * LIMITATIONS:
- * - Fallback returns original string unchanged (identity function)
- * - Does not perform actual Unicode normalization
- * - Sufficient for Socket CLI's internal use cases where normalization is optional
- *
- * NORMALIZATION FORMS:
- * - NFC: Canonical Decomposition, followed by Canonical Composition
- * - NFD: Canonical Decomposition
- * - NFKC: Compatibility Decomposition, followed by Canonical Composition
- * - NFKD: Compatibility Decomposition
- */
-
-'use strict';
-
-// Save reference to original implementation.
-const originalNormalize = String.prototype.normalize;
-
-// Basic fallback: return string unchanged (identity function).
-// This is safe because:
-// 1. Many use cases work fine with un-normalized strings.
-// 2. Socket CLI's code paths that use normalize() handle this gracefully.
-// 3. Better to have working code with un-normalized strings than crash.
-function fallbackNormalize(form) {
-  return this;
-}
-
-// Wrapper that tries native first, falls back if it throws.
-function polyfillNormalize(form) {
-  try {
-    // Try native implementation first.
-    return originalNormalize.call(this, form);
-  } catch (e) {
-    // If native throws, use fallback (return unchanged).
- return fallbackNormalize.call(this, form); - } -} - -// Replace String.prototype.normalize with wrapped version. -Object.defineProperty(String.prototype, 'normalize', { - value: polyfillNormalize, - writable: true, - enumerable: false, - configurable: true, -}); diff --git a/packages/node-smol-builder/docker/Dockerfile.alpine b/packages/node-smol-builder/docker/Dockerfile.alpine deleted file mode 100644 index f9fc1d389..000000000 --- a/packages/node-smol-builder/docker/Dockerfile.alpine +++ /dev/null @@ -1,23 +0,0 @@ -FROM alpine:3.19 - -# Install build dependencies. -# Note: curl and patch can be auto-installed by the build script, -# but pre-installing them improves build performance. -RUN apk add --no-cache \ - nodejs \ - npm \ - python3 \ - make \ - g++ \ - linux-headers \ - git \ - curl \ - patch \ - ccache \ - ninja - -# Install pnpm globally. -RUN npm install -g pnpm@10.20.0 - -# Set working directory. -WORKDIR /workspace diff --git a/packages/node-smol-builder/docker/README.md b/packages/node-smol-builder/docker/README.md deleted file mode 100644 index 7c745e481..000000000 --- a/packages/node-smol-builder/docker/README.md +++ /dev/null @@ -1,102 +0,0 @@ -# Alpine Docker Build Environment - -**Purpose**: Builds Socket smol binaries for Alpine Linux using musl libc instead of glibc. - -## Why Alpine Needs Docker - -``` -┌─────────────────────────────────────────────────────────┐ -│ GitHub Actions Ubuntu Runner │ -│ ┌─────────────────────────────────────────────────────┐ │ -│ │ Standard Linux Build (glibc) │ │ -│ │ → Compiles directly on runner │ │ -│ │ → Works for linux-x64, linux-arm64 │ │ -│ └─────────────────────────────────────────────────────┘ │ -│ │ -│ ┌─────────────────────────────────────────────────────┐ │ -│ │ Alpine Linux Build (musl) │ │ -│ │ → REQUIRES Docker container │ │ -│ │ → Different libc (musl vs glibc) │ │ -│ │ → Different system libraries │ │ -│ │ → alpine-x64, alpine-arm64 │ │ -│ └─────────────────────────────────────────────────────┘ │ -└─────────────────────────────────────────────────────────┘ -``` - -## Build Flow - -``` -GitHub Actions Workflow - ↓ -Docker Build - → FROM alpine:3.19 - → Install: nodejs, npm, python3, make, g++, ninja - → Install: pnpm@10.20.0 - → Set WORKDIR /workspace - ↓ -Docker Run - → Mount: socket-cli repo → /workspace - → Execute: pnpm --filter @socketbin/node-smol-builder run build - → Output: socket-smol-alpine-{x64,arm64} -``` - -## Critical Differences - -| Aspect | Standard Linux | Alpine Linux | -|--------|----------------|--------------| -| **C Library** | glibc | musl | -| **Build Environment** | Native runner | Docker container | -| **Package Manager** | apt | apk | -| **Binary Compatibility** | Most Linux distros | Alpine only | -| **Size** | Larger | Smaller | - -## When This Runs - -Triggered by `.github/workflows/build-smol.yml` when: -- Building alpine-x64 or alpine-arm64 platforms -- Smol binary cache miss or force rebuild -- Cross-architecture via Docker Buildx + QEMU - -## Container Contents - -```dockerfile -FROM alpine:3.19 - -# Build toolchain -RUN apk add --no-cache \ - nodejs # Runtime - npm # Package manager - python3 # Node.js build scripts - make # Build system - g++ # C++ compiler - linux-headers # Kernel headers - git # Version control - ccache # Compiler cache - ninja # Fast build system - -# Socket tooling -RUN npm install -g pnpm@10.20.0 - -WORKDIR /workspace -``` - -## Platform Mapping - -| Matrix Arch | Docker Platform | Binary Output | -|-------------|-----------------|---------------| -| `x64` | 
`linux/amd64` | `socket-smol-alpine-x64` | -| `arm64` | `linux/arm64` | `socket-smol-alpine-arm64` | - -## Cache Strategy - -Docker image cached via GitHub Actions cache: -- **Cache key**: `alpine-builder-{x64,arm64}` -- **Scope**: Per architecture -- **Invalidation**: Manual or workflow changes -- **Benefits**: Faster subsequent builds (~30s vs ~5min) - -## Related Files - -- `Dockerfile.alpine` - This container definition -- `.github/workflows/build-smol.yml` - CI workflow using this -- `../build.mjs` - Build script executed inside container diff --git a/packages/node-smol-builder/docs/README.md b/packages/node-smol-builder/docs/README.md deleted file mode 100644 index 04c7b0a48..000000000 --- a/packages/node-smol-builder/docs/README.md +++ /dev/null @@ -1,51 +0,0 @@ -# node-smol-builder Documentation - -Package-level documentation for the Socket custom Node.js binary builder. - -## Overview - -This package builds a custom Node.js v24.10.0 binary from source with Socket security patches, Brotli compression support, SEA support, and bootstrap integration. - -## Contents - -### Binary Compression - -- **[binary-compression-distribution.md](./binary-compression-distribution.md)** - Complete guide to binary compression distribution strategy -- **[compression-quick-start.md](./compression-quick-start.md)** - Quick start guide for macOS binary compression -- **[compression-test-results.md](./compression-test-results.md)** - Real-world compression performance benchmarks - -### Build Process - -- Build compression is now integrated into the main build script -- Enable with: `COMPRESS_BINARY=1 node scripts/build.mjs` -- Cross-platform support: macOS (LZFSE/LZMA), Linux (LZMA), Windows (LZMS) - -Future package-level documentation will be added here for: -- Socket patch architecture and versioning -- Cross-platform build considerations -- Upstream Node.js tracking and update process - -## Sub-Package Documentation - -- **wasm-bundle/** - Rust WASM compression module - - [Cross-Platform Compression](../wasm-bundle/docs/cross-platform-compression.md) - WASM-based binary compression without UPX - - [macOS Binary Compression](../wasm-bundle/docs/macho-compression.md) - macOS-specific Mach-O compression - -## Quick Links - -- **Main README**: `../README.md` -- **Build Scripts**: `../scripts/` -- **Socket Patches**: `../patches/socket/` - -## Build Output - -- **Location**: `build/out/Release/` -- **Files**: `node` (custom Node.js binary with Socket patches) -- **Platforms**: Currently builds for host platform only - -## Upstream - -- **Repository**: https://github.com/nodejs/node -- **Version**: v24.10.0 -- **Socket Patches**: `../patches/socket/` -- **License**: MIT diff --git a/packages/node-smol-builder/package.json b/packages/node-smol-builder/package.json deleted file mode 100644 index a37970238..000000000 --- a/packages/node-smol-builder/package.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "dependencies": { - "@socketsecurity/bootstrap": "workspace:*", - "@socketsecurity/build-infra": "workspace:*", - "@socketsecurity/lib": "workspace:*" - }, - "description": "Custom Node.js binary builder with Socket security patches", - "devDependencies": { - "vitest": "catalog:" - }, - "license": "MIT", - "name": "@socketbin/node-smol-builder", - "private": true, - "scripts": { - "build": "node scripts/build.mjs", - "build:all": "node scripts/build.mjs --all-platforms", - "test": "vitest run", - "test:watch": "vitest" - }, - "version": "1.2.0" -} diff --git 
a/packages/node-smol-builder/patches/001-socketsecurity_bootstrap_preexec_v24.10.0.template.patch b/packages/node-smol-builder/patches/001-socketsecurity_bootstrap_preexec_v24.10.0.template.patch
deleted file mode 100644
index 6695fdd43..000000000
--- a/packages/node-smol-builder/patches/001-socketsecurity_bootstrap_preexec_v24.10.0.template.patch
+++ /dev/null
@@ -1,25 +0,0 @@
-# @node-versions: v24.10.0+
-# @description: Load Socket security bootstrap (minimal injection approach)
-#
-# This patch adds a single require() call to load the Socket bootstrap.
-# The actual bootstrap loader logic lives in lib/internal/socketsecurity_bootstrap_loader.js
-# (added via the additions/ directory during build).
-#
-# Benefits of this approach:
-# - Minimal patch footprint (1 line vs 21,000+ lines)
-# - Bootstrap loader code can have full comments and formatting
-# - Easier to maintain and understand
-# - Cleaner separation of concerns
-
---- a/lib/internal/process/pre_execution.js
-+++ b/lib/internal/process/pre_execution.js
-@@ -675,6 +675,9 @@ function runEmbedderPreload() {
- }
-
- function loadPreloadModules() {
-+  // Load Socket security bootstrap (loader added via build additions/).
-+  require('internal/socketsecurity_bootstrap_loader')();
-+
-   // For user code, we preload modules if `-r` is passed
-   const preloadModules = getOptionValue('--require');
-   if (preloadModules && preloadModules.length > 0) {
diff --git a/packages/node-smol-builder/patches/002-socketsecurity_brotli_builtin_v24.10.0.patch b/packages/node-smol-builder/patches/002-socketsecurity_brotli_builtin_v24.10.0.patch
deleted file mode 100644
index 01012a008..000000000
--- a/packages/node-smol-builder/patches/002-socketsecurity_brotli_builtin_v24.10.0.patch
+++ /dev/null
@@ -1,35 +0,0 @@
-# @node-versions: v24+
-# @description: Enable Socket Brotli builtin loading (minimal touch)
-# @phase: 3
-#
-# MINIMAL PATCH: Only 3 lines modified in node_builtins.cc
-#
-# This patch integrates the external Socket Brotli loader into Node.js's
-# builtin loading path. Changes:
-# 1. Include external Brotli loader header
-# 2. Replace ToStringChecked() call with Brotli-aware loader
-#
-# All decompression logic is in socket_brotli_builtin_loader.h (external).
-# The loader automatically falls back to standard loading for uncompressed
-# modules, so this is a transparent, zero-overhead change.
-
---- a/src/node_builtins.cc
-+++ b/src/node_builtins.cc
-@@ -6,6 +6,7 @@
- #include "node_threadsafe_cow-inl.h"
- #include "simdutf.h"
- #include "util-inl.h"
-+#include "socketsecurity_brotli_builtin_loader.h"  // Socket: External Brotli loader
-
- namespace node {
- namespace builtins {
-@@ -201,7 +202,8 @@ MaybeLocal<String> BuiltinLoader::LoadBuiltinSource(Isolate* isolate,
-     fprintf(stderr, "Cannot find native builtin: \"%s\".\n", id);
-     ABORT();
-   }
--  return source_it->second.ToStringChecked(isolate);
-+  // Socket: Use external Brotli loader (falls back to standard loading).
-+  return ::socketsecurity::builtins::LoadBuiltinSourceWithBrotli(isolate, id, source_it->second);
- #else  // !NODE_BUILTIN_MODULES_PATH
-   std::string filename = OnDiskFileName(id);
-
diff --git a/packages/node-smol-builder/patches/003-socketsecurity_brotli_friend_v24.10.0.patch b/packages/node-smol-builder/patches/003-socketsecurity_brotli_friend_v24.10.0.patch
deleted file mode 100644
index c4baf3ec3..000000000
--- a/packages/node-smol-builder/patches/003-socketsecurity_brotli_friend_v24.10.0.patch
+++ /dev/null
@@ -1,43 +0,0 @@
-# @node-versions: v24+
-# @description: Grant friend access for Socket Brotli decompression (minimal touch)
-# @phase: 3
-#
-# MINIMAL PATCH: Only 10 lines added to node_union_bytes.h
-#
-# This patch grants friend access to socketsecurity::builtins::UnionBytesAccessor,
-# allowing our external Brotli loader to access UnionBytes private members
-# for decompression. This is the minimal change required - all decompression
-# logic lives in the external socket_brotli_builtin_loader.h header.
-#
-# Design: We use a separate accessor struct (not direct function friendship)
-# to minimize the friend declaration surface and clearly separate concerns.
-#
-# IMPORTANT: Forward declaration MUST be in global namespace (before node namespace)
-# to avoid name collisions and ensure correct friend declaration matching.
-
---- a/src/node_union_bytes.h
-+++ b/src/node_union_bytes.h
-@@ -6,6 +6,13 @@
-
- #include "v8.h"
-
-+// Socket: Forward declaration for external Brotli loader (must be in global namespace).
-+namespace socketsecurity {
-+namespace builtins {
-+struct UnionBytesAccessor;
-+}
-+}
-+
- namespace node {
-
- // An external resource intended to be used with static lifetime.
-@@ -67,6 +74,9 @@ class UnionBytes {
-
-   bool is_one_byte() const { return one_byte_resource_ != nullptr; }
-
-+  // Socket: Grant access to external Brotli loader.
-+  friend struct ::socketsecurity::builtins::UnionBytesAccessor;
-+
-   v8::Local<v8::String> ToStringChecked(v8::Isolate* isolate) const;
-
-  private:
diff --git a/packages/node-smol-builder/patches/004-socketsecurity_brotli2c_build_v24.10.0.patch b/packages/node-smol-builder/patches/004-socketsecurity_brotli2c_build_v24.10.0.patch
deleted file mode 100644
index 3f34b0d8a..000000000
--- a/packages/node-smol-builder/patches/004-socketsecurity_brotli2c_build_v24.10.0.patch
+++ /dev/null
@@ -1,101 +0,0 @@
-# @node-versions: v24.10.0+
-# @description: Enable socketsecurity_brotli2c build tool for Brotli compression
-# @requires: tools/socketsecurity_brotli2c.cc
-#
-# Adds the socketsecurity_brotli2c build target to node.gyp, which compiles the
-# Socket Security Brotli compression tool used to convert Brotli-compressed data
-# into C++ byte arrays for embedding in the Node.js binary.
-#
-# This patch also modifies the node_js2c action to use socketsecurity_brotli2c
-# instead of the standard js2c tool, ensuring Brotli-compressed binary
-# data is output as byte arrays rather than string literals.
-#
-# This enables Brotli compression of JavaScript built-in modules for
-# approximately 3.36 MB binary size reduction.
-#
-# CRITICAL: socketsecurity_brotli2c does NOT define NODE_JS2C_USE_STRING_LITERALS because
-# Brotli-compressed binary data must be output as byte arrays, not string literals.
-#
-# PATCH CREATION PROCESS:
-# This patch was created using standard unified diff format (not git format).
-# Steps:
-# 1. Clone Node.js v24.10.0
-# 2.
Modify node.gyp to: -# - Add socketsecurity_brotli2c target -# - Add socketsecurity_brotli2c_exec variable -# - Modify node_js2c action to use socketsecurity_brotli2c_exec -# 3. Generate patch with: diff -u original.gyp modified.gyp -# 4. Validate with: patch -p1 --dry-run < patch-file -# ---- a/node.gyp -+++ b/node.gyp -@@ -434,6 +434,7 @@ - ], - 'node_mksnapshot_exec': '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)node_mksnapshot<(EXECUTABLE_SUFFIX)', - 'node_js2c_exec': '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)node_js2c<(EXECUTABLE_SUFFIX)', -+ 'socketsecurity_brotli2c_exec': '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)socketsecurity_brotli2c<(EXECUTABLE_SUFFIX)', - 'conditions': [ - ['GENERATOR == "ninja"', { - 'node_text_start_object_path': 'src/large_pages/node_text_start.node_text_start.o' -@@ -1026,7 +1027,7 @@ - 'action_name': 'node_js2c', - 'process_outputs_as_sources': 1, - 'inputs': [ -- '<(node_js2c_exec)', -+ '<(socketsecurity_brotli2c_exec)', - '<@(library_files)', - '<@(deps_files)', - 'config.gypi' -@@ -1035,7 +1036,7 @@ - '<(SHARED_INTERMEDIATE_DIR)/node_javascript.cc', - ], - 'action': [ -- '<(node_js2c_exec)', -+ '<(socketsecurity_brotli2c_exec)', - '<@(_outputs)', - 'lib', - 'config.gypi', -@@ -1389,6 +1390,43 @@ - }], - ] - }, -+ { -+ 'target_name': 'socketsecurity_brotli2c', -+ 'type': 'executable', -+ 'toolsets': ['host'], -+ 'defines!': ['NODE_JS2C_USE_STRING_LITERALS'], -+ 'include_dirs': [ -+ 'tools', -+ 'src', -+ ], -+ 'sources': [ -+ 'tools/socketsecurity_brotli2c.cc', -+ 'tools/executable_wrapper.h', -+ 'src/embedded_data.h', -+ 'src/embedded_data.cc', -+ ], -+ 'conditions': [ -+ [ 'node_shared_simdutf=="false"', { -+ 'dependencies': [ 'tools/v8_gypfiles/v8.gyp:simdutf#host' ], -+ }], -+ [ 'node_shared_libuv=="false"', { -+ 'dependencies': [ 'deps/uv/uv.gyp:libuv#host' ], -+ }], -+ [ 'node_shared_brotli=="false"', { -+ 'dependencies': [ 'deps/brotli/brotli.gyp:brotli#host' ], -+ }], -+ [ 'debug_node=="true"', { -+ 'cflags!': [ '-O3' ], -+ 'cflags': [ '-g', '-O0' ], -+ 'defines': [ 'DEBUG' ], -+ 'xcode_settings': { -+ 'OTHER_CFLAGS': [ -+ '-g', '-O0' -+ ], -+ }, -+ }], -+ ] -+ }, - { - 'target_name': 'node_mksnapshot', - 'type': 'executable', diff --git a/packages/node-smol-builder/patches/005-socketsecurity_disable_modules_v24.10.0.patch b/packages/node-smol-builder/patches/005-socketsecurity_disable_modules_v24.10.0.patch deleted file mode 100644 index 5cb3c60e7..000000000 --- a/packages/node-smol-builder/patches/005-socketsecurity_disable_modules_v24.10.0.patch +++ /dev/null @@ -1,36 +0,0 @@ -# @node-versions: v24.10.0+ -# @description: Disable unused Node.js modules (1.5-3MB savings) -# @phase: 1 -# -# Disables HTTP/2 and WASI bindings. -# Socket CLI doesn't use these features, so removing them reduces binary size. 
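-#
-# Sketch of the observable effect in the resulting binary (assumed invocation;
-# exact error text not verified):
-#   $ ./node -e "require('node:http2')"
-#   # fails while loading node:http2 because its internal binding was removed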
-# -# IMPORTANT: These bindings are REQUIRED and cannot be disabled: -# - contextify: Required for realm initialization -# - worker: Required for main thread detection (is_main_thread check) -# - trace_events: Required by internal modules (util/debuglog, timers) -# -# Expected savings: -# - http2: 1-2MB -# - wasi: 500KB-1MB - ---- a/src/node_binding.cc -+++ b/src/node_binding.cc -@@ -53,7 +53,7 @@ - V(fs_dir) \ - V(fs_event_wrap) \ - V(heap_utils) \ -- V(http2) \ -+ /* V(http2) */ /* Socket: Disabled HTTP/2 support (1-2MB) */ \ - V(http_parser) \ - V(inspector) \ - V(internal_only_v8) \ -@@ -92,7 +92,7 @@ - V(util) \ - V(uv) \ - V(v8) \ -- V(wasi) \ -+ /* V(wasi) */ /* Socket: Disabled WASI (500KB-1MB) */ \ - V(wasm_web_api) \ - V(watchdog) \ - V(worker) // Socket: REQUIRED for Node.js bootstrap (main thread detection) \ diff --git a/packages/node-smol-builder/patches/006-socketsecurity_fix_gcc_lto_v24.10.0.patch b/packages/node-smol-builder/patches/006-socketsecurity_fix_gcc_lto_v24.10.0.patch deleted file mode 100644 index 881080dd8..000000000 --- a/packages/node-smol-builder/patches/006-socketsecurity_fix_gcc_lto_v24.10.0.patch +++ /dev/null @@ -1,16 +0,0 @@ -# @node-versions: v24.10.0+ -# @description: Fix GCC LTO configuration for compatibility -# -# Removes -fuse-linker-plugin flag that causes issues with some GCC versions - ---- a/common.gypi -+++ b/common.gypi -@@ -192,7 +192,7 @@ - ['clang==1', { - 'lto': ' -flto ', # Clang - }, { -- 'lto': ' -flto=4 -fuse-linker-plugin -ffat-lto-objects ', # GCC -+ 'lto': ' -flto=4 -ffat-lto-objects ', # GCC - }], - ], - }, diff --git a/packages/node-smol-builder/patches/007-socketsecurity_sea_pkg_v24.10.0.patch b/packages/node-smol-builder/patches/007-socketsecurity_sea_pkg_v24.10.0.patch deleted file mode 100644 index a948f3da5..000000000 --- a/packages/node-smol-builder/patches/007-socketsecurity_sea_pkg_v24.10.0.patch +++ /dev/null @@ -1,21 +0,0 @@ -# @node-versions: v24.10.0+ -# @description: Enable SEA detection for pkg binaries -# @requires: yao-pkg-patches -# -# Overrides the isSea binding to always return true, making pkg binaries -# report as Single Executable Applications for consistency. -# -# This is required for pkg to properly detect and load embedded code. - ---- a/lib/sea.js -+++ b/lib/sea.js -@@ -3,7 +3,8 @@ const { - ArrayBufferPrototypeSlice, - } = primordials; - --const { isSea, getAsset: getAssetInternal, getAssetKeys: getAssetKeysInternal } = internalBinding('sea'); -+const isSea = () => true; -+const { getAsset: getAssetInternal, getAssetKeys: getAssetKeysInternal } = internalBinding('sea'); - const { TextDecoder } = require('internal/encoding'); - const { validateString } = require('internal/validators'); - const { diff --git a/packages/node-smol-builder/patches/008-socketsecurity_localecompare_polyfill_v24.10.0.patch b/packages/node-smol-builder/patches/008-socketsecurity_localecompare_polyfill_v24.10.0.patch deleted file mode 100644 index 36b5fdb3b..000000000 --- a/packages/node-smol-builder/patches/008-socketsecurity_localecompare_polyfill_v24.10.0.patch +++ /dev/null @@ -1,30 +0,0 @@ -# @node-versions: v24.10.0+ -# @description: Load external localeCompare polyfill for small-icu builds -# @requires: lib/internal/socketsecurity_polyfills/localeCompare.js -# -# Loads Socket CLI's external localeCompare polyfill from lib/internal/socketsecurity_polyfills/ -# to provide fallback string comparison when native localeCompare() fails with small-icu. 
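-#
-# Typical call site this protects (hypothetical example, not part of the patch):
-#   names.sort((a, b) => a.localeCompare(b))  // must not throw on small-icu builds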
-# -# This patch adds a single require() call in primordials.js to load the external polyfill module. -# The polyfill is a safety layer that preserves native behavior when possible and provides -# basic alphabetical comparison as a fallback. -# -# PATCH CREATION PROCESS: -# This patch was created using standard unified diff format (not git format). -# Steps: -# 1. Clone Node.js v24.10.0 -# 2. Add require() call in lib/internal/per_context/primordials.js before ObjectFreeze -# 3. Generate patch with: diff -u original.js modified.js -# 4. Validate with: patch -p1 --dry-run < patch-file -# ---- a/lib/internal/per_context/primordials.js -+++ b/lib/internal/per_context/primordials.js -@@ -720,5 +720,8 @@ primordials.SafeStringPrototypeSearch = (str, regexp) => { - return match ? match.index : -1; - }; - -+// Socket CLI: Polyfill localeCompare for small-icu builds. -+require('internal/socketsecurity_polyfills/localeCompare'); -+ - ObjectSetPrototypeOf(primordials, null); - ObjectFreeze(primordials); diff --git a/packages/node-smol-builder/patches/009-socketsecurity_normalize_polyfill_v24.10.0.patch b/packages/node-smol-builder/patches/009-socketsecurity_normalize_polyfill_v24.10.0.patch deleted file mode 100644 index 43e4b9146..000000000 --- a/packages/node-smol-builder/patches/009-socketsecurity_normalize_polyfill_v24.10.0.patch +++ /dev/null @@ -1,28 +0,0 @@ -# @node-versions: v24.10.0+ -# @description: Load external String.prototype.normalize polyfill for small-icu builds -# @requires: lib/internal/socketsecurity_polyfills/normalize.js -# -# Loads Socket CLI's external normalize polyfill from lib/internal/socketsecurity_polyfills/ -# to provide fallback Unicode normalization when native normalize() fails with small-icu. -# -# This patch adds a single require() call in bootstrap/node.js to load the external polyfill module. -# The polyfill is a safety layer that preserves native behavior when possible and provides -# identity function (return unchanged) as a fallback. -# -# PATCH CREATION PROCESS: -# This patch was created using standard unified diff format (not git format). -# Steps: -# 1. Clone Node.js v24.10.0 -# 2. Add require() call at end of lib/internal/bootstrap/node.js -# 3. Generate patch with: diff -u original.js modified.js -# 4. Validate with: patch -p1 --dry-run < patch-file -# ---- a/lib/internal/bootstrap/node.js -+++ b/lib/internal/bootstrap/node.js -@@ -473,3 +473,6 @@ function setupBuffer() { - configurable: true, - }); - } -+ -+// Socket CLI: Polyfill String.prototype.normalize for small-icu builds. -+require('internal/socketsecurity_polyfills/normalize'); diff --git a/packages/node-smol-builder/patches/010-socketsecurity_fix_gyp_py3_hashlib_v24.10.0.patch b/packages/node-smol-builder/patches/010-socketsecurity_fix_gyp_py3_hashlib_v24.10.0.patch deleted file mode 100644 index eb5b69cd0..000000000 --- a/packages/node-smol-builder/patches/010-socketsecurity_fix_gyp_py3_hashlib_v24.10.0.patch +++ /dev/null @@ -1,25 +0,0 @@ -# @node-versions: v24.10.0+ -# @description: Fix gyp Python 3 hashlib.md5() compatibility -# -# Python 3's hashlib.md5() requires bytes, not strings. -# Fixes: TypeError: Strings must be encoded before hashing -# At: tools/gyp/pylib/gyp/generator/ninja.py:813 -# -# This issue affects all Python 3 versions when gyp tries to hash -# file paths for unique_name generation on Windows. 
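-#
-# Minimal reproduction in plain Python 3 (illustrative path string):
-#   >>> import hashlib
-#   >>> hashlib.md5('obj/node.o')                # TypeError: Strings must be encoded before hashing
-#   >>> hashlib.md5(b'obj/node.o').hexdigest()   # works: bytes input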
-# -# References: -# - https://github.com/nodejs/node/blob/main/BUILDING.md -# - https://docs.python.org/3/library/hashlib.html - ---- a/tools/gyp/pylib/gyp/generator/ninja.py -+++ b/tools/gyp/pylib/gyp/generator/ninja.py -@@ -810,7 +810,7 @@ - if self.flavor == "win": - # WriteNewNinjaRule uses unique_name to create a rsp file on win. - extra_bindings.append( -- ("unique_name", hashlib.md5(outputs[0]).hexdigest()) -+ ("unique_name", hashlib.md5(outputs[0].encode()).hexdigest()) - ) - - self.ninja.build( diff --git a/packages/node-smol-builder/patches/011-socketsecurity_fix_abseil_windows_duplicate_symbols_v24.10.0.patch b/packages/node-smol-builder/patches/011-socketsecurity_fix_abseil_windows_duplicate_symbols_v24.10.0.patch deleted file mode 100644 index 2825c38ad..000000000 --- a/packages/node-smol-builder/patches/011-socketsecurity_fix_abseil_windows_duplicate_symbols_v24.10.0.patch +++ /dev/null @@ -1,38 +0,0 @@ -# @node-versions: v24.10.0+ -# @description: Fix abseil duplicate symbol errors on Windows -# -# Adds /FORCE:MULTIPLE linker flag globally for Windows builds to allow -# duplicate symbol definitions between abseil.lib and v8_libbase.lib. -# -# Root cause: Node.js v24 extracted abseil to a separate build target -# (tools/v8_gypfiles/abseil.gyp) to share code between V8 and perfetto. -# v8_libbase depends on and links against abseil.lib, but some abseil -# symbols end up defined in both libraries, causing LNK2005 errors when -# linking executables that use both (like torque.exe). -# -# Solution: Add /FORCE:MULTIPLE to all Windows link operations via the -# toolchain configuration. The linker will pick one definition of each -# duplicate symbol. This is safe because the implementations are identical. -# -# Original errors: -# LNK2005: absl::Mutex::Dtor already defined in v8_libbase.lib -# LNK1169: one or more multiply defined symbols found -# -# References: -# - https://github.com/nodejs/node/pull/57289 (abseil extraction) -# - https://learn.microsoft.com/en-us/cpp/build/reference/force-force-file-output - ---- a/tools/v8_gypfiles/toolchain.gypi -+++ b/tools/v8_gypfiles/toolchain.gypi -@@ -525,6 +525,11 @@ - '/bigobj', # Prevent C1128: number of sections exceeded object file format limit. - ], - }, -+ 'VCLinkerTool': { -+ 'AdditionalOptions': [ -+ '/FORCE:MULTIPLE', # Allow duplicate symbol definitions (abseil.lib vs v8_libbase.lib). -+ ], -+ }, - }, - }], - ['v8_target_arch=="ia32"', { diff --git a/packages/node-smol-builder/patches/012-socketsecurity_fix_inspector_protocol_windows_v24.10.0.patch b/packages/node-smol-builder/patches/012-socketsecurity_fix_inspector_protocol_windows_v24.10.0.patch deleted file mode 100644 index f34d54ef0..000000000 --- a/packages/node-smol-builder/patches/012-socketsecurity_fix_inspector_protocol_windows_v24.10.0.patch +++ /dev/null @@ -1,69 +0,0 @@ -# @node-versions: v24.10.0+ -# @description: Fix inspector protocol code generator on Windows -# -# On Windows, the --config_value argument passing through gyp-win-tool and -# ninja response files fails to properly set config.protocol.path, causing: -# AttributeError: 'X' object has no attribute 'path' -# -# This patch modifies code_generator.py to accept the config file path and -# compute the protocol path from it when --config_value fails to set it. -# -# Root cause: When ninja executes the action through gyp-win-tool action-wrapper -# on Windows, arguments in response files (.rsp) don't preserve the -# --config_value protocol.path=... 
argument correctly, so the path attribute -# never gets added to the protocol config object by init_defaults(). -# -# Solution: -# 1. Modify Protocol.__init__() to accept config_file parameter -# 2. Compute protocol path from config file location when missing -# 3. Add the path to config.protocol so main() can use it -# -# Original error: -# AttributeError: 'X' object has no attribute 'path' -# At: code_generator.py:365 in Protocol.__init__ -# -# References: -# - Node.js v24 gyp build system Windows argument passing -# - tools/v8_gypfiles/v8.gyp protocol_generated_sources action - ---- a/deps/v8/third_party/inspector_protocol/code_generator.py -+++ b/deps/v8/third_party/inspector_protocol/code_generator.py -@@ -357,12 +357,27 @@ - - class Protocol(object): - -- def __init__(self, config): -+ def __init__(self, config, config_file=None): - self.config = config - self.json_api = {"domains": []} - self.imported_domains = [] - self.exported_domains = [] -- self.generate_domains = self.read_protocol_file(config.protocol.path) -+ # Windows gyp-win-tool may fail to pass --config_value correctly. -+ # Fall back to computing the path from the config file location. -+ if hasattr(config.protocol, 'path'): -+ protocol_path = config.protocol.path -+ else: -+ # Compute path from config file: deps/v8/src/inspector -> deps/v8/include -+ if config_file: -+ config_dir = os.path.dirname(config_file) -+ protocol_path = os.path.normpath(os.path.join(config_dir, '../../include/js_protocol.pdl')) -+ else: -+ raise Exception("config.protocol.path not set and config_file not provided") -+ # Add path to config so main() can use it later. -+ protocol_obj = config.protocol._replace(path=protocol_path) -+ config = config._replace(protocol=protocol_obj) -+ self.config = config -+ self.generate_domains = self.read_protocol_file(protocol_path) - - if config.protocol.options: - self.generate_domains = [rule.domain for rule in config.protocol.options] -@@ -604,7 +619,7 @@ - def main(): - jinja_dir, config_file, config = read_config() - -- protocol = Protocol(config) -+ protocol = Protocol(config, config_file) - - if not config.exported and len(protocol.exported_domains): - sys.stderr.write(("Domains [%s] are exported, but config is missing export " diff --git a/packages/node-smol-builder/patches/README.md b/packages/node-smol-builder/patches/README.md deleted file mode 100644 index eca4baf37..000000000 --- a/packages/node-smol-builder/patches/README.md +++ /dev/null @@ -1,1081 +0,0 @@ -# Socket CLI Node.js Patches - -This directory contains Socket-specific patches applied on top of yao-pkg patches when building custom Node.js binaries for executable packaging. - -## 🎯 The Big Picture: Why We Need This - -### The Problem -Node.js executables created by standard tools are **80+ MB** and include unnecessary features. We need: -- Smaller binaries (~50MB instead of 80+MB) -- V8 bytecode compilation (compile without shipping source) -- PKG compatibility (yao-pkg is the only pkg that supports Node.js v24+) -- SEA detection (Single Executable Application detection must work) - -### The Solution -We build a **custom Node.js binary** that: -1. **Removes bloat**: No npm, corepack, inspector, amaro, sqlite (~30MB savings) -2. **Adds PKG support**: yao-pkg patches enable bytecode compilation -3. **Fixes bugs**: Socket patches fix V8 include paths and SEA detection -4. **Optimizes size**: Strip debug symbols (82MB → 54MB) -5. 
**Works with pkg**: Install to cache so pkg uses our custom binary - -## 📐 Architecture Overview - -### Three-Layer System - -``` -┌────────────────────────────────────────────────────────┐ -│ Layer 3: Socket CLI Application │ -│ - Your JavaScript code │ -│ - Bundled and compiled to V8 bytecode │ -│ - Embedded into executable │ -└────────────────────────────────────────────────────────┘ - ▼ -┌────────────────────────────────────────────────────────┐ -│ Layer 2: Socket Patches │ -│ - Fix V8 include paths (build fix) │ -│ - Override isSea() to return true (SEA detection) │ -└────────────────────────────────────────────────────────┘ - ▼ -┌────────────────────────────────────────────────────────┐ -│ Layer 1: yao-pkg Patches │ -│ - V8 bytecode compilation API │ -│ - PKG bootstrap system │ -│ - BAKERY placeholder system │ -└────────────────────────────────────────────────────────┘ - ▼ -┌────────────────────────────────────────────────────────┐ -│ Layer 0: Node.js v24.10.0 Source │ -│ - Upstream Node.js codebase │ -│ - Configured with size optimizations │ -│ - Stripped of debug symbols │ -└────────────────────────────────────────────────────────┘ -``` - -## 🔄 Complete Build → Test Flow - -### Phase 1: Pre-Flight (Safety Checks) - -**Purpose**: Verify environment is ready before starting expensive build - -``` -┌─────────────────────────────────────────────────┐ -│ 1. Check Required Tools │ -│ - git, curl, patch, make, strip, codesign │ -│ - Exit early if missing (save time) │ -├─────────────────────────────────────────────────┤ -│ 2. Check yao-pkg Patch Availability │ -│ - HEAD request to GitHub │ -│ - Fail fast if patch doesn't exist │ -│ - Provide helpful error with alternatives │ -├─────────────────────────────────────────────────┤ -│ 3. Check Disk Space (TODO) │ -│ - Build requires ~5GB free space │ -│ - Warn if low disk space │ -└─────────────────────────────────────────────────┘ -``` - -**Why This Matters**: -- Detecting missing tools after 30 minutes of building is frustrating -- Network issues should be caught before cloning 2GB of Node.js source -- Disk space issues can corrupt builds - -### Phase 2: Setup (Preparation) - -**Purpose**: Get clean Node.js source and patches - -``` -┌─────────────────────────────────────────────────┐ -│ 1. Download yao-pkg Patch │ -│ - Curl from GitHub raw URL │ -│ - Cache locally (.custom-node-build/patches)│ -│ - Reuse on subsequent builds │ -├─────────────────────────────────────────────────┤ -│ 2. Clone Node.js Source (or Reset) │ -│ - Clone: New build (no directory exists) │ -│ - Reset: Re-build (directory exists) │ -│ - Clean: --clean flag (force fresh start) │ -│ - Why reset: Ensures clean state │ -├─────────────────────────────────────────────────┤ -│ 3. Git Clean │ -│ - Remove untracked files │ -│ - Remove modified files │ -│ - Reset to exact tag state │ -│ - Why: Previous builds may have left artifacts│ -└─────────────────────────────────────────────────┘ -``` - -**Why This Matters**: -- Dirty source trees cause unpredictable build failures -- Cached patches speed up rebuilds -- Reset allows rebuilds without re-downloading 2GB - -### Phase 3: Patching (Modifications) - -**Purpose**: Apply yao-pkg patches, then Socket patches - -``` -┌─────────────────────────────────────────────────┐ -│ 1. 
Apply yao-pkg Patch │ -│ - Adds V8 bytecode compilation API │ -│ - Adds lib/internal/bootstrap/pkg.js │ -│ - Modifies src/node_main.cc for PKG_EXECPATH│ -│ - Adds BAKERY placeholder system │ -│ - CRITICAL: Without this, pkg won't work │ -├─────────────────────────────────────────────────┤ -│ 2. Find Socket Patches │ -│ - Try: socket-node-modifications-v24-10-0.patch│ -│ - Try: individual patches for v24.10.0 │ -│ - Try: generic v24 patches │ -│ - Fallback: Apply modifications directly │ -│ - Why flexible: Handles version bumps gracefully│ -├─────────────────────────────────────────────────┤ -│ 3. Apply Socket Patches (or Direct Mods) │ -│ - Fix V8 include paths (build would fail) │ -│ - Override isSea() → true (SEA detection) │ -│ - CRITICAL: Without this, SEA check fails │ -├─────────────────────────────────────────────────┤ -│ 4. VERIFY Modifications │ -│ - Check lib/sea.js has isSea override │ -│ - Check V8 includes are fixed │ -│ - FAIL BUILD if verification fails │ -│ - Why: Catch issues early before 30min build │ -└─────────────────────────────────────────────────┘ -``` - -**Why This Matters**: -- **Patch order is critical**: yao-pkg THEN Socket (Socket patches expect yao-pkg base) -- **Verification prevents wasted builds**: 30 minutes of building just to discover patches didn't apply -- **Flexible fallback**: Version bumps don't break the build - -**Deep Dive: Why Each Socket Patch Matters** - -1. **V8 Include Path Fix** (`fix-v8-include-paths-*.patch`): - ```cpp - // BEFORE (Node.js v24.9.0+ has this bug): - #include "src/base/hashmap.h" // ← WRONG! Fails to compile - - // AFTER (Socket patch fixes it): - #include "base/hashmap.h" // ← CORRECT! Compiles successfully - ``` - - **Why it's broken**: Node.js v24.9.0+ introduced incorrect include paths - - **Impact if not fixed**: Build fails with "file not found" errors - - **Affects**: 5 V8 header files - -2. **SEA Detection Override** (`enable-sea-for-pkg-binaries-*.patch`): - ```javascript - // BEFORE (stock Node.js): - const { isSea, getAsset, getAssetKeys } = internalBinding('sea'); - // isSea() returns FALSE for pkg binaries (wrong!) - - // AFTER (Socket patch): - const isSea = () => true; // ← Override to always return true - const { getAsset, getAssetKeys } = internalBinding('sea'); - // isSea() returns TRUE for pkg binaries (correct!) 
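-
-   // Quick smoke test inside a pkg-built executable (illustrative, not from
-   // the original README):
-   //   require('node:sea').isSea()  // → true after this patch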
- ``` - - **Why it's needed**: PKG binaries ARE SEAs but native binding doesn't detect them - - **Impact if not fixed**: `require('node:sea').isSea()` returns false - - **Affects**: SEA-aware code won't recognize pkg binaries as SEAs - -### Phase 4: Configuration (Build Settings) - -**Purpose**: Configure Node.js build with size optimizations - -``` -┌─────────────────────────────────────────────────┐ -│ ./configure Flags │ -│ │ -│ ✅ KEEP: │ -│ - V8 full compiler (bytecode compilation) │ -│ - V8 JIT (performance) │ -│ - WASM support │ -│ - SSL/crypto (https support) │ -│ - libuv (async I/O) │ -│ │ -│ ❌ REMOVE (saves ~30MB): │ -│ --without-npm (~10MB) │ -│ --without-corepack (~5MB) │ -│ --without-inspector (~8MB) │ -│ --without-amaro (~2MB) │ -│ --without-sqlite (~5MB) │ -│ │ -│ 🌍 MINIMIZE: │ -│ --with-intl=small-icu (~5MB savings) │ -│ (English-only ICU data) │ -└─────────────────────────────────────────────────┘ -``` - -**Why This Matters**: -- **npm**: Not needed in executable (Socket CLI doesn't use it) -- **inspector**: Debugging not needed in production executables -- **ICU**: Full ICU is 30MB, small-icu is 5MB (English is enough) -- **Result**: 82MB → 54MB after stripping - -### Phase 5: Build (Compilation) - -**Purpose**: Compile Node.js with all optimizations - -``` -┌─────────────────────────────────────────────────┐ -│ make -j │ -│ │ -│ Time: 30-60 minutes │ -│ CPUs: All cores (parallel compilation) │ -│ Output: out/Release/node (82MB with debug symbols)│ -│ │ -│ What's happening: │ -│ 1. C++ compilation (Node.js + V8 + libuv) │ -│ 2. JavaScript compilation (lib/*.js) │ -│ 3. Linking all components together │ -│ 4. Embedding ICU data │ -└─────────────────────────────────────────────────┘ -``` - -**Why This Matters**: -- **Parallel build**: Uses all CPU cores for faster compilation -- **Debug symbols**: Included by default (we'll strip them) -- **Binary size**: 82MB before optimization - -### Phase 6: Post-Processing (Optimization) - -**Purpose**: Optimize binary and verify correctness - -``` -┌─────────────────────────────────────────────────┐ -│ 1. Test Binary (smoke test) │ -│ - Run: node --version │ -│ - Run: node -e "console.log('test')" │ -│ - Why: Catch build failures early │ -├─────────────────────────────────────────────────┤ -│ 2. Strip Debug Symbols │ -│ - Command: strip out/Release/node │ -│ - Before: 82MB │ -│ - After: 54MB │ -│ - Saves: 28MB (~34% size reduction) │ -│ - Trade-off: Lose debug symbols, keep functionality│ -├─────────────────────────────────────────────────┤ -│ 3. Verify Size │ -│ - Expected: 50-60MB │ -│ - Warn if: <50MB (missing features?) │ -│ - Warn if: >70MB (strip failed?) │ -├─────────────────────────────────────────────────┤ -│ 4. Code Sign (macOS ARM64 only) │ -│ - Command: codesign --sign - --force │ -│ - Why: macOS requires signing for ARM64 │ -│ - Without: Binary won't run on ARM Macs │ -├─────────────────────────────────────────────────┤ -│ 5. Install to pkg Cache │ -│ - Copy to: ~/.pkg-cache/v3.5/built-v24.10.0-*│ -│ - Name format: built---[-signed]│ -│ - Why: pkg looks here for custom binaries │ -└─────────────────────────────────────────────────┘ -``` - -**Why This Matters**: -- **Stripping**: Removes debugging info we don't need (34% smaller!) 
-- **Signing**: macOS ARM64 won't run unsigned binaries -- **Cache location**: pkg has specific naming convention and location expectations - -### Phase 7: Verification (Correctness) - -**Purpose**: Verify the build is correct BEFORE using it - -``` -┌─────────────────────────────────────────────────┐ -│ scripts/verify-node-build.mjs │ -│ │ -│ ✅ Verification Checks: │ -│ 1. Binary exists in cache │ -│ 2. lib/sea.js modification applied │ -│ 3. V8 include paths fixed │ -│ 4. Binary size reasonable (50-60MB) │ -│ 5. Binary functional (--version works) │ -│ 6. Binary can execute JS │ -│ 7. SEA detection returns true │ -│ 8. macOS signature valid (if applicable) │ -│ │ -│ Why every check matters: │ -│ - Socket mods not applied? Binary won't work with pkg│ -│ - V8 not fixed? Build should have failed │ -│ - SEA returns false? PKG executables broken │ -│ - Size wrong? Something went wrong in config │ -└─────────────────────────────────────────────────┘ -``` - -**Why This Matters**: -- **Catch bugs before pkg**: Finding issues AFTER creating pkg executables wastes time -- **Verify critical features**: SEA detection is make-or-break for pkg -- **Size checks**: Wrong size indicates configuration problems - -### Phase 8: Integration Testing (End-to-End) - -**Purpose**: Test the entire build → pkg → execute flow - -``` -┌─────────────────────────────────────────────────┐ -│ scripts/test-yao-pkg-integration.mjs │ -│ │ -│ 🧪 Integration Tests: │ -│ 1. Build Socket CLI (pnpm run build) │ -│ 2. Create test package.json + test-cli.js │ -│ 3. Run pkg to create executable │ -│ 4. Execute the binary │ -│ 5. Verify SEA detection works │ -│ 6. Verify file system access │ -│ 7. Verify module loading │ -│ 8. Clean up test artifacts │ -│ │ -│ Why end-to-end: │ -│ - Unit tests pass, integration fails? This catches it│ -│ - Tests the ACTUAL use case (build + pkg + run)│ -│ - Verifies pkg uses our custom binary │ -│ - Confirms SEA detection in real executable │ -└─────────────────────────────────────────────────┘ -``` - -**Why This Matters**: -- **Real-world scenario**: Tests exactly how Socket CLI will use the binary -- **Catches integration bugs**: Unit tests can't catch pkg-specific issues -- **Automated**: Can run in CI/CD to prevent regressions - -## 🛡️ Error Recovery & Resilience - -### Automatic Fallback System - -The build script has **4 layers of fallback** for Socket patches: - -``` -┌─────────────────────────────────────────────────┐ -│ Tier 1: Versioned Combined Patch │ -│ socket-node-modifications-v24-10-0.patch │ -│ ├─ Exists? Use it ✅ │ -│ └─ Missing? Try Tier 2 ⬇ │ -├─────────────────────────────────────────────────┤ -│ Tier 2: Individual Version Patches │ -│ fix-v8-include-paths-v24-10-0.patch │ -│ enable-sea-for-pkg-binaries-v24-10-0.patch │ -│ ├─ Exists? Use them ✅ │ -│ └─ Missing? Try Tier 3 ⬇ │ -├─────────────────────────────────────────────────┤ -│ Tier 3: Generic v24 Patches │ -│ fix-v8-include-paths-v24.patch │ -│ enable-sea-for-pkg-binaries-v24.patch │ -│ ├─ Exists? Use them ✅ │ -│ └─ Missing? 
Try Tier 4 ⬇ │ -├─────────────────────────────────────────────────┤ -│ Tier 4: Direct Modification Application │ -│ Apply changes directly to source files │ -│ ├─ Always works ✅ │ -│ └─ No patches needed │ -└─────────────────────────────────────────────────┘ -``` - -**Why This Matters**: -- **Version bump resilience**: v24.10.0 → v24.11.0 still works (uses Tier 3 or 4) -- **No patch maintenance needed**: Tier 4 always works -- **Optimization available**: Can generate patches later for reproducibility -- **Never fails**: Build always succeeds regardless of patch availability - -### Common Failure Scenarios & Recovery - -#### Scenario 1: yao-pkg Patch Not Available - -``` -❌ Problem: User sets NODE_VERSION = 'v24.11.0' but yao-pkg hasn't released patches yet - -✅ Recovery: Build script detects this BEFORE cloning (saves 30+ minutes) - Provides clear error message: - - Link to yao-pkg patches page - - Suggests using previous version - - Explains how to update NODE_VERSION -``` - -#### Scenario 2: Socket Patches Fail to Apply - -``` -❌ Problem: Socket patches exist but don't apply (Node.js source changed) - -✅ Recovery: Build script automatically falls back to direct modification - - Catches patch failure - - Applies modifications directly - - Suggests regenerating patches - - Build continues successfully -``` - -#### Scenario 3: Modifications Not Applied - -``` -❌ Problem: Patches applied but modifications not actually in source - (corrupted patch, wrong files, etc.) - -✅ Recovery: Verification step catches this BEFORE 30-minute build - - Checks lib/sea.js has isSea override - - Checks V8 includes fixed - - Fails with clear error - - Suggests: --clean flag to rebuild -``` - -#### Scenario 4: Build Artifacts from Previous Build - -``` -❌ Problem: Previous build failed midway, left partial artifacts - -✅ Recovery: Git reset + clean at start of build - - Resets to exact tag state - - Removes all untracked files - - Or use --clean flag for nuclear option -``` - -#### Scenario 5: Binary Size Wrong - -``` -❌ Problem: Binary is 80MB instead of 54MB (stripping failed?) - -✅ Recovery: Post-build verification detects size issues - - Warns if outside 50-60MB range - - Suggests checking strip command - - Suggests checking configure flags - - Binary still works, just larger -``` - -#### Scenario 6: SEA Detection Returns False - -``` -❌ Problem: Binary built successfully but isSea() returns false - -✅ Recovery: Verification script tests SEA detection - - Runs test script in binary - - Checks isSea() return value - - FAILS if false (critical bug) - - Provides instructions to rebuild with --clean -``` - -## 📜 Scripts Reference - -### Build Scripts - -#### `scripts/build-yao-pkg-node.mjs` -**Purpose**: Main build script - creates custom Node.js binary - -**Usage**: -```bash -# Normal build -node scripts/build-yao-pkg-node.mjs - -# Force fresh start (clean + rebuild) -node scripts/build-yao-pkg-node.mjs --clean - -# Build and verify -node scripts/build-yao-pkg-node.mjs --verify -``` - -**What It Does**: -1. Pre-flight checks (tools, patch availability) -2. Downloads/clones Node.js source -3. Applies yao-pkg + Socket patches -4. Verifies modifications applied -5. Configures with optimizations -6. Builds (30-60 minutes) -7. Strips debug symbols -8. Signs (macOS only) -9. 
Installs to pkg cache - -**When To Use**: -- First time setup -- Node.js version bump -- Build failed and need fresh start -- Patches updated - ---- - -#### `scripts/verify-node-build.mjs` -**Purpose**: Comprehensive verification of built binary - -**Usage**: -```bash -# Verify current build -node scripts/verify-node-build.mjs - -# Verify specific version -node scripts/verify-node-build.mjs --node-version=v24.10.0 -``` - -**What It Does**: -1. Checks binary exists in cache -2. Verifies lib/sea.js modification -3. Verifies V8 include fixes -4. Tests binary functionality -5. Tests SEA detection -6. Checks binary size -7. Verifies signature (macOS) - -**When To Use**: -- After building -- Before creating pkg executables -- Debugging issues -- CI/CD pipeline - ---- - -#### `scripts/test-yao-pkg-integration.mjs` -**Purpose**: End-to-end integration test with pkg - -**Usage**: -```bash -# Run full integration test -node scripts/test-yao-pkg-integration.mjs - -# Test specific version -node scripts/test-yao-pkg-integration.mjs --node-version=v24.10.0 -``` - -**What It Does**: -1. Builds Socket CLI -2. Creates test package -3. Runs pkg to create executable -4. Executes and tests the binary -5. Verifies SEA detection -6. Cleans up test artifacts - -**When To Use**: -- Before releasing -- After major changes -- CI/CD pipeline -- Debugging pkg issues - ---- - -### Patch Generation Scripts - -#### `scripts/regenerate-node-patches.mjs` -**Purpose**: Generate Socket patches for new Node.js versions - -**Usage**: -```bash -# Generate patches for v24.11.0 -node scripts/regenerate-node-patches.mjs --version=v24.11.0 -``` - -**What It Does**: -1. Clones fresh Node.js source -2. Applies yao-pkg patches -3. Commits baseline -4. Applies Socket modifications -5. Generates patch from diff -6. Saves to build/patches/socket/ - -**When To Use**: -- Node.js version bumps -- Want reproducible builds -- Sharing patches with team -- CI/CD needs consistent patches - ---- - -#### `scripts/apply-socket-mods.mjs` -**Purpose**: Apply Socket modifications directly (for testing) - -**Usage**: -```bash -# Apply mods to existing Node.js source -node scripts/apply-socket-mods.mjs -``` - -**What It Does**: -1. Modifies lib/sea.js -2. Fixes V8 include paths -3. Does NOT commit or create patches - -**When To Use**: -- Testing modifications -- Debugging patch issues -- Manual patch creation - -## 🚀 Common Workflows - -### Workflow 1: First Time Setup - -```bash -# 1. Build custom Node.js binary -node scripts/build-yao-pkg-node.mjs - -# Expected output: -# - Binary built: .custom-node-build/node-yao-pkg/out/Release/node -# - Installed to: ~/.pkg-cache/v3.5/built-v24.10.0-darwin-arm64-signed -# - Time: 30-60 minutes - -# 2. Verify build -node scripts/verify-node-build.mjs - -# Expected output: -# ✅ All verifications passed - -# 3. Build Socket CLI -pnpm run build - -# 4. Create executable -pnpm exec pkg . - -# 5. Test executable -./pkg-binaries/socket-macos-arm64 --version -``` - ---- - -### Workflow 2: Node.js Version Bump (e.g., v24.10.0 → v24.11.0) - -```bash -# 1. Update NODE_VERSION in build script -vim scripts/build-yao-pkg-node.mjs -# Change: const NODE_VERSION = 'v24.11.0' - -# 2. Check if yao-pkg patch exists -curl -I https://raw.githubusercontent.com/yao-pkg/pkg-fetch/main/patches/node.v24.11.0.cpp.patch - -# 3. If patch exists, build normally -node scripts/build-yao-pkg-node.mjs -# (Uses fallback if Socket patches missing) - -# 4. 
If build succeeds, generate Socket patches for future use -node scripts/regenerate-node-patches.mjs --version=v24.11.0 - -# 5. Commit new patches -git add build/patches/socket/*v24-11-0.patch -git commit -m "Add patches for Node.js v24.11.0" -``` - ---- - -### Workflow 3: Build Failed / Something Went Wrong - -```bash -# 1. Try clean rebuild first -node scripts/build-yao-pkg-node.mjs --clean - -# If that doesn't work: - -# 2. Manually clean everything -rm -rf .custom-node-build/node-yao-pkg -rm -rf ~/.pkg-cache/v3.5/built-v24.10.0-* - -# 3. Rebuild from scratch -node scripts/build-yao-pkg-node.mjs - -# 4. If still failing, check build log for specific error -# Common issues: -# - Missing tools: brew install git curl -# - yao-pkg patch missing: Use previous Node version -# - Disk space: Free up 5GB+ -# - Network issues: Check internet connection -``` - ---- - -### Workflow 4: Socket Patches Outdated - -```bash -# Situation: Socket patches exist but fail to apply - -# Build script automatically handles this: -# 1. Tries to apply patches -# 2. Catches failure -# 3. Falls back to direct modifications -# 4. Build succeeds - -# After build succeeds: - -# Regenerate patches for this version -node scripts/regenerate-node-patches.mjs --version=v24.10.0 - -# Commit updated patches -git add build/patches/socket/*v24-10-0.patch -git commit -m "Update patches for Node.js v24.10.0" -``` - ---- - -### Workflow 5: CI/CD Pipeline - -```yaml -# Example GitHub Actions workflow - -name: Build Custom Node.js - -on: - push: - branches: [main] - workflow_dispatch: - -jobs: - build: - runs-on: macos-latest # Or ubuntu-latest, windows-latest - - steps: - - uses: actions/checkout@v4 - - - name: Setup pnpm - uses: pnpm/action-setup@v2 - - - name: Build custom Node.js binary - run: node scripts/build-yao-pkg-node.mjs - timeout-minutes: 90 - - - name: Verify build - run: node scripts/verify-node-build.mjs - - - name: Run integration tests - run: node scripts/test-yao-pkg-integration.mjs - - - name: Upload binary artifact - uses: actions/upload-artifact@v4 - with: - name: node-binary-${{ runner.os }}-${{ runner.arch }} - path: ~/.pkg-cache/v3.5/built-v24.10.0-* -``` - -## 🎯 Quick Reference - -### Build Commands -```bash -# First time build -node scripts/build-yao-pkg-node.mjs - -# Clean rebuild -node scripts/build-yao-pkg-node.mjs --clean - -# Build + verify -node scripts/build-yao-pkg-node.mjs --verify -``` - -### Verification Commands -```bash -# Verify build correctness -node scripts/verify-node-build.mjs - -# Full integration test -node scripts/test-yao-pkg-integration.mjs -``` - -### Patch Commands -```bash -# Generate patches for new version -node scripts/regenerate-node-patches.mjs --version=v24.11.0 - -# Apply modifications directly (testing) -node scripts/apply-socket-mods.mjs -``` - -### File Locations -```bash -# Node.js source -.custom-node-build/node-yao-pkg/ - -# Built binary -.custom-node-build/node-yao-pkg/out/Release/node - -# pkg cache -~/.pkg-cache/v3.5/built-v24.10.0-darwin-arm64-signed - -# Socket and PKG patches -build/patches/ - -# yao-pkg patch cache -.custom-node-build/patches/node.v24.10.0.cpp.patch -``` - -## Complete Build Process - -### End-to-End Flow (Detailed) - -## Patch Layers - -### Layer 1: yao-pkg Patches (Upstream) - -**Source**: `https://github.com/yao-pkg/pkg-fetch/tree/main/patches` - -**Purpose**: Enable V8 bytecode compilation and PKG executable embedding - -**Key Modifications**: -1. **V8 Bytecode API** - `EnableCompilationForSourcelessUse()`, `FixSourcelessScript()` -2. 
**V8 Snapshot Serialization** - Pointer compression fixes -3. **PKG Bootstrap** - `lib/internal/bootstrap/pkg.js` (NEW FILE) -4. **Environment Detection** - `PKG_EXECPATH` checking in `src/node_main.cc` -5. **BAKERY System** - Placeholder for node flags + entry point -6. **Bootstrap Routing** - Use `internal/bootstrap/pkg` instead of `internal/main/run_main_module` - -### Layer 2: Socket Patches (Custom) - -**Location**: `/build/patches/` - -#### Patch 1: V8 Include Path Fixes - -**Files**: `fix-v8-include-paths-v24-10-0.patch` - -**Problem**: Node.js v24.9.0+ has incorrect V8 include paths causing build failures - -**Solution**: Remove erroneous `src/` prefix from V8 internal includes - -```diff --#include "src/base/hashmap.h" -+#include "base/hashmap.h" -``` - -**Affected Files**: -- `deps/v8/src/ast/ast-value-factory.h` -- `deps/v8/src/heap/new-spaces-inl.h` -- `deps/v8/src/heap/factory-inl.h` -- `deps/v8/src/objects/js-objects-inl.h` -- `deps/v8/src/heap/cppgc/heap-page.h` - -#### Patch 2: SEA Detection for PKG Binaries - -**Files**: `enable-sea-for-pkg-binaries-v24-10-0.patch` - -**Problem**: `require('node:sea').isSea()` returns false for pkg binaries - -**Solution**: Override `isSea()` to always return `true` - -```diff --const { isSea, getAsset, getAssetKeys } = internalBinding('sea'); -+const isSea = () => true; -+const { getAsset, getAssetKeys } = internalBinding('sea'); -``` - -**Why**: PKG binaries are functionally SEAs - this ensures consistent detection. - -## Automatic Patch Management - -### Build Script Intelligence - -The build script (`scripts/build-yao-pkg-node.mjs`) automatically handles patches: - -```javascript -// 1. Try versioned patches first -socket-node-modifications-v24-10-0.patch - -// 2. Fall back to individual patches -fix-v8-include-paths-v24-10-0.patch -enable-sea-for-pkg-binaries-v24-10-0.patch - -// 3. Fall back to generic v24 patches -fix-v8-include-paths-v24.patch -enable-sea-for-pkg-binaries-v24.patch - -// 4. If no patches found → Apply modifications directly to source -``` - -**No manual intervention needed** - the build works regardless of patch availability! - -## Regenerating Patches for New Node Versions - -### When Node.js Version Bumps (e.g., v24.9.0 → v24.11.0) - -**Option 1: Let Build Apply Directly** (Recommended) -```bash -# Just run the build - it will apply modifications directly -node scripts/build-yao-pkg-node.mjs -``` - -**Option 2: Generate Patches Explicitly** -```bash -# Generate versioned patches -node scripts/regenerate-node-patches.mjs --version=v24.11.0 - -# This creates: -# - build/patches/socket/socket-node-modifications-v24-11-0.patch -``` - -### Manual Patch Creation (Advanced) - -If you need to create patches manually: - -```bash -# 1. Build Node with yao-pkg patches -node scripts/build-yao-pkg-node.mjs - -# 2. Navigate to Node source -cd .custom-node-build/node-yao-pkg - -# 3. Create a commit for baseline -git add -A -git commit -m "Baseline after yao-pkg patches" - -# 4. Make your modifications -vim lib/sea.js -vim deps/v8/src/ast/ast-value-factory.h - -# 5. Generate patch -git diff > ../../build/patches/socket/my-custom-patch-v24-10-0.patch - -# 6. 
Update build script SOCKET_PATCHES array (if not using auto-discovery) -``` - -## Verification - -### Build Flags Verification - -```bash -# Check configure flags were applied -cat .custom-node-build/node-yao-pkg/config.gypi | grep -E "(npm|inspector|intl)" - -# Expected: -# node_install_npm: false -# node_use_sqlite: false -# v8_enable_inspector: 0 -``` - -### Patch Verification - -```bash -# Check patches were applied -cd .custom-node-build/node-yao-pkg -git diff HEAD | head -50 - -# Should show: -# - V8 include path changes -# - lib/sea.js isSea modification -# - yao-pkg BAKERY system -# - PKG bootstrap file -``` - -### Binary Verification - -```bash -# Check binary size and signature -ls -lh ~/.pkg-cache/v3.5/built-v24.10.0-darwin-arm64-signed -# Expected: ~54MB (after stripping) - -# Check PKG placeholder exists -strings ~/.pkg-cache/v3.5/built-v24.10.0-darwin-arm64-signed | grep -i "pkg" -# Should show: BAKERY, PKG_EXECPATH, internal/bootstrap/pkg - -# Check signature (macOS) -codesign -dv ~/.pkg-cache/v3.5/built-v24.10.0-darwin-arm64-signed -``` - -## Configuration - -### Build Settings - -All size optimizations are in `scripts/build-yao-pkg-node.mjs`: - -```javascript -'--with-intl=small-icu', // English-only ICU (~5MB savings) -'--without-npm', // Remove npm (~10MB savings) -'--without-corepack', // Remove corepack -'--without-inspector', // Remove V8 inspector/debugger -'--without-amaro', // Remove amaro -'--without-sqlite', // Remove SQLite -``` - -### PKG Configuration - -PKG settings are in `pkg.json`: - -```json -{ - "node": "/path/to/.custom-node-build/node-yao-pkg/out/Release/node", - "targets": ["node24-macos-arm64"], - "outputPath": "pkg-binaries" -} -``` - -## Troubleshooting - -### Build Fails: "patch failed to apply" - -**Cause**: Patch format mismatch or Node.js source changed - -**Solution**: -```bash -# Let build apply modifications directly -node scripts/build-yao-pkg-node.mjs -# It will auto-apply without patches - -# Then regenerate patches if needed -node scripts/regenerate-node-patches.mjs --version=v24.10.0 -``` - -### Build Fails: "Cannot find yao-pkg patch" - -**Cause**: yao-pkg hasn't released patches for this Node version yet - -**Solution**: Use previous Node version or wait for yao-pkg update - -### Binary Too Large (>60MB) - -**Check**: -1. Debug symbols stripped? (script does this automatically) -2. Configure flags applied? (check config.gypi) -3. ICU data included? (should be small-icu) - -### pkg Can't Find Binary - -**Check**: -```bash -# Verify cache location -ls ~/.pkg-cache/v3.5/built-v24* - -# Verify pkg.json points to right path -cat pkg.json | grep "node" -``` - -## Scripts Reference - -| Script | Purpose | -|--------|---------| -| `build-yao-pkg-node.mjs` | **Main build script** - Builds complete patched Node binary | -| `regenerate-node-patches.mjs` | Generate patches for new Node versions | -| `apply-socket-mods.mjs` | Apply Socket modifications to Node source | -| `generate-node-patches.mjs` | Legacy patch generator (use regenerate instead) | - -## Development Workflow - -### Standard Development -```bash -# Just build - patches auto-applied or mods applied directly -node scripts/build-yao-pkg-node.mjs - -# Build CLI -pnpm run build - -# Create pkg binary -pnpm exec pkg . -``` - -### Adding New Modifications - -1. **Modify `applySocketModificationsDirectly()` in `build-yao-pkg-node.mjs`** -2. **Test the build** -3. **Optionally generate patches** (for reproducibility) - -### Version Bump Workflow - -```bash -# 1. 
Update NODE_VERSION in build-yao-pkg-node.mjs -vim scripts/build-yao-pkg-node.mjs # Change to v24.11.0 - -# 2. Build (auto-applies mods) -node scripts/build-yao-pkg-node.mjs - -# 3. If build succeeds, optionally generate patches -node scripts/regenerate-node-patches.mjs --version=v24.11.0 - -# 4. Commit new patches -git add build/patches/socket/*v24-11-0.patch -git commit -m "Add patches for Node.js v24.11.0" -``` - -## Why This Design? - -1. **Zero Manual Intervention**: Build works with or without patches -2. **Version Resilient**: Automatically handles version bumps -3. **Reproducible**: Patches capture exact changes for CI/CD -4. **Maintainable**: Modifications defined in code, not scattered patches -5. **Debuggable**: Can see what's being applied in build logs - -## Related Documentation - -- [yao-pkg Documentation](https://github.com/yao-pkg/pkg) -- [Node.js SEA Documentation](https://nodejs.org/api/single-executable-applications.html) -- [V8 Bytecode](https://v8.dev/blog/understanding-ecmascript-part-4) diff --git a/packages/node-smol-builder/scripts/build.mjs b/packages/node-smol-builder/scripts/build.mjs deleted file mode 100755 index 01e080cd6..000000000 --- a/packages/node-smol-builder/scripts/build.mjs +++ /dev/null @@ -1,2158 +0,0 @@ -/** - * @fileoverview Build Node.js v24.10.0 with custom patches - * - * This script produces a custom Node binary for Socket CLI distribution. - * It clones Node.js source, applies custom patches, configures with - * size optimizations, and builds a standalone binary. - * - * Binary Size Optimization Strategy: - * - * Starting size: ~49 MB (default Node.js v24 build) - * - * Stage 1: Configure flags - * + --with-intl=small-icu: ~44 MB (-5 MB: English-only ICU) - * + --v8-lite-mode: ~29 MB (-20 MB: Disable TurboFan JIT) - * + --disable-SEA: ~28 MB (-21 MB: Remove SEA support) - * + --without-* flags: ~27 MB (-22 MB: Remove npm, inspector, etc.) - * - * Stage 2: Binary stripping - * + strip (platform-specific): ~25 MB (-24 MB: Remove debug symbols) - * - * Stage 3: Compression (this script) - * + pkg Brotli (VFS): ~23 MB (-26 MB: Compress Socket CLI code) - * + Node.js lib/ minify+Brotli:~21 MB (-28 MB: Compress built-in modules) - * - * TARGET EXPECTED: ~21 MB (small-icu adds ~3MB vs intl=none) - * - * Size Breakdown: - * - Node.js lib/ (compressed): ~2.5 MB (minified + Brotli) - * - Socket CLI (VFS): ~13 MB (pkg Brotli) - * - Native code (V8, libuv): ~2.5 MB (stripped) - * - * Compression Approach: - * 1. Node.js built-in modules: esbuild minify → Brotli quality 11 - * 2. 
Socket CLI application: pkg automatic Brotli compression - * - * Performance Impact: - * - Startup overhead: ~50-100 ms (one-time decompression) - * - Runtime performance: ~5-10x slower JS (V8 Lite mode) - * - WASM performance: Unaffected (Liftoff baseline compiler) - * - * Usage: - * node scripts/load.mjs build-custom-node # Normal build - * node scripts/load.mjs build-custom-node --clean # Force fresh build - * node scripts/load.mjs build-custom-node --yes # Auto-yes to prompts - * node scripts/load.mjs build-custom-node --verify # Verify after build - * node scripts/load.mjs build-custom-node --test # Build + run smoke tests - * node scripts/load.mjs build-custom-node --test-full # Build + run full tests - */ - -import { existsSync, readdirSync, promises as fs } from 'node:fs' -import { cpus, platform } from 'node:os' -import { dirname, join } from 'node:path' -import { fileURLToPath } from 'node:url' -import { brotliCompressSync, constants as zlibConstants } from 'node:zlib' - -import { parseArgs } from '@socketsecurity/lib/argv/parse' -import { whichBinSync } from '@socketsecurity/lib/bin' -import { WIN32 } from '@socketsecurity/lib/constants/platform' -import { safeDelete, safeMkdir } from '@socketsecurity/lib/fs' -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import { spawn } from '@socketsecurity/lib/spawn' -import nodeVersionConfig from '@socketsecurity/bootstrap/node-version.json' with { type: 'json' } - -const { version: NODE_VERSION } = nodeVersionConfig -import colors from 'yoctocolors-cjs' - -import { - checkCompiler, - checkDiskSpace, - checkNetworkConnectivity, - checkPythonVersion, - cleanCheckpoint, - createCheckpoint, - estimateBuildTime, - formatDuration, - getBuildLogPath, - getLastLogLines, - saveBuildLog, - smokeTestBinary, - verifyGitTag, -} from '@socketsecurity/build-infra/lib/build-helpers' -import { - generateHashComment, - shouldExtract, -} from '@socketsecurity/build-infra/lib/extraction-cache' -import { printError, printHeader, printWarning } from '@socketsecurity/build-infra/lib/build-output' -import { - analyzePatchContent, - checkPatchConflicts, - testPatchApplication, - validatePatch, -} from '@socketsecurity/build-infra/lib/patch-validator' -import { - ensureAllToolsInstalled, - ensurePackageManagerAvailable, - getInstallInstructions, - getPackageManagerInstructions, -} from '@socketsecurity/build-infra/lib/tool-installer' - -const __filename = fileURLToPath(import.meta.url) -const __dirname = dirname(__filename) - -// Hoist logger for consistent usage throughout the script. -const logger = getDefaultLogger() - -/** - * Execute command using spawn (replacement for exec). - * - * @param {string} command - Command to execute - * @param {string[]} args - Command arguments - * @param {object} options - Spawn options - * @returns {Promise} - */ -async function exec(command, args = [], options = {}) { - const result = await spawn( - Array.isArray(args) ? command : `${command} ${args}`, - Array.isArray(args) ? args : [], - { - stdio: 'inherit', - shell: WIN32, - ...options, - } - ) - if (result.code !== 0) { - throw new Error(`Command failed with exit code ${result.code}: ${command}`) - } -} - -// Parse arguments. 
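-// Example invocations (illustrative; the flags are defined just below, and the
-// load.mjs entry point mirrors the usage block in the file header):
-//   node scripts/load.mjs build-custom-node --clean --yes
-//   node scripts/load.mjs build-custom-node --prod --platform=darwin --arch=arm64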
-const { values } = parseArgs({ - options: { - arch: { type: 'string' }, - clean: { type: 'boolean' }, - dev: { type: 'boolean' }, - platform: { type: 'string' }, - prod: { type: 'boolean' }, - test: { type: 'boolean' }, - 'test-full': { type: 'boolean' }, - verify: { type: 'boolean' }, - yes: { type: 'boolean', short: 'y' }, - }, - strict: false, -}) - -const TARGET_PLATFORM = values.platform || platform() -const TARGET_ARCH = values.arch || process.arch -const CLEAN_BUILD = !!values.clean -const RUN_VERIFY = !!values.verify -const RUN_TESTS = !!values.test -const RUN_FULL_TESTS = !!values['test-full'] || !!values.testFull -const AUTO_YES = !!values.yes - -// Build mode: dev (fast builds) vs prod (optimized builds). -// Default to dev unless CI or --prod specified. -const IS_PROD_BUILD = values.prod || (!values.dev && 'CI' in process.env) -const IS_DEV_BUILD = !IS_PROD_BUILD - -// Configuration -const ROOT_DIR = join(__dirname, '..') -const NODE_SOURCE_DIR = join(ROOT_DIR, 'build', 'node-source') -const NODE_DIR = NODE_SOURCE_DIR // Alias for compatibility. -const BUILD_DIR = join(ROOT_DIR, 'build') -const PATCHES_DIR = join(ROOT_DIR, 'patches') -const ADDITIONS_DIR = join(ROOT_DIR, 'additions') - -// Directory structure. -// build/node-source/ - Node.js source code (gitignored). -// build/node-source/out/Release/node - Node.js build output (gitignored). -// build/out/Release/node - Copy of Release binary (gitignored). -// build/out/Stripped/node - Stripped binary (gitignored). -// build/out/Signed/node - Stripped + signed binary (macOS ARM64 only, gitignored). -// build/out/Final/node - Final binary for distribution (gitignored). -// build/out/Sea/node - Binary for SEA builds (gitignored). -// build/out/Distribution/node - Final distribution binary (gitignored). -// build/patches/ - All Node.js custom patches (tracked in git). - -/** - * Collect all source files that contribute to the smol build. - * Used for hash-based caching to detect when rebuild is needed. - * - * Cache Key Strategy (Local Script): - * =================================== - * This function generates a content-based hash using @socketsecurity/build-infra/lib/extraction-cache. - * The cache key is determined by hashing the CONTENT of these files: - * - * 1. All patch files (patches/*.patch) - * - Any change to Node.js patches invalidates cache - * - Example: patches/enable-brotli-loading-v24.patch - * - * 2. All addition files (additions/**) - * - Includes headers, source files, tools added to Node.js source tree - * - Example: additions/003-compression-tools/socketsecurity_macho_decompress - * - * 3. This build script itself (scripts/build.mjs) - * - Changes to build configuration flags invalidate cache - * - Example: modifying --without-node-code-cache flag - * - * NOTE: This differs from GitHub Actions cache key (see .github/workflows/build-smol.yml): - * - GitHub: Hashes file PATHS and includes bootstrap dependencies - * - Local: Hashes file CONTENT only (more precise, no bootstrap dependency) - * - Both: Stored in build/.cache/node.hash (local) or Actions cache (CI) - * - * @returns {string[]} Array of absolute paths to all source files - */ -function collectBuildSourceFiles() { - const sources = [] - - // Add all patch files. - if (existsSync(PATCHES_DIR)) { - const patchFiles = readdirSync(PATCHES_DIR) - .filter(f => f.endsWith('.patch')) - .map(f => join(PATCHES_DIR, f)) - sources.push(...patchFiles) - } - - // Add all addition files recursively. 
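-  // Only regular files are kept; directories (empty or not) are filtered out
-  // below so every hash input is readable file content.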
-  if (existsSync(ADDITIONS_DIR)) {
-    const addFiles = readdirSync(ADDITIONS_DIR, { recursive: true })
-      .filter(f => {
-        const fullPath = join(ADDITIONS_DIR, f)
-        if (!existsSync(fullPath)) {
-          return false
-        }
-        try {
-          // readdirSync() succeeds for directories (empty or not); skip them.
-          readdirSync(fullPath)
-          return false
-        } catch {
-          return true // It's a file, not a directory.
-        }
-      })
-      .map(f => join(ADDITIONS_DIR, f))
-    sources.push(...addFiles)
-  }
-
-  // Add this build script itself (changes to build logic should trigger rebuild).
-  sources.push(__filename)
-
-  return sources
-}
-
-/**
- * Find Socket patches for this Node version.
- * Includes both static patches (patches/) and dynamic patches (build/patches/).
- */
-function findSocketPatches() {
-  const patches = []
-
-  // Get static patches from patches/ directory.
-  if (existsSync(PATCHES_DIR)) {
-    const staticPatches = readdirSync(PATCHES_DIR)
-      .filter(f => f.endsWith('.patch') && !f.endsWith('.template.patch'))
-      .map(f => ({ name: f, path: join(PATCHES_DIR, f), source: 'patches/' }))
-    patches.push(...staticPatches)
-  }
-
-  // Get dynamic patches from build/patches/ directory.
-  const buildPatchesDir = join(BUILD_DIR, 'patches')
-  if (existsSync(buildPatchesDir)) {
-    const dynamicPatches = readdirSync(buildPatchesDir)
-      .filter(f => f.endsWith('.patch'))
-      .map(f => ({ name: f, path: join(buildPatchesDir, f), source: 'build/patches/' }))
-    patches.push(...dynamicPatches)
-  }
-
-  // Sort by name for consistent ordering.
-  patches.sort((a, b) => a.name.localeCompare(b.name))
-
-  if (patches.length > 0) {
-    logger.log(`  Found ${patches.length} patch file(s):`)
-    for (const patch of patches) {
-      logger.log(`    → ${patch.name} (${patch.source})`)
-    }
-  }
-
-  return patches
-}
-
-/**
- * Copy build additions to Node.js source tree.
- */
-async function copyBuildAdditions() {
-  if (!existsSync(ADDITIONS_DIR)) {
-    logger.log('  No build additions directory found, skipping')
-    return
-  }
-
-  printHeader('Copying Build Additions')
-
-  // Recursively copy entire additions directory structure to Node.js source.
-  await fs.cp(ADDITIONS_DIR, NODE_DIR, {
-    recursive: true,
-    force: true,
-    errorOnExist: false,
-  })
-
-  logger.log(
-    `✅ Copied ${ADDITIONS_DIR.replace(`${ROOT_DIR}/`, '')}/ → ${NODE_DIR}/`,
-  )
-
-  // Fix: The brotli header needs to be in src/ for node_builtins.cc to find it.
-  const brotliHeaderSource = join(NODE_DIR, '001-brotli-integration', 'socketsecurity_brotli_builtin_loader.h')
-  const brotliHeaderDest = join(NODE_DIR, 'src', 'socketsecurity_brotli_builtin_loader.h')
-
-  if (existsSync(brotliHeaderSource)) {
-    await fs.copyFile(brotliHeaderSource, brotliHeaderDest)
-    logger.log(`✅ Copied socketsecurity_brotli_builtin_loader.h to src/`)
-  }
-
-  // Fix: The bootstrap loader needs to be in lib/internal/ for Node.js to embed it as an internal module.
-  const bootstrapLoaderSource = join(NODE_DIR, '002-bootstrap-loader', 'internal', 'socketsecurity_bootstrap_loader.js')
-  const bootstrapLoaderDest = join(NODE_DIR, 'lib', 'internal', 'socketsecurity_bootstrap_loader.js')
-
-  if (existsSync(bootstrapLoaderSource)) {
-    await fs.copyFile(bootstrapLoaderSource, bootstrapLoaderDest)
-    logger.log(`✅ Copied socketsecurity_bootstrap_loader.js to lib/internal/`)
-  }
-
-  // Fix: Copy polyfills to lib/internal/socketsecurity_polyfills/ for external loading.
- const polyfillsSourceDir = join(NODE_DIR, '004-polyfills') - const polyfillsDestDir = join(NODE_DIR, 'lib', 'internal', 'socketsecurity_polyfills') - - if (existsSync(polyfillsSourceDir)) { - await safeMkdir(polyfillsDestDir) - - const localeCompareSource = join(polyfillsSourceDir, 'localeCompare.js') - const localeCompareDest = join(polyfillsDestDir, 'localeCompare.js') - if (existsSync(localeCompareSource)) { - await fs.copyFile(localeCompareSource, localeCompareDest) - logger.log(`✅ Copied localeCompare.js to lib/internal/socketsecurity_polyfills/`) - } - - const normalizeSource = join(polyfillsSourceDir, 'normalize.js') - const normalizeDest = join(polyfillsDestDir, 'normalize.js') - if (existsSync(normalizeSource)) { - await fs.copyFile(normalizeSource, normalizeDest) - logger.log(`✅ Copied normalize.js to lib/internal/socketsecurity_polyfills/`) - } - } - - // Fix: The brotli2c tool needs to be in tools/ for node.gyp build target. - const brotli2cSource = join(NODE_DIR, '003-compression-tools', 'socketsecurity_brotli2c.cc') - const brotli2cDest = join(NODE_DIR, 'tools', 'socketsecurity_brotli2c.cc') - - if (existsSync(brotli2cSource)) { - await fs.copyFile(brotli2cSource, brotli2cDest) - logger.log(`✅ Copied socketsecurity_brotli2c.cc to tools/`) - } - - logger.log('') -} - -/** - * Embed Socket security bootstrap in minimal injection approach. - * This processes the loader template with embedded bootstrap, - * then copies the minimal patch (no placeholder replacement needed). - * (Optional - only runs if bootstrap file exists) - */ -async function embedSocketSecurityBootstrap() { - // Use transformed bootstrap from bootstrap package (compatible with Node.js internal bootstrap context). - const bootstrapSource = join(ROOT_DIR, '..', 'bootstrap', 'dist', 'bootstrap-smol.js') - - // Always rebuild bootstrap to ensure latest version. - // Bootstrap build is fast (~5 seconds) and ensures version placeholders are current. - logger.log('') - logger.info(`Rebuilding @socketsecurity/bootstrap package...`) - logger.log('') - - const result = await spawn( - 'pnpm', - ['--filter', '@socketsecurity/bootstrap', 'run', 'build'], - { - cwd: join(ROOT_DIR, '../..'), - shell: WIN32, - stdio: 'inherit', - } - ) - - if (result.code !== 0) { - throw new Error(`Failed to build @socketsecurity/bootstrap package (exit code ${result.code})`) - } - - // Verify bootstrap was built. - if (!existsSync(bootstrapSource)) { - // Try to show what files exist to help diagnose. - logger.error(`Bootstrap file not found at: ${bootstrapSource}`) - logger.info(`Checking for bootstrap files...`) - - const bootstrapDir = dirname(bootstrapSource) - if (existsSync(bootstrapDir)) { - logger.info(`Directory exists: ${bootstrapDir}`) - try { - const files = await fs.readdir(bootstrapDir) - logger.info(`Files in directory: ${files.join(', ')}`) - } catch (e) { - logger.warn(`Could not list directory contents`) - } - } else { - logger.error(`Directory does not exist: ${bootstrapDir}`) - } - - throw new Error(`Bootstrap build succeeded but dist file not found at: ${bootstrapSource}`) - } - - logger.log('') - - printHeader('Embedding Socket Security Bootstrap (Minimal Injection)') - - // Read the bootstrap code. - const bootstrapCode = await fs.readFile(bootstrapSource, 'utf8') - const bootstrapSize = Buffer.byteLength(bootstrapCode, 'utf8') - - // Base64 encode the bootstrap (will be decoded at runtime in Node.js). 
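-  // The loader template reverses this at runtime, roughly (variable names
-  // illustrative):
-  //   const code = Buffer.from(embeddedBase64, 'base64').toString('utf8')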
- const bootstrapB64 = Buffer.from(bootstrapCode, 'utf8').toString('base64') - const bootstrapB64Size = bootstrapB64.length - - logger.log(`📦 Bootstrap size: ${(bootstrapSize / 1024).toFixed(1)}KB`) - logger.log(`📦 Base64 encoded: ${(bootstrapB64Size / 1024).toFixed(1)}KB`) - - // Split base64 into chunks to avoid line length issues. - // 80 characters per line is safe for all environments. - const chunkSize = 80 - const base64Chunks = [] - for (let i = 0; i < bootstrapB64.length; i += chunkSize) { - base64Chunks.push(bootstrapB64.slice(i, i + chunkSize)) - } - - // Format as multi-line JavaScript string concatenation for loader template. - const base64MultiLine = base64Chunks - .map((chunk, index) => { - if (index === 0) { - return `'${chunk}'` - } - // Continuation lines with proper indentation. - return ` '${chunk}'` - }) - .join(' +\n') - - // Read the loader template. - const loaderTemplatePath = join(ADDITIONS_DIR, '002-bootstrap-loader', 'internal', 'socketsecurity_bootstrap_loader.js.template') - const loaderTemplate = await fs.readFile(loaderTemplatePath, 'utf8') - - // Embed the bootstrap in the loader template. - const finalLoader = loaderTemplate.replace( - 'SOCKET_BOOTSTRAP_BASE64_PLACEHOLDER', - base64MultiLine - ) - - // Write the processed loader to additions/ (will be copied during copyBuildAdditions phase). - const finalLoaderPath = join(ADDITIONS_DIR, '002-bootstrap-loader', 'internal', 'socketsecurity_bootstrap_loader.js') - await safeMkdir(dirname(finalLoaderPath), { recursive: true }) - await fs.writeFile(finalLoaderPath, finalLoader, 'utf8') - - logger.log(`✅ Generated loader: ${finalLoaderPath.replace(`${ROOT_DIR}/`, '')}`) - logger.log(` ${(finalLoader.length / 1024).toFixed(1)}KB (includes embedded bootstrap)`) - - // Copy the minimal patch template to build/patches/ (no placeholder replacement needed). - const minimalPatchTemplatePath = join(PATCHES_DIR, '001-socketsecurity_bootstrap_preexec_v24.10.0.template.patch') - const buildPatchesDir = join(BUILD_DIR, 'patches') - await safeMkdir(buildPatchesDir, { recursive: true }) - - const finalPatchPath = join(buildPatchesDir, 'socketsecurity_bootstrap_preexec_v24.10.0.patch') - await fs.copyFile(minimalPatchTemplatePath, finalPatchPath) - - logger.log(`✅ Copied minimal patch: ${finalPatchPath.replace(`${ROOT_DIR}/`, '')}`) - logger.log(` 1-line injection calling internal/socketsecurity_bootstrap_loader`) - logger.log('') -} - -const CPU_COUNT = cpus().length -const IS_MACOS = TARGET_PLATFORM === 'darwin' -const IS_WINDOWS = TARGET_PLATFORM === 'win32' -const ARCH = TARGET_ARCH - -/** - * Check if Node.js source has uncommitted changes. - */ -async function isNodeSourceDirty() { - try { - const result = await spawn('git', ['status', '--porcelain'], { - cwd: NODE_DIR, - stdio: 'pipe', - stdioString: true, - }) - return result.code === 0 && (result.stdout ?? '').trim().length > 0 - } catch { - return false - } -} - -/** - * Reset Node.js source to pristine state. - */ -async function resetNodeSource() { - logger.log('Fetching latest tags...') - await exec( - 'git', - [ - 'fetch', - '--depth', - '1', - 'origin', - `refs/tags/${NODE_VERSION}:refs/tags/${NODE_VERSION}`, - ], - { - cwd: NODE_DIR, - }, - ) - logger.log('Resetting to clean state...') - await exec('git', ['reset', '--hard', NODE_VERSION], { cwd: NODE_DIR }) - await exec('git', ['clean', '-fdx'], { cwd: NODE_DIR }) - logger.log(`${colors.green('✓')} Node.js source reset to clean state`) - logger.log('') -} - -/** - * Get file size in human-readable format. 
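- *
- * Illustrative outputs (1024-based units, one decimal place):
- *   26214400 → "25.0M", 512 → "512.0B", 0 → "0B"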
- */ -async function getFileSize(filePath) { - const stats = await fs.stat(filePath) - const bytes = stats.size - - if (bytes === 0) return '0B' - - const k = 1024 - const sizes = ['B', 'K', 'M', 'G', 'T'] - const i = Math.floor(Math.log(bytes) / Math.log(k)) - const size = (bytes / k ** i).toFixed(1) - - return `${size}${sizes[i]}` -} - -/** - * Get cache directory for compiled binaries. - * - * @param {string} buildDir - Build directory path - * @returns {string} Cache directory path - */ -function getCacheDir(buildDir) { - return join(buildDir, 'cache') -} - -/** - * Get cache file path for compiled binary. - * - * @param {string} buildDir - Build directory path - * @param {string} platform - Target platform - * @param {string} arch - Target architecture - * @returns {string} Cache file path - */ -function getCachePath(buildDir, platform, arch) { - return join(getCacheDir(buildDir), `node-compiled-${platform}-${arch}`) -} - -/** - * Get cache metadata file path. - * - * @param {string} buildDir - Build directory path - * @param {string} platform - Target platform - * @param {string} arch - Target architecture - * @returns {string} Cache metadata file path - */ -function getCacheMetadataPath(buildDir, platform, arch) { - return join(getCacheDir(buildDir), `node-compiled-${platform}-${arch}.json`) -} - -/** - * Cache compiled binary after successful build. - * This allows resuming from this point if post-processing fails. - * - * @param {string} buildDir - Build directory path - * @param {string} nodeBinary - Path to compiled Node.js binary - * @param {string} platform - Target platform - * @param {string} arch - Target architecture - * @param {string} version - Node.js version - * @returns {Promise} - */ -async function cacheCompiledBinary(buildDir, nodeBinary, platform, arch, version) { - const cacheDir = getCacheDir(buildDir) - const cacheFile = getCachePath(buildDir, platform, arch) - const cacheMetaFile = getCacheMetadataPath(buildDir, platform, arch) - - // Create cache directory. - await safeMkdir(cacheDir, { recursive: true }) - - // Copy binary to cache. - await fs.copyFile(nodeBinary, cacheFile) - - // Get binary stats for metadata. - const stats = await fs.stat(nodeBinary) - const size = await getFileSize(nodeBinary) - - // Save metadata. - const metadata = { - platform, - arch, - version, - timestamp: Date.now(), - size: stats.size, - humanSize: size, - } - await fs.writeFile(cacheMetaFile, JSON.stringify(metadata, null, 2)) - - logger.log(`${colors.green('✓')} Cached compiled binary (${size})`) - logger.log(` Cache location: ${cacheFile}`) -} - -/** - * Restore cached binary if available and valid. - * Returns true if restore successful, false if no valid cache exists. - * - * @param {string} buildDir - Build directory path - * @param {string} nodeBinary - Path where to restore Node.js binary - * @param {string} platform - Target platform - * @param {string} arch - Target architecture - * @param {string} version - Expected Node.js version - * @returns {Promise} True if restored, false if no valid cache - */ -async function restoreCachedBinary(buildDir, nodeBinary, platform, arch, version) { - const cacheFile = getCachePath(buildDir, platform, arch) - const cacheMetaFile = getCacheMetadataPath(buildDir, platform, arch) - - // Check if cache files exist. - if (!existsSync(cacheFile) || !existsSync(cacheMetaFile)) { - return false - } - - try { - // Validate metadata matches current build. 
- const metaContent = await fs.readFile(cacheMetaFile, 'utf8') - const meta = JSON.parse(metaContent) - - if (meta.platform !== platform || meta.arch !== arch) { - logger.warn('Cached binary is for different platform/arch, ignoring cache') - return false - } - - if (meta.version !== version) { - logger.warn(`Cached binary is for Node.js ${meta.version}, expected ${version}, ignoring cache`) - return false - } - - // Ensure output directory exists. - await safeMkdir(dirname(nodeBinary), { recursive: true }) - - // Restore binary. - await fs.copyFile(cacheFile, nodeBinary) - - const size = await getFileSize(nodeBinary) - logger.log(`${colors.green('✓')} Restored cached binary (${size})`) - logger.log(` From: ${cacheFile}`) - - // Smoke test: verify binary can execute --version. - try { - const versionResult = await spawn(nodeBinary, ['--version'], { timeout: 5_000 }) - if (versionResult.code === 0) { - logger.log(`${colors.green('✓')} Binary smoke test passed (--version)`) - } else { - logger.warn('Binary exists but failed smoke test, will rebuild') - return false - } - } catch (e) { - logger.warn(`Binary smoke test failed: ${e.message}, will rebuild`) - return false - } - - return true - } catch (e) { - logger.warn(`Failed to restore cache: ${e.message}`) - return false - } -} - -/** - * Check if required tools are available, auto-installing if possible. - */ -async function checkRequiredTools() { - printHeader('Pre-flight Checks') - - // Step 1: Ensure package manager is available. - const pmResult = await ensurePackageManagerAvailable({ - autoInstall: AUTO_YES, - autoYes: AUTO_YES, - }) - - const canAutoInstall = pmResult.available - - if (pmResult.installed) { - logger.success(`Package manager (${pmResult.manager}) installed successfully`) - } else if (pmResult.available) { - logger.log(`📦 Package manager detected: ${pmResult.manager}`) - } else { - logger.warn('No package manager available for auto-installing tools') - const pmInstructions = getPackageManagerInstructions() - for (const instruction of pmInstructions) { - logger.substep(instruction) - } - } - - // Step 2: Tools that support auto-installation. - const autoInstallableTools = ['git', 'curl', 'patch', 'make'] - - // Step 3: Tools that must be checked manually (no package manager support). - const manualTools = [ - // macOS strip doesn't support --version, just check if it exists. - { name: 'strip', cmd: 'strip', checkExists: true }, - ] - - if (IS_MACOS && ARCH === 'arm64') { - // macOS codesign doesn't support --version, just check if it exists. - manualTools.push({ - name: 'codesign', - cmd: 'codesign', - checkExists: true, - }) - } - - // Step 4: Attempt auto-installation for missing tools. - const result = await ensureAllToolsInstalled(autoInstallableTools, { - autoInstall: canAutoInstall, - autoYes: AUTO_YES, - }) - - // Step 5: Report results. - for (const tool of autoInstallableTools) { - if (result.installed.includes(tool)) { - logger.success(`${tool} installed automatically`) - } else if (!result.missing.includes(tool)) { - logger.log(`${colors.green('✓')} ${tool} is available`) - } - } - - // Step 6: Check manual tools. - let allManualAvailable = true - for (const { checkExists, cmd, name } of manualTools) { - const binPath = whichBinSync(cmd, { nothrow: true }) - if (binPath) { - logger.log(`${colors.green('✓')} ${name} is available`) - } else { - logger.error(`${colors.red('✗')} ${name} is NOT available`) - allManualAvailable = false - } - } - - // Step 7: Handle missing tools. 
- if (!result.allAvailable || !allManualAvailable) { - const missingTools = [...result.missing, ...manualTools.filter(t => !whichBinSync(t.cmd, { nothrow: true })).map(t => t.name)] - - if (missingTools.length > 0) { - const instructions = [] - instructions.push('Missing required build tools:') - instructions.push('') - - for (const tool of missingTools) { - const toolInstructions = getInstallInstructions(tool) - instructions.push(...toolInstructions) - instructions.push('') - } - - if (IS_MACOS) { - instructions.push('For Xcode Command Line Tools:') - instructions.push(' xcode-select --install') - } - - printError('Missing Required Tools', 'Some required build tools are not available.', instructions) - throw new Error('Missing required build tools') - } - } - - logger.log('') -} - -/** - * Check build environment (Python, compiler, disk space, network). - */ -async function checkBuildEnvironment() { - printHeader('Build Environment Checks') - - let allChecks = true - - // Check 1: Disk space. - logger.log('Checking available disk space...') - const diskSpace = await checkDiskSpace(BUILD_DIR) - if (diskSpace.availableGB !== null) { - if (diskSpace.sufficient) { - logger.success( - `Disk space: ${diskSpace.availableGB}GB available (need 5GB)`, - ) - } else { - logger.fail( - `Disk space: Only ${diskSpace.availableGB}GB available (need 5GB)`, - ) - logger.substep('Free up disk space before building') - allChecks = false - } - } else { - logger.warn('Could not check disk space (continuing anyway)') - } - - // Check 2: Python version. - logger.log('Checking Python version...') - const python = await checkPythonVersion() - if (python.available && python.sufficient) { - logger.success(`Python ${python.version} is available`) - } else if (python.available && !python.sufficient) { - logger.fail(`Python ${python.version} is too old (need Python 3.6+)`) - allChecks = false - } else { - logger.fail('Python is not available') - logger.substep('Node.js build requires Python 3.6 or later') - allChecks = false - } - - // Check 3: C++ compiler. - logger.log('Checking C++ compiler...') - const compiler = await checkCompiler() - if (compiler.available) { - logger.success(`C++ compiler (${compiler.compiler}) is available`) - } else { - logger.fail('C++ compiler is not available') - logger.substep('Node.js build requires clang++, g++, or c++') - allChecks = false - } - - // Check 4: Network connectivity. - logger.log('Checking network connectivity...') - const network = await checkNetworkConnectivity() - if (network.connected) { - logger.success('Network connection to GitHub is working') - } else { - logger.fail('Cannot reach GitHub') - logger.substep('Check your internet connection') - allChecks = false - } - - logger.logNewline() - - if (!allChecks) { - printError( - 'Build Environment Not Ready', - 'Some required build environment checks failed.', - [ - 'Fix the issues above before building', - 'Disk space: Free up space if needed', - 'Python: Install Python 3.6+ (python.org or brew install python)', - 'Compiler: Install Xcode Command Line Tools (xcode-select --install)', - 'Network: Check your internet connection', - ], - ) - throw new Error('Build environment checks failed') - } - - logger.success('Build environment is ready') - logger.logNewline() -} - -/** - * Verify Socket modifications were applied correctly. - */ -async function verifySocketModifications() { - printHeader('Verifying Socket Modifications') - - let allApplied = true - - // Check 1: lib/sea.js modification. 
- logger.log('Checking lib/sea.js modification...') - const seaFile = join(NODE_DIR, 'lib', 'sea.js') - try { - const content = await fs.readFile(seaFile, 'utf8') - if (content.includes('const isSea = () => true;')) { - logger.success('lib/sea.js correctly modified (SEA override applied)') - } else { - logger.fail('lib/sea.js modification FAILED') - logger.substep('Expected: const isSea = () => true;') - allApplied = false - } - } catch (e) { - logger.fail(`Cannot read lib/sea.js: ${e.message}`) - allApplied = false - } - - // Check 2: V8 include paths (v24.10.0+ doesn't need fixes). - logger.log('Checking V8 include paths...') - const testFile = join(NODE_DIR, 'deps/v8/src/heap/cppgc/heap-page.h') - try { - const content = await fs.readFile(testFile, 'utf8') - // For v24.10.0+, the CORRECT include has "src/" prefix. - if (content.includes('#include "src/base/iterator.h"')) { - logger.success( - 'V8 include paths are correct (no modification needed for v24.10.0+)', - ) - } else if (content.includes('#include "base/iterator.h"')) { - logger.fail('V8 include paths were incorrectly modified!') - logger.substep('v24.10.0+ needs "src/" prefix in includes') - logger.substep('Build will fail - source was corrupted') - allApplied = false - } else { - logger.warn('V8 include structure may have changed (cannot verify)') - } - } catch (e) { - logger.warn(`Cannot verify V8 includes: ${e.message}`) - } - - // Check 3: localeCompare polyfill (kept as safety layer with small-icu). - logger.log('Checking localeCompare polyfill...') - const primordialFile = join( - NODE_DIR, - 'lib', - 'internal', - 'per_context', - 'primordials.js', - ) - try { - const content = await fs.readFile(primordialFile, 'utf8') - if (content.includes('Socket CLI: Polyfill localeCompare')) { - logger.success( - 'primordials.js correctly modified (localeCompare polyfill)', - ) - } else { - logger.warn('localeCompare polyfill not applied (may not be needed with small-icu)') - } - } catch (e) { - logger.warn(`Cannot verify primordials.js: ${e.message}`) - } - - // Check 4: String.prototype.normalize polyfill (kept as safety layer with small-icu). - logger.log('Checking normalize polyfill...') - const bootstrapFile = join( - NODE_DIR, - 'lib', - 'internal', - 'bootstrap', - 'node.js', - ) - try { - const content = await fs.readFile(bootstrapFile, 'utf8') - if (content.includes('Socket CLI: Polyfill String.prototype.normalize')) { - logger.success( - 'bootstrap/node.js correctly modified (normalize polyfill)', - ) - } else { - logger.warn('normalize polyfill not applied (may not be needed with small-icu)') - } - } catch (e) { - logger.warn(`Cannot verify bootstrap/node.js: ${e.message}`) - } - - logger.logNewline() - - if (!allApplied) { - printError( - 'Socket Modifications Not Applied', - 'Critical Socket modifications were not applied to Node.js source.', - [ - 'This is a BUG in the build script', - 'The binary will NOT work correctly with pkg', - 'Run: node scripts/build-custom-node.mjs --clean', - 'Report this issue if it persists', - ], - ) - throw new Error('Socket modifications verification failed') - } - - logger.success( - 'All Socket modifications verified for --with-intl=small-icu', - ) - logger.logNewline() -} - -/** - * Apply Socket modifications for --with-intl=none compatibility. - * - * These source transforms help ensure Node.js APIs work correctly - * when compiled without ICU (International Components for Unicode). - */ -// Function removed: applySocketModificationsDirectly(). 
-// Socket modifications must be applied via patches only. -// If patches fail, the build should fail with helpful error messages. - - -/** - * Main build function. - */ -async function main() { - logger.log('') - logger.log('🔨 Socket CLI - Custom Node.js Builder') - logger.log(` Building Node.js ${NODE_VERSION} with custom patches`) - logger.log('') - - // Start timing total build. - const totalStart = Date.now() - - // Initialize build log. - await saveBuildLog(BUILD_DIR, '━'.repeat(60)) - await saveBuildLog(BUILD_DIR, ' Socket CLI - Custom Node.js Builder') - await saveBuildLog(BUILD_DIR, ` Node.js ${NODE_VERSION} with custom patches`) - await saveBuildLog(BUILD_DIR, ` Started: ${new Date().toISOString()}`) - await saveBuildLog(BUILD_DIR, '━'.repeat(60)) - await saveBuildLog(BUILD_DIR, '') - - // Phase 1: Pre-flight checks. - await saveBuildLog(BUILD_DIR, 'Phase 1: Pre-flight Checks') - await checkRequiredTools() - await checkBuildEnvironment() - await saveBuildLog(BUILD_DIR, 'Pre-flight checks completed') - await saveBuildLog(BUILD_DIR, '') - - // Ensure build directory exists. - await safeMkdir(BUILD_DIR, { recursive: true }) - - // Check if we can use cached build (skip if --clean). - if (!CLEAN_BUILD) { - const finalOutputBinary = join(BUILD_DIR, 'out', 'Final', IS_WINDOWS ? 'node.exe' : 'node') - const distBinary = join(ROOT_DIR, 'dist', 'socket-smol') - const distSeaBinary = join(ROOT_DIR, 'dist', 'socket-sea') - - // Collect all source files that affect the build. - const sourcePaths = collectBuildSourceFiles() - - // Check if build is needed based on source file hashes. - // Store hash in centralized build/.cache/ directory. - const cacheDir = join(BUILD_DIR, '.cache') - const hashFilePath = join(cacheDir, 'node.hash') - const needsExtraction = await shouldExtract({ - sourcePaths, - outputPath: hashFilePath, - validateOutput: () => { - // Verify final binary, hash file, and at least one dist binary exist. - return existsSync(finalOutputBinary) && - existsSync(hashFilePath) && - (existsSync(distBinary) || existsSync(distSeaBinary)) - }, - }) - - if (!needsExtraction) { - // Cache hit! Binary is up to date. - logger.log('') - printHeader('✅ Using Cached Build') - logger.log('All source files unchanged since last build.') - logger.log('') - logger.substep(`Final binary: ${finalOutputBinary}`) - logger.substep(`E2E binary: ${existsSync(distBinary) ? distBinary : distSeaBinary}`) - logger.log('') - logger.success('Cached build is ready to use') - logger.log('') - return - } - } - - // Phase 3: Verify Git tag exists before cloning. - printHeader('Verifying Node.js Version') - logger.log(`Checking if ${NODE_VERSION} exists in Node.js repository...`) - const tagCheck = await verifyGitTag(NODE_VERSION) - if (!tagCheck.exists) { - printError( - 'Invalid Node.js Version', - `Version ${NODE_VERSION} does not exist in Node.js repository.`, - [ - 'Check available versions: https://github.com/nodejs/node/tags', - 'Update NODE_VERSION in this script to a valid version', - 'Make sure version starts with "v" (e.g., v24.10.0)', - ], - ) - throw new Error('Invalid Node.js version') - } - logger.log(`${colors.green('✓')} ${NODE_VERSION} exists in Node.js repository`) - logger.log('') - - // Clone or reset Node.js repository. 
- if (!existsSync(NODE_DIR) || CLEAN_BUILD) { - if (existsSync(NODE_DIR) && CLEAN_BUILD) { - printHeader('Clean Build Requested') - logger.log('Removing existing Node.js source directory...') - const { rm } = await import('node:fs/promises') - await safeDelete(NODE_DIR, { recursive: true, force: true }) - await cleanCheckpoint(BUILD_DIR) - logger.log(`${colors.green('✓')} Cleaned build directory`) - logger.log('') - } - - printHeader('Cloning Node.js Source') - logger.log(`Version: ${NODE_VERSION}`) - logger.log('Repository: https://github.com/nodejs/node.git') - logger.log('') - logger.info('This will download ~200-300 MB (shallow clone with --depth=1 --single-branch)...') - logger.log('Retry: Up to 3 attempts if clone fails') - logger.log('') - - // Git clone with retry (network can fail during long downloads). - let cloneSuccess = false - for (let attempt = 1; attempt <= 3; attempt++) { - try { - if (attempt > 1) { - logger.log(`Retry attempt ${attempt}/3...`) - logger.log('') - } - - await exec( - 'git', - [ - 'clone', - '--depth', - '1', - '--single-branch', - '--branch', - NODE_VERSION, - 'https://github.com/nodejs/node.git', - NODE_DIR, - ], - { cwd: ROOT_DIR }, - ) - cloneSuccess = true - break - } catch (e) { - if (attempt === 3) { - printError( - 'Git Clone Failed', - `Failed to clone Node.js repository after 3 attempts: ${e.message}`, - [ - 'Check your internet connection', - 'Try again in a few minutes', - 'Manually clone:', - ` cd ${ROOT_DIR}`, - ` git clone --depth 1 --branch ${NODE_VERSION} https://github.com/nodejs/node.git ${NODE_DIR}`, - ], - ) - throw new Error('Git clone failed after retries') - } - - logger.warn(`${colors.yellow('⚠')} Clone attempt ${attempt} failed: ${e.message}`) - - // Clean up partial clone. - try { - const { rm } = await import('node:fs/promises') - await safeDelete(NODE_DIR, { recursive: true, force: true }) - } catch { - // Ignore cleanup errors. - } - - // Wait before retry. - const waitTime = 2000 * attempt - logger.log(`${colors.blue('ℹ')} Waiting ${waitTime}ms before retry...`) - logger.log('') - await new Promise(resolve => setTimeout(resolve, waitTime)) - } - } - - if (cloneSuccess) { - logger.log(`${colors.green('✓')} Node.js source cloned successfully`) - await createCheckpoint(BUILD_DIR, 'cloned') - logger.log('') - } - } else { - printHeader('Using Existing Node.js Source') - - // Check if source has uncommitted changes. - const isDirty = await isNodeSourceDirty() - if (isDirty && !AUTO_YES) { - printWarning( - 'Node.js Source Has Uncommitted Changes', - 'The build/node-source directory has uncommitted changes from a previous build or crash.', - [ - 'These changes will be discarded to ensure a clean build', - 'Press Ctrl+C now if you want to inspect the changes first', - 'Or wait 5 seconds to continue with automatic reset...', - ], - ) - - // Wait 5 seconds before proceeding. - await new Promise(resolve => setTimeout(resolve, 5000)) - logger.log('') - } else if (isDirty && AUTO_YES) { - logger.log( - '⚠️ Node.js source has uncommitted changes (auto-resetting with --yes)', - ) - logger.log('') - } - - await resetNodeSource() - } - - // Embed Socket security bootstrap in minimal injection approach. - // This must run BEFORE copyBuildAdditions() so the processed loader is copied. - await embedSocketSecurityBootstrap() - - // Copy build additions (includes processed bootstrap loader). - await copyBuildAdditions() - - // Apply Socket patches (including the dynamically generated bootstrap loader). 
- const socketPatches = findSocketPatches() - - if (socketPatches.length > 0) { - // Validate Socket patches before applying. - printHeader('Validating Socket Patches') - logger.log(`Found ${socketPatches.length} patch(es) for ${NODE_VERSION}`) - logger.log('Checking integrity, compatibility, and conflicts...') - logger.log('') - - const patchData = [] - let allValid = true - - for (const patch of socketPatches) { - logger.group(` ${colors.blue('ℹ')} Validating ${patch.name}`) - - const isValid = await validatePatch(patch.path, NODE_DIR) - if (!isValid) { - logger.error(`${colors.red('✗')} INVALID: Patch validation failed`) - logger.groupEnd() - allValid = false - continue - } - - const content = await fs.readFile(patch.path, 'utf8') - const analysis = analyzePatchContent(content) - - patchData.push({ - name: patch.name, - path: patch.path, - analysis, - }) - if (analysis.modifiesV8Includes) { - logger.log(`${colors.green('✓')} Modifies V8 includes`) - } - if (analysis.modifiesSEA) { - logger.log(`${colors.green('✓')} Modifies SEA detection`) - } - logger.log(`${colors.green('✓')} Valid`) - logger.groupEnd() - } - - if (!allValid) { - throw new Error( - 'Socket patch validation failed.\n\n' + - `One or more Socket patches are invalid or incompatible with Node.js ${NODE_VERSION}.\n\n` + - 'Possible causes:\n' + - ' - Patch files are corrupted\n' + - ` - Patches don't match this Node.js version\n` + - ' - Node.js source has unexpected modifications\n\n' + - 'To fix:\n' + - ` 1. Verify patch files in ${PATCHES_DIR}\n` + - ' 2. Regenerate patches if needed:\n' + - ` node scripts/regenerate-node-patches.mjs --version=${NODE_VERSION}\n` + - ' 3. Check build/patches/README.md for patch creation guide', - ) - } - // Check for conflicts between patches. - const conflicts = checkPatchConflicts(patchData, NODE_VERSION) - if (conflicts.length > 0) { - logger.warn(`${colors.yellow('⚠')} Patch Conflicts Detected:`) - logger.warn() - for (const conflict of conflicts) { - if (conflict.severity === 'error') { - logger.error(` ${colors.red('✗')} ERROR: ${conflict.message}`) - allValid = false - } else { - logger.warn(` ${colors.yellow('⚠')} WARNING: ${conflict.message}`) - } - } - logger.warn() - - if (!allValid) { - throw new Error( - 'Critical patch conflicts detected.\n\n' + - `Socket patches have conflicts and cannot be applied to Node.js ${NODE_VERSION}.\n\n` + - 'Conflicts found:\n' + - conflicts - .filter(c => c.severity === 'error') - .map(c => ` - ${c.message}`) - .join('\n') + - '\n\n' + - 'To fix:\n' + - ' 1. Remove conflicting patches\n' + - ` 2. Use version-specific patches for ${NODE_VERSION}\n` + - ' 3. Regenerate patches:\n' + - ` node scripts/regenerate-node-patches.mjs --version=${NODE_VERSION}\n` + - ' 4. See build/patches/socket/README.md for guidance', - ) - } - } else { - logger.log(`${colors.green('✓')} All Socket patches validated successfully`) - logger.log(`${colors.green('✓')} No conflicts detected`) - logger.log('') - } - - // Patches validated successfully, ready to apply. - - // Apply patches if validation and dry-run passed. - if (allValid) { - printHeader('Applying Socket Patches') - for (const { name, path: patchPath } of patchData) { - logger.log(`Applying ${name}...`) - try { - // Use -p1 to match Git patch format (strips a/ and b/ prefixes). - // Use --batch to avoid interactive prompts. - // Use --forward to skip if already applied. 
-          await exec(
-            'sh',
-            ['-c', `patch -p1 --batch --forward < "${patchPath}"`],
-            { cwd: NODE_DIR },
-          )
-          logger.log(`${colors.green('✓')} ${name} applied`)
-        } catch (e) {
-          throw new Error(
-            'Socket patch application failed.\n\n' +
-              `Failed to apply patch: ${name}\n` +
-              `Node.js version: ${NODE_VERSION}\n` +
-              `Patch path: ${patchPath}\n\n` +
-              `Error: ${e.message}\n\n` +
-              'This usually means:\n' +
-              '  - The patch is outdated for this Node.js version\n' +
-              '  - Node.js source has unexpected modifications\n' +
-              '  - Patch file format is invalid\n\n' +
-              'To fix:\n' +
-              '  1. Verify Node.js source is clean\n' +
-              '  2. Regenerate patches:\n' +
-              `     node scripts/regenerate-node-patches.mjs --version=${NODE_VERSION}\n` +
-              '  3. See build/patches/README.md for troubleshooting',
-          )
-        }
-      }
-      logger.log(`${colors.green('✓')} All Socket patches applied successfully`)
-      logger.log('')
-    }
-  } else {
-    throw new Error(
-      `No Socket patches found for Node.js ${NODE_VERSION}.\n\n` +
-        `Expected patches in: ${PATCHES_DIR}\n\n` +
-        'Socket patches are required for all Node.js builds. Patches must exist before building.\n\n' +
-        'To fix:\n' +
-        `  1. Create patches for ${NODE_VERSION}:\n` +
-        `     node scripts/regenerate-node-patches.mjs --version=${NODE_VERSION}\n` +
-        '  2. See build/patches/README.md for patch creation guide\n' +
-        '  3. Patches must be committed to the repository before building\n\n' +
-        'Note: For new Node.js versions, you must create patches following the standard\n' +
-        'patch creation process documented in build/patches/README.md',
-    )
-  }
-
-  // Verify modifications were applied.
-  await verifySocketModifications()
-
-  // Configure Node.js with optimizations.
-  printHeader('Configuring Node.js Build')
-
-  if (IS_DEV_BUILD) {
-    logger.log(`${colors.cyan('🚀 DEV BUILD MODE')} - Fast builds, larger binaries`)
-    logger.log('')
-    logger.log('Optimization flags:')
-    logger.log(
-      `  ${colors.green('✓')} KEEP: Full V8 (TurboFan JIT), WASM, SSL/crypto`,
-    )
-    logger.log(
-      `  ${colors.green('✓')} REMOVE: npm, corepack, inspector, amaro, sqlite, SEA`,
-    )
-    logger.log(
-      `  ${colors.green('✓')} DISABLED: LTO (Link Time Optimization) for faster builds`,
-    )
-    logger.log(
-      `  ${colors.green('✓')} DISABLED: V8 Lite Mode for faster JS execution`,
-    )
-    logger.log('')
-    logger.log(
-      'Expected binary size: ~80-90MB (before stripping), ~40-50MB (after)',
-    )
-    logger.log('Expected build time: ~50% faster than production builds')
-  } else {
-    logger.log(`${colors.magenta('⚡ PRODUCTION BUILD MODE')} - Optimized for size/distribution`)
-    logger.log('')
-    logger.log('Optimization flags:')
-    logger.log(
-      `  ${colors.green('✓')} KEEP: V8 Lite Mode (baseline compiler), WASM (Liftoff), SSL/crypto`,
-    )
-    logger.log(
-      `  ${colors.green('✓')} REMOVE: npm, corepack, inspector, amaro, sqlite, SEA, full ICU, TurboFan JIT`,
-    )
-    logger.log(`  ${colors.green('✓')} ICU: small-icu (English-only, saves ~5 MB vs full ICU)`)
-    logger.log(
-      `  ${colors.green('✓')} V8 Lite Mode: Disables TurboFan optimizer (saves ~15-20 MB)`,
-    )
-    const ltoNote = WIN32 ? ', LTCG' : ', LTO'
-    logger.log(
-      `  ${colors.green('✓')} OPTIMIZATIONS: no-snapshot, no-code-cache, no-SEA, V8 Lite${ltoNote}`,
-    )
-    logger.log('')
-    logger.log(
-      `  ${colors.green('✓')} V8 LITE MODE: JavaScript runs 5-10x slower (CPU-bound code)`,
-    )
-    logger.log(`  ${colors.green('✓')} WASM: Full speed (uses Liftoff compiler, unaffected)`)
-    logger.log(`  ${colors.green('✓')} I/O: No impact (network, file operations)`)
-    logger.log('')
-    logger.log(
-      'Expected binary size: ~60MB (before stripping), ~23-27MB (after)',
-    )
-  }
-  logger.log('')
-
-  const configureFlags = [
-    '--ninja', // Use Ninja build system (faster parallel builds than make)
-    '--with-intl=small-icu', // -5 MB: English-only ICU (supports Unicode property escapes)
-    // Note: small-icu provides essential Unicode support while keeping binary small
-    '--without-npm',
-    '--without-corepack',
-    '--without-inspector',
-    '--without-amaro',
-    '--without-sqlite',
-    '--without-node-snapshot',
-    '--without-node-code-cache', // Disable built-in code cache.
-    // Note: --v8-disable-object-print disabled to enable proper error output.
-    // '--v8-disable-object-print',
-    '--without-node-options',
-    '--disable-single-executable-application', // -1-2 MB: SEA not needed for pkg
-  ]
-
-  // Production-only optimizations (slow builds, smaller binaries).
-  if (IS_PROD_BUILD) {
-    configureFlags.push('--v8-lite-mode') // -15-20 MB: Disables TurboFan JIT (JS slower, WASM unaffected)
-    // Link Time Optimization (very slow, saves ~5-10MB).
-    // NOTE: LTCG disabled on Windows due to LNK2005 multiply defined symbol errors.
-    // See: https://github.com/nodejs/node/pull/21186 (Node.js made LTCG optional)
-    // Error: abseil.lib(mutex.obj) conflicts with v8_libbase.lib(mutex.obj)
-    if (!WIN32) {
-      configureFlags.push('--enable-lto') // Unix/Linux/macOS: Use standard LTO.
-    }
-  }
-
-  // Add architecture flag for cross-compilation or explicit targeting.
-  if (ARCH === 'arm64') {
-    configureFlags.unshift('--dest-cpu=arm64')
-  } else if (ARCH === 'x64') {
-    configureFlags.unshift('--dest-cpu=x64')
-  }
-
-  // Windows uses configure.py directly, Unix uses ./configure wrapper script.
-  // Use whichBinSync to resolve full path to python.exe since we use shell: false.
-  // Note: VS version is set via GYP_MSVS_VERSION and GYP_MSVS_OVERRIDE_PATH env vars
-  // (configured in workflow) which bypasses gyp's registry detection.
-  // https://github.com/nodejs/node/blob/main/BUILDING.md#windows
-  // https://github.com/nodejs/node/blob/main/tools/gyp/pylib/gyp/MSVSVersion.py
-  const configureCommand = WIN32 ? whichBinSync('python') : './configure'
-  const configureArgs = WIN32 ? ['configure.py', ...configureFlags] : configureFlags
-
-  // DEBUG: Verify environment is being passed to subprocess.
-  if (WIN32) {
-    logger.log('')
-    logger.log('DEBUG: Checking environment before exec():')
-    const criticalVars = [
-      'GYP_MSVS_VERSION',
-      'GYP_MSVS_OVERRIDE_PATH',
-      'VCINSTALLDIR',
-      'WindowsSDKVersion',
-      'INCLUDE',
-      'LIB',
-    ]
-    for (const varName of criticalVars) {
-      const value = process.env[varName]
-      if (value) {
-        const preview = value.substring(0, 60) + (value.length > 60 ? '...' : '')
-        logger.log(`  ${colors.green('✓')} ${varName} = ${preview}`)
-      } else {
-        logger.log(`  ${colors.red('✗')} ${varName} is NOT SET`)
-      }
-    }
-    logger.log('')
-  }
-
-  logger.log(`::group::Running ${WIN32 ? 'python configure.py' : './configure'}`)
-
-  // On Windows, explicitly pass environment to subprocess.
- // IMPORTANT: Must use shell: false because cmd.exe doesn't properly - // propagate environment variables to subprocesses. - const execOptions = { - cwd: NODE_DIR, - env: process.env, - shell: false, - } - if (WIN32) { - logger.log(`DEBUG: Passing env with ${Object.keys(execOptions.env).length} variables (shell: false)`) - } - - await exec(configureCommand, configureArgs, execOptions) - logger.log('::endgroup::') - logger.log(`${colors.green('✓')} Configuration complete`) - logger.log('') - - // Build Node.js. - printHeader('Building Node.js') - - // Define binary path early (used for both cache and build). - const nodeBinary = join(NODE_DIR, 'out', 'Release', 'node') - - // Try to restore from cache (skip compilation if successful). - let restoredFromCache = false - if (!CLEAN_BUILD) { - logger.log('Checking for cached binary from previous build...') - restoredFromCache = await restoreCachedBinary( - BUILD_DIR, - nodeBinary, - TARGET_PLATFORM, - ARCH, - NODE_VERSION, - ) - logger.log('') - } - - // Skip compilation if restored from cache. - if (!restoredFromCache) { - const timeEstimate = estimateBuildTime(CPU_COUNT) - logger.log( - `⏱️ Estimated time: ${timeEstimate.estimatedMinutes} minutes (${timeEstimate.minMinutes}-${timeEstimate.maxMinutes} min range)`, - ) - logger.log(`🚀 Using ${CPU_COUNT} CPU cores for parallel compilation`) - logger.log('') - logger.log('You can:') - logger.log(' • Grab coffee ☕') - logger.log(' • Work on other tasks') - logger.log(' • Watch progress in this terminal (but seriously, go touch grass)') - logger.log('') - logger.log(`Build log: ${getBuildLogPath(BUILD_DIR)}`) - logger.log('') - logger.log('Starting build...') - logger.log('') - - const buildStart = Date.now() - - // Use GitHub Actions grouping to collapse compiler output. - logger.log('::group::Compiling Node.js with Ninja (this will take a while...)') - - try { - // Resolve full path to ninja since we use shell: false. - const ninjaCommand = whichBinSync('ninja') - await exec(ninjaCommand, ['-C', 'out/Release', `-j${CPU_COUNT}`], { - cwd: NODE_DIR, - env: process.env, - shell: false, - }) - logger.log('::endgroup::') - } catch (e) { - logger.log('::endgroup::') - logger.log('') - logger.log('::error::Ninja build failed - see collapsed "Compiling Node.js" section above for full compiler output') - logger.log('') - // Build failed - show last 100 lines of build log. - const lastLines = await getLastLogLines(BUILD_DIR, 100) - if (lastLines) { - logger.error() - logger.error('Last 100 lines of build log:') - logger.error('━'.repeat(60)) - logger.error(lastLines) - logger.error('━'.repeat(60)) - } - - printError( - 'Build Failed', - 'Node.js compilation failed. See build log for details.', - [ - `Full log: ${getBuildLogPath(BUILD_DIR)}`, - 'Common issues:', - ' - Out of memory: Close other applications', - ' - Disk full: Free up disk space', - ' - Compiler error: Check C++ compiler version', - 'Try again with: node scripts/build-custom-node.mjs --clean', - ], - ) - throw e - } - - const buildDuration = Date.now() - buildStart - const buildTime = formatDuration(buildDuration) - - logger.log('') - logger.log(`${colors.green('✓')} Build completed in ${buildTime}`) - await createCheckpoint(BUILD_DIR, 'built') - logger.log('') - - // Cache the compiled binary for future runs. 
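-    // Stored as build/cache/node-compiled-<platform>-<arch> plus a JSON
-    // metadata sidecar; restoreCachedBinary() consumes both on the next run.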
- await cacheCompiledBinary(BUILD_DIR, nodeBinary, TARGET_PLATFORM, ARCH, NODE_VERSION) - logger.log('') - } else { - logger.log(`${colors.cyan('ℹ')} Skipped compilation (using cached binary)`) - logger.log('') - } - - // Sign early for macOS ARM64 (required before execution in CI). - if (IS_MACOS && ARCH === 'arm64') { - printHeader('Code Signing (macOS ARM64 - Initial)') - logger.log('Signing binary before testing for macOS ARM64 compatibility...') - logger.logNewline() - await exec('codesign', ['--sign', '-', '--force', nodeBinary]) - logger.success('Binary signed successfully') - logger.logNewline() - } - - // Test the binary. - printHeader('Testing Binary') - - logger.log('Running basic functionality tests...') - logger.log('') - - // Set SOCKET_CLI_BUILD_TEST=1 to skip CLI bootstrap during smoke tests. - // The CLI version doesn't exist on npm yet during build. - const smokeTestEnv = { - ...process.env, - SOCKET_CLI_BUILD_TEST: '1', - } - - await exec(nodeBinary, ['--version'], { env: smokeTestEnv }) - - logger.log('') - logger.log(`${colors.green('✓')} Binary is functional`) - logger.log('') - - // Copy unmodified binary to build/out/Release. - printHeader('Copying to Build Output (Release)') - logger.log('Copying unmodified binary to build/out/Release directory...') - logger.logNewline() - - const outputReleaseDir = join(BUILD_DIR, 'out', 'Release') - await safeMkdir(outputReleaseDir) - const outputReleaseBinary = join(outputReleaseDir, 'node') - await fs.cp(nodeBinary, outputReleaseBinary, { force: true, preserveTimestamps: true }) - - logger.substep(`Release directory: ${outputReleaseDir}`) - logger.substep('Binary: node (unmodified)') - logger.logNewline() - logger.success('Unmodified binary copied to build/out/Release') - logger.logNewline() - - // Strip debug symbols to reduce size. - printHeader('Optimizing Binary Size') - const sizeBeforeStrip = await getFileSize(nodeBinary) - logger.log(`Size before stripping: ${sizeBeforeStrip}`) - logger.log('Removing debug symbols and unnecessary sections...') - logger.log('') - - // Platform-specific strip flags: - // - macOS (LLVM strip): Use -x (remove local symbols) - // macOS strip does NOT support --strip-all (GNU-only flag) - // - Linux (GNU strip): Try --strip-all first, fall back to -x - // --strip-all removes all symbols + section headers (most aggressive) - // - Windows: Skip stripping (no strip command) - let stripArgs - if (IS_WINDOWS) { - logger.log('Windows detected - skipping strip (not supported)') - logger.log('') - } else if (IS_MACOS) { - // macOS always uses -x (LLVM strip doesn't support --strip-all). - stripArgs = ['-x', nodeBinary] - logger.log('Using macOS strip flags: -x (remove local symbols)') - } else { - // Linux/Alpine: Test if --strip-all is supported. - logger.log('Testing strip capabilities...') - const testResult = await spawn('strip', ['--help'], { - stdio: 'pipe', - stdioString: true, - }) - const supportsStripAll = (testResult.stdout ?? '').includes('--strip-all') - - if (supportsStripAll) { - stripArgs = ['--strip-all', nodeBinary] - logger.log('Using GNU strip flags: --strip-all (remove all symbols + sections)') - } else { - stripArgs = ['-x', nodeBinary] - logger.log('Using fallback strip flags: -x (GNU --strip-all not supported)') - } - } - - if (stripArgs) { - await exec('strip', stripArgs) - } - - const sizeAfterStrip = await getFileSize(nodeBinary) - logger.log(`Size after stripping: ${sizeAfterStrip}`) - - // Parse and check size. 
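-  // getFileSize() always emits one decimal place (e.g. "25.3M"), so the
-  // pattern must accept a fractional component.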
- const sizeMatch = sizeAfterStrip.match(/^(\d+)([KMG])/) - if (sizeMatch) { - const size = Number.parseInt(sizeMatch[1], 10) - const unit = sizeMatch[2] - - if (unit === 'M' && size >= 20 && size <= 30) { - logger.log(`${colors.green('✓')} Binary size is optimal (20-30MB with V8 Lite Mode)`) - } else if (unit === 'M' && size < 20) { - printWarning( - 'Binary Smaller Than Expected', - `Binary is ${sizeAfterStrip}, expected ~23-27MB.`, - [ - 'Some features may be missing', - 'Verify configure flags were applied correctly', - ], - ) - } else if (unit === 'M' && size > 35) { - printWarning( - 'Binary Larger Than Expected', - `Binary is ${sizeAfterStrip}, expected ~23-27MB.`, - [ - 'Debug symbols may not be fully stripped', - 'Configure flags may not be applied', - 'Binary will still work but will be larger', - ], - ) - } - } - - logger.log('') - - // Re-sign after stripping for macOS ARM64 (strip invalidates code signature). - if (IS_MACOS && ARCH === 'arm64') { - printHeader('Code Signing (macOS ARM64 - After Stripping)') - logger.log('Re-signing binary after stripping for macOS ARM64 compatibility...') - logger.log('(strip command invalidates code signature, re-signing required)') - logger.logNewline() - await exec('codesign', ['--sign', '-', '--force', nodeBinary]) - logger.success('Binary re-signed successfully after stripping') - logger.logNewline() - - // Smoke test after signing to ensure signature is valid. - logger.log('Testing binary after signing...') - const signTestPassed = await smokeTestBinary(nodeBinary) - - if (!signTestPassed) { - printError( - 'Binary Corrupted After Signing', - 'Binary failed smoke test after code signing', - [ - 'Code signing may have corrupted the binary', - 'Try rebuilding: node scripts/build-custom-node.mjs --clean', - 'Report this issue if it persists', - ], - ) - throw new Error('Binary corrupted after signing') - } - - logger.log(`${colors.green('✓')} Binary functional after signing`) - logger.log('') - } - - // Smoke test binary after stripping (ensure strip didn't corrupt it). - logger.log('Testing binary after stripping...') - const smokeTestPassed = await smokeTestBinary(nodeBinary) - - if (!smokeTestPassed) { - printError( - 'Binary Corrupted After Stripping', - 'Binary failed smoke test after stripping', - [ - 'Strip command may have corrupted the binary', - 'Try rebuilding: node scripts/build-custom-node.mjs --clean', - 'Report this issue if it persists', - ], - ) - throw new Error('Binary corrupted after stripping') - } - - logger.log(`${colors.green('✓')} Binary functional after stripping`) - logger.log('') - - // Copy stripped binary to build/out/Stripped. - printHeader('Copying to Build Output (Stripped)') - logger.log('Copying stripped binary to build/out/Stripped directory...') - logger.logNewline() - - const outputStrippedDir = join(BUILD_DIR, 'out', 'Stripped') - await safeMkdir(outputStrippedDir) - const outputStrippedBinary = join(outputStrippedDir, 'node') - await fs.cp(nodeBinary, outputStrippedBinary, { force: true, preserveTimestamps: true }) - - logger.substep(`Stripped directory: ${outputStrippedDir}`) - logger.substep('Binary: node (stripped)') - logger.logNewline() - logger.success('Stripped binary copied to build/out/Stripped') - logger.logNewline() - - // Compress binary for smaller distribution size (DEFAULT for smol builds). - // Uses native platform APIs (Apple Compression, liblzma, Windows Compression API) instead of UPX. 
- // Benefits: 75-79% compression (vs UPX's 50-60%), works with code signing, zero AV false positives.
- // Opt-out: Set COMPRESS_BINARY=0 or COMPRESS_BINARY=false to disable compression.
- let compressedBinary = null
- const shouldCompress = process.env.COMPRESS_BINARY !== '0' && process.env.COMPRESS_BINARY !== 'false'
-
- if (shouldCompress) {
- printHeader('Compressing Binary for Distribution')
- logger.log('Compressing stripped binary using platform-specific compression...')
- logger.logNewline()
-
- const compressedDir = join(BUILD_DIR, 'out', 'Compressed')
- await safeMkdir(compressedDir)
- compressedBinary = join(compressedDir, 'node')
-
- // Select compression quality based on platform.
- // macOS: LZFSE (faster) or LZMA (better compression).
- // Linux: LZMA (best for ELF).
- // Windows: LZMS (best for PE).
- const compressionQuality = IS_MACOS ? 'lzfse' : WIN32 ? 'lzms' : 'lzma'
-
- // Read socketbin package spec from actual package.json for socket-lib cache key generation.
- // Format: @socketbin/cli-{platform}-{arch}@{version}
- // This enables deterministic cache keys based on the published package.
- const socketbinPkgPath = join(dirname(ROOT_DIR), `socketbin-cli-${TARGET_PLATFORM}-${ARCH}`, 'package.json')
- let socketbinSpec = null
- try {
- const socketbinPkg = JSON.parse(await fs.readFile(socketbinPkgPath, 'utf-8'))
- socketbinSpec = `${socketbinPkg.name}@${socketbinPkg.version}`
- logger.substep(`Found socketbin package: ${socketbinSpec}`)
- } catch (e) {
- logger.warn(`Could not read socketbin package.json at ${socketbinPkgPath}`)
- logger.warn('Compression will use fallback cache key generation')
- }
-
- logger.substep(`Input: ${outputStrippedBinary}`)
- logger.substep(`Output: ${compressedBinary}`)
- logger.substep(`Algorithm: ${compressionQuality.toUpperCase()}`)
- if (socketbinSpec) {
- logger.substep(`Spec: ${socketbinSpec}`)
- }
- logger.logNewline()
-
- const sizeBeforeCompress = await getFileSize(outputStrippedBinary)
- logger.log(`Size before compression: ${sizeBeforeCompress}`)
- logger.log('Running compression tool...')
- logger.logNewline()
-
- // Run platform-specific compression.
- const compressArgs = [
- join(ROOT_DIR, 'scripts', 'compress-binary.mjs'),
- outputStrippedBinary,
- compressedBinary,
- `--quality=${compressionQuality}`,
- ]
- if (socketbinSpec) {
- compressArgs.push(`--spec=${socketbinSpec}`)
- }
- await exec(process.execPath, compressArgs, { cwd: ROOT_DIR })
-
- const sizeAfterCompress = await getFileSize(compressedBinary)
- logger.log(`Size after compression: ${sizeAfterCompress}`)
- logger.logNewline()
-
- // Skip signing compressed binary - it's a self-extracting binary (decompressor stub + compressed data),
- // not a standard Mach-O executable. The decompressor stub is already signed if needed.
- // When executed, the stub extracts and runs the original Node.js binary.
- logger.log('Skipping code signing for self-extracting binary...')
- logger.substep('✓ Compressed binary ready (self-extracting, no signature needed)')
- logger.logNewline()
-
- // Skip smoke test for self-extracting binary.
- // TODO: The decompressor stub needs to be updated to properly handle command-line arguments.
- // Currently it treats arguments as filenames instead of passing them to the decompressed binary.
- // Once fixed, we can enable smoke testing for compressed binaries.
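- // Illustrative sketch only (assumes the argv fix above has landed): the
- // compressed binary could then be smoke-tested like the uncompressed one:
- //   await exec(compressedBinary, ['--version'], { env: smokeTestEnv })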
- logger.log('Skipping smoke test for self-extracting binary...') - logger.substep('✓ Smoke test skipped (decompressor needs argument handling fix)') - logger.log('') - - logger.substep(`Compressed directory: ${compressedDir}`) - logger.substep('Binary: node (compressed)') - logger.logNewline() - logger.success('Binary compressed successfully') - logger.logNewline() - - // Copy decompression tool to Compressed directory for distribution. - printHeader('Bundling Decompression Tool') - logger.log('Copying platform-specific decompression tool for distribution...') - logger.logNewline() - - const toolsDir = join(ROOT_DIR, 'additions', '003-compression-tools') - const decompressTool = IS_MACOS - ? 'socketsecurity_macho_decompress' - : WIN32 - ? 'socketsecurity_pe_decompress.exe' - : 'socketsecurity_elf_decompress' - - const decompressToolSource = join(toolsDir, decompressTool) - const decompressToolDest = join(compressedDir, decompressTool) - - if (existsSync(decompressToolSource)) { - await fs.cp(decompressToolSource, decompressToolDest, { force: true, preserveTimestamps: true }) - - // Ensure tool is executable. - await exec('chmod', ['+x', decompressToolDest]) - - const toolSize = await getFileSize(decompressToolDest) - logger.substep(`Tool: ${decompressTool} (${toolSize})`) - logger.substep(`Location: ${compressedDir}`) - logger.logNewline() - logger.success('Decompression tool bundled for distribution') - logger.logNewline() - } else { - printWarning( - 'Decompression Tool Not Found', - `Could not find ${decompressTool} in ${toolsDir}`, - [ - 'Build the compression tools first:', - ` cd ${toolsDir}`, - ` make all`, - 'Then run this build again with COMPRESS_BINARY=1', - ], - ) - } - } else { - logger.log('') - logger.log(`${colors.blue('ℹ')} Binary compression skipped (optional)`) - logger.log(' To enable: COMPRESS_BINARY=1 node scripts/build.mjs') - logger.log('') - } - - // Copy final distribution binary to build/out/Final. - // Use compressed binary if available, otherwise use stripped binary. - printHeader('Copying to Build Output (Final)') - const finalDir = join(BUILD_DIR, 'out', 'Final') - await safeMkdir(finalDir) - const finalBinary = join(finalDir, 'node') - - if (compressedBinary && existsSync(compressedBinary)) { - logger.log('Copying compressed distribution package to Final directory...') - logger.logNewline() - - const compressedDir = join(BUILD_DIR, 'out', 'Compressed') - - // Copy compressed binary to Final. - await fs.cp(compressedBinary, finalBinary, { force: true, preserveTimestamps: true }) - - // Copy decompressor tool to Final. - const decompressTool = IS_MACOS - ? 'socketsecurity_macho_decompress' - : WIN32 ? 'socketsecurity_pe_decompress.exe' : 'socketsecurity_elf_decompress' - const decompressToolSource = join(compressedDir, decompressTool) - const decompressToolDest = join(finalDir, decompressTool) - - if (existsSync(decompressToolSource)) { - await fs.cp(decompressToolSource, decompressToolDest, { force: true, preserveTimestamps: true }) - await exec('chmod', ['+x', decompressToolDest]) - } - - const compressedSize = await getFileSize(finalBinary) - const decompressToolSize = existsSync(decompressToolDest) - ? 
await getFileSize(decompressToolDest) - : 'N/A' - - logger.substep('Source: build/out/Compressed/node (compressed + signed)') - logger.substep(`Binary: ${compressedSize}`) - logger.substep(`Decompressor: ${decompressToolSize}`) - logger.substep(`Location: ${finalDir}`) - logger.logNewline() - logger.success('Final distribution created with compressed package') - logger.logNewline() - } else { - logger.log('Copying stripped binary to Final directory...') - logger.logNewline() - - await fs.cp(outputStrippedBinary, finalBinary, { force: true, preserveTimestamps: true }) - - const binarySize = await getFileSize(finalBinary) - logger.substep('Source: build/out/Stripped/node (stripped, uncompressed)') - logger.substep(`Binary: ${binarySize}`) - logger.substep(`Location: ${finalDir}`) - logger.logNewline() - logger.success('Final distribution created with uncompressed binary') - logger.logNewline() - } - - // Copy signed binary to build/out/Sea (for SEA builds). - printHeader('Copying to Build Output (Sea)') - logger.log( - 'Copying signed binary to build/out/Sea directory for SEA builds...', - ) - logger.logNewline() - - const outputSeaDir = join(BUILD_DIR, 'out', 'Sea') - await safeMkdir(outputSeaDir) - const outputSeaBinary = join(outputSeaDir, 'node') - await fs.cp(nodeBinary, outputSeaBinary, { force: true, preserveTimestamps: true }) - - logger.substep(`Sea directory: ${outputSeaDir}`) - logger.substep('Binary: node (stripped + signed, ready for SEA)') - logger.logNewline() - logger.success('Binary copied to build/out/Sea') - logger.logNewline() - - // Copy to dist/ for E2E testing. - printHeader('Copying to dist/ for E2E Testing') - logger.log('Creating dist/socket-smol and dist/socket-sea for e2e test suite...') - logger.logNewline() - - const distDir = join(ROOT_DIR, 'dist') - await safeMkdir(distDir) - - // Copy final binary (compressed or stripped) to dist/socket-smol. - const distSmolBinary = join(distDir, 'socket-smol') - await fs.cp(finalBinary, distSmolBinary, { force: true, preserveTimestamps: true }) - await exec('chmod', ['+x', distSmolBinary]) - - // Copy SEA binary to dist/socket-sea. - const distSeaBinary = join(distDir, 'socket-sea') - await fs.cp(outputSeaBinary, distSeaBinary, { force: true, preserveTimestamps: true }) - await exec('chmod', ['+x', distSeaBinary]) - - logger.substep(`E2E smol binary: ${distSmolBinary}`) - logger.substep(`E2E sea binary: ${distSeaBinary}`) - logger.substep('Test commands:') - logger.substep(' pnpm --filter @socketsecurity/cli run e2e:smol') - logger.substep(' pnpm --filter @socketsecurity/cli run e2e:sea') - logger.logNewline() - logger.success('Binaries copied to dist/ for e2e testing') - logger.logNewline() - - // Write source hash to cache file for future builds. - const sourcePaths = collectBuildSourceFiles() - const sourceHashComment = await generateHashComment(sourcePaths) - const cacheDir = join(BUILD_DIR, '.cache') - await safeMkdir(cacheDir, { recursive: true }) - const hashFilePath = join(cacheDir, 'node.hash') - await fs.writeFile(hashFilePath, sourceHashComment, 'utf-8') - logger.substep(`Cache hash: ${hashFilePath}`) - logger.logNewline() - - // Report build complete. - const binarySize = await getFileSize(finalBinary) - await createCheckpoint(BUILD_DIR, 'complete') - await cleanCheckpoint(BUILD_DIR) - - // Calculate total build time. - const totalDuration = Date.now() - totalStart - const totalTime = formatDuration(totalDuration) - - printHeader('🎉 Build Complete!') - - // ASCII art success. 
- logger.logNewline() - logger.log(' ╔═══════════════════════════════════════╗') - logger.log(' ║ ║') - logger.log(' ║ ✨ Build Successful! ✨ ║') - logger.log(' ║ ║') - logger.log(' ╚═══════════════════════════════════════╝') - logger.logNewline() - - logger.log('📊 Build Statistics:') - logger.log(` Total time: ${totalTime}`) - logger.log(` Binary size: ${binarySize}`) - logger.log(` CPU cores used: ${CPU_COUNT}`) - logger.logNewline() - - logger.log('📁 Binary Locations:') - logger.log(` Source: ${nodeBinary}`) - logger.log(` Release: ${outputReleaseBinary}`) - logger.log(` Stripped: ${outputStrippedBinary}`) - if (compressedBinary) { - logger.log(` Compressed: ${compressedBinary} (signed, with decompression tool)`) - } - logger.log(` Final: ${finalBinary}`) - logger.log(` Distribution: ${finalBinary}`) - logger.logNewline() - - logger.log('🚀 Next Steps:') - if (compressedBinary) { - logger.log(' 1. Test compressed binary:') - logger.log(` cd ${join(BUILD_DIR, 'out', 'Compressed')}`) - const decompressTool = IS_MACOS - ? './socketsecurity_macho_decompress' - : WIN32 - ? './socketsecurity_pe_decompress.exe' - : './socketsecurity_elf_decompress' - logger.log(` ${decompressTool} ./node --version`) - logger.logNewline() - logger.log(' 2. Build Socket CLI with compressed Node:') - logger.log(' (Use compressed binary for pkg builds)') - logger.logNewline() - } else { - logger.log(' 1. Build Socket CLI:') - logger.log(' pnpm run build') - logger.logNewline() - logger.log(' 2. Create pkg executable:') - logger.log(' pnpm exec pkg .') - logger.logNewline() - logger.log(' 3. Test the executable:') - logger.log(' ./pkg-binaries/socket-macos-arm64 --version') - logger.logNewline() - } - - logger.log('💡 Helpful Commands:') - logger.log(' Verify build: node scripts/verify-node-build.mjs') - if (!shouldCompress) { - logger.log(' Enable compression: COMPRESS_BINARY=1 node scripts/build.mjs') - } - logger.logNewline() - - logger.log('📚 Documentation:') - logger.log(' Build process: build/patches/README.md') - logger.log(' Troubleshooting: See README for common issues') - logger.logNewline() - - if (RUN_VERIFY) { - printHeader('Running Verification') - logger.log('Running comprehensive verification checks...') - logger.logNewline() - - try { - await exec( - 'node', - ['scripts/verify-node-build.mjs', `--node-version=${NODE_VERSION}`], - { - cwd: ROOT_DIR, - }, - ) - } catch (_e) { - printWarning( - 'Verification Failed', - 'Build completed but verification found issues.', - [ - 'Review verification output above', - 'Run manually: node scripts/verify-node-build.mjs', - ], - ) - } - } else { - logger.info('Tip: Run verification checks:') - logger.substep('node scripts/verify-node-build.mjs') - logger.logNewline() - } - - // Step 10: Run tests if requested. 
- if (RUN_TESTS || RUN_FULL_TESTS) { - printHeader('Running Tests with Custom Node') - logger.log(`Testing Socket CLI with custom Node.js ${NODE_VERSION}...`) - logger.logNewline() - - try { - const testArgs = [ - 'scripts/test-with-custom-node.mjs', - `--node-version=${NODE_VERSION}`, - ] - if (RUN_FULL_TESTS) { - testArgs.push('--full') - } - - await exec('node', testArgs, { cwd: ROOT_DIR }) - - logger.logNewline() - logger.success('Tests passed with custom Node.js binary!') - logger.logNewline() - } catch (_e) { - printError( - 'Tests Failed', - 'Tests failed when using the custom Node.js binary.', - [ - 'Review test output above for details', - 'The binary may have issues with Socket CLI', - 'Consider rebuilding: node scripts/build-custom-node.mjs --clean', - 'Or run tests manually: node scripts/test-with-custom-node.mjs', - ], - ) - throw new Error('Tests failed with custom Node.js') - } - } else if (!RUN_VERIFY) { - logger.info('Tip: Test with custom Node:') - logger.substep('node scripts/test-with-custom-node.mjs') - logger.logNewline() - } -} - -// Run main function. -main().catch(e => { - logger.fail(`Build failed: ${e.message}`) - throw e -}) diff --git a/packages/node-smol-builder/scripts/compress-binary.mjs b/packages/node-smol-builder/scripts/compress-binary.mjs deleted file mode 100644 index 80253983e..000000000 --- a/packages/node-smol-builder/scripts/compress-binary.mjs +++ /dev/null @@ -1,292 +0,0 @@ -#!/usr/bin/env node -/** - * Cross-platform binary compression script. - * - * Automatically detects the platform and uses the appropriate compression tool: - * - macOS: socket_macho_compress (Apple Compression framework) - * - Linux: socket_elf_compress (liblzma) - * - Windows: socket_pe_compress (Windows Compression API) - * - * Why This Approach Over UPX? - * - * UPX (Ultimate Packer for eXecutables) is a popular packer, but has critical issues: - * - 50-60% compression vs our 75-79% (20-30% worse) - * - Breaks macOS code signing (Gatekeeper blocks) - * - 15-30% antivirus false positive rate (blacklisted packer signature) - * - Uses self-modifying code (triggers heuristic scanners) - * - Windows Defender often flags UPX-packed binaries - * - * Our approach uses native OS compression APIs: - * - 75-79% compression ratio (macOS LZMA: 76%, Linux LZMA: 77%, Windows LZMS: 73%) - * - Works with macOS code signing (preserves both inner and outer signatures) - * - Zero AV false positives (trusted platform APIs) - * - No self-modifying code (W^X compliant) - * - External decompressor (~90 KB) instead of packed stub - * - Decompresses to memory/tmpfs (fast, no disk I/O) - * - * Distribution: - * - Ship compressed binary + decompressor tool - * - Total overhead: ~90 KB (vs UPX's self-extracting overhead) - * - Example: 23 MB binary → 10 MB compressed + 90 KB tool = 10.09 MB - * - * Usage: - * node scripts/compress-binary.mjs [--quality=lzma|lzfse|xpress] - * node scripts/compress-binary.mjs ./node ./node.compressed --quality=lzma - */ - -import { existsSync, promises as fs } from 'node:fs' -import path from 'node:path' -import { fileURLToPath } from 'node:url' -import { WIN32 } from '@socketsecurity/lib/constants/platform' -import { spawn } from '@socketsecurity/lib/spawn' -import { getDefaultLogger } from '@socketsecurity/lib/logger' - -const __dirname = path.dirname(fileURLToPath(import.meta.url)) -const logger = getDefaultLogger() -const TOOLS_DIR = path.resolve(__dirname, '../additions/003-compression-tools') - -/** - * Platform configuration. 
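- * Each entry maps a process.platform value to the tool name, binary format,
- * default/allowed quality levels, and build command for that platform.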
- */ -const PLATFORM_CONFIG = { - __proto__: null, - darwin: { - toolName: 'socketsecurity_macho_compress', - binaryFormat: 'Mach-O', - defaultQuality: 'lzfse', - qualityOptions: ['lz4', 'zlib', 'lzfse', 'lzma'], - buildCommand: '/usr/bin/make -f Makefile' - }, - linux: { - toolName: 'socketsecurity_elf_compress', - binaryFormat: 'ELF', - defaultQuality: 'lzma', - qualityOptions: ['lzma'], - buildCommand: '/usr/bin/make -f Makefile.linux' - }, - win32: { - toolName: 'socketsecurity_pe_compress', - binaryFormat: 'PE', - defaultQuality: 'lzms', - qualityOptions: ['xpress', 'xpress_huff', 'lzms'], - buildCommand: 'mingw32-make -f Makefile.windows' - } -} - -/** - * Parse command line arguments. - */ -function parseArgs() { - const args = process.argv.slice(2) - - if (args.length < 2) { - logger.error('Usage: compress-binary.mjs [--quality=lzma|lzfse|xpress] [--spec=package@version]') - logger.error('') - logger.error('Examples:') - logger.error(' node scripts/compress-binary.mjs ./node ./node.compressed') - logger.error(' node scripts/compress-binary.mjs ./node ./node.compressed --quality=lzma') - logger.error(' node scripts/compress-binary.mjs ./node ./node.compressed --spec=@socketbin/node-smol-builder-darwin-arm64@0.0.0-24.10.0') - process.exit(1) - } - - const inputPath = path.resolve(args[0]) - const outputPath = path.resolve(args[1]) - let quality = null - let spec = null - - for (const arg of args.slice(2)) { - if (arg.startsWith('--quality=')) { - quality = arg.substring('--quality='.length) - } else if (arg.startsWith('--spec=')) { - spec = arg.substring('--spec='.length) - } - } - - return { inputPath, outputPath, quality, spec } -} - -/** - * Get platform configuration. - */ -function getPlatformConfig() { - const platform = process.platform - const config = PLATFORM_CONFIG[platform] - - if (!config) { - throw new Error(`Unsupported platform: ${platform}. Supported: macOS, Linux, Windows`) - } - - return config -} - -/** - * Build compression tool if it doesn't exist. - */ -async function ensureToolBuilt(config) { - const toolPath = path.join(TOOLS_DIR, config.toolName) - const toolPathExe = `${toolPath}.exe` - - // Check if tool exists (with or without .exe extension). - if (existsSync(toolPath) || existsSync(toolPathExe)) { - return existsSync(toolPathExe) ? toolPathExe : toolPath - } - - logger.log(`Building ${config.binaryFormat} compression tool...`) - logger.log(` Command: ${config.buildCommand}`) - logger.log('') - - // Use the same spawn pattern as build.mjs exec function. - // Pass full command string with empty args array, using shell: WIN32. - const result = await spawn(config.buildCommand, [], { - cwd: TOOLS_DIR, - shell: WIN32, - stdio: 'inherit' - }) - - if (result.code !== 0) { - throw new Error(`Failed to build compression tool (exit code: ${result.code})`) - } - - // Verify tool was built. - if (!existsSync(toolPath) && !existsSync(toolPathExe)) { - throw new Error(`Tool ${config.toolName} was not created after build`) - } - - return existsSync(toolPathExe) ? toolPathExe : toolPath -} - -/** - * Get file size in MB. - */ -async function getFileSizeMB(filePath) { - const stats = await fs.stat(filePath) - return stats.size / 1024 / 1024 -} - -/** - * Compress binary using platform-specific tool. - */ -async function compressBinary(toolPath, inputPath, outputPath, quality, spec, config) { - // Validate input file exists. - if (!existsSync(inputPath)) { - throw new Error(`Input file not found: ${inputPath}`) - } - - // Get input file size. 
- const inputSizeMB = await getFileSizeMB(inputPath) - - logger.log(`Compressing ${config.binaryFormat} binary...`) - logger.log(` Input: ${inputPath} (${inputSizeMB.toFixed(2)} MB)`) - logger.log(` Output: ${outputPath}`) - logger.log(` Quality: ${quality || config.defaultQuality}`) - logger.log('') - - // Create temporary compressed data file. - const compressedDataPath = `${outputPath}.data` - - // Build command arguments. - const args = [inputPath, compressedDataPath] - if (quality) { - args.push(`--quality=${quality}`) - } - - // Execute compression tool. - const result = await spawn(toolPath, args, { - stdio: 'inherit' - }) - - if (result.code !== 0) { - throw new Error(`Compression failed (exit code: ${result.code})`) - } - - // Verify compressed data file was created. - if (!existsSync(compressedDataPath)) { - throw new Error(`Compressed data file was not created: ${compressedDataPath}`) - } - - // Get compressed data size. - const compressedSizeMB = await getFileSizeMB(compressedDataPath) - - logger.log('') - logger.log(`✓ Compression complete!`) - logger.log(` Original: ${inputSizeMB.toFixed(2)} MB`) - logger.log(` Compressed data: ${compressedSizeMB.toFixed(2)} MB`) - logger.log(` Reduction: ${(((inputSizeMB - compressedSizeMB) / inputSizeMB) * 100).toFixed(1)}%`) - logger.log(` Saved: ${(inputSizeMB - compressedSizeMB).toFixed(2)} MB`) - logger.log('') - - // Combine decompressor stub with compressed data to create self-extracting binary. - logger.log('Creating self-extracting binary...') - - const decompressorPath = path.join(TOOLS_DIR, config.toolName.replace('_compress', '_decompress')) - - if (!existsSync(decompressorPath)) { - throw new Error(`Decompressor not found: ${decompressorPath}`) - } - - // Get decompressor size. - const decompressorSizeMB = await getFileSizeMB(decompressorPath) - logger.log(` Decompressor stub: ${decompressorSizeMB.toFixed(2)} MB`) - logger.log(` Compressed data: ${compressedSizeMB.toFixed(2)} MB`) - - // Combine: decompressor + spec_header + compressed_data → self-extracting binary. - // Read both files. - const decompressor = await fs.readFile(decompressorPath) - const compressedData = await fs.readFile(compressedDataPath) - - // Create spec header if provided (for socket-lib cache key generation). - const specHeader = spec - ? Buffer.from(`SOCKET_SPEC:${spec}\n`, 'utf-8') - : Buffer.alloc(0) - - if (spec) { - logger.log(` Spec string: ${spec}`) - } - - // Concatenate: stub + spec_header + data. - const combined = Buffer.concat([decompressor, specHeader, compressedData]) - - // Write self-extracting binary. - await fs.writeFile(outputPath, combined, { mode: 0o755 }) - - // Clean up temporary compressed data file. - await fs.unlink(compressedDataPath) - - // Get final output size. - const outputSizeMB = await getFileSizeMB(outputPath) - const reduction = ((inputSizeMB - outputSizeMB) / inputSizeMB) * 100 - - logger.log(` Final binary: ${outputSizeMB.toFixed(2)} MB`) - logger.log('') - logger.log(`✓ Self-extracting binary created!`) - logger.log(` Total size: ${outputSizeMB.toFixed(2)} MB`) - logger.log(` Total reduction: ${reduction.toFixed(1)}%`) - logger.log(` Total saved: ${(inputSizeMB - outputSizeMB).toFixed(2)} MB`) -} - -/** - * Main function. 
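- * Parses arguments, detects the platform, builds the compression tool if
- * missing, then compresses the target binary.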
- */
-async function main() {
- try {
- const { inputPath, outputPath, quality, spec } = parseArgs()
- const config = getPlatformConfig()
-
- logger.log('Socket Binary Compression')
- logger.log('=========================')
- logger.log(`Platform: ${config.binaryFormat} (${process.platform})`)
- logger.log('')
-
- // Ensure tool is built.
- const toolPath = await ensureToolBuilt(config)
-
- // Compress binary.
- await compressBinary(toolPath, inputPath, outputPath, quality, spec, config)
-
- } catch (e) {
- logger.error(`Error: ${e.message}`)
- process.exit(1)
- }
-}
-
-main()
diff --git a/packages/node-smol-builder/scripts/compress-macho.mjs b/packages/node-smol-builder/scripts/compress-macho.mjs
deleted file mode 100644
index 407f10eb2..000000000
--- a/packages/node-smol-builder/scripts/compress-macho.mjs
+++ /dev/null
@@ -1,176 +0,0 @@
-/**
- * @fileoverview Compress macOS binaries using Apple's Compression framework.
- *
- * This script integrates socket_macho_compress with the Node.js build process.
- * It provides an alternative to UPX that works with macOS code signing.
- *
- * Features:
- * - Compresses Mach-O binaries using LZFSE/LZMA
- * - Preserves code signature compatibility
- * - ~20-30% size reduction beyond stripping
- * - Creates decompressor for runtime execution
- *
- * Usage:
- * node compress-macho.mjs <input_binary> [output_binary] [--quality=lzfse]
- *
- * Example:
- * node compress-macho.mjs build/out/Signed/node build/out/Compressed/node
- */
-
-import { execFile } from 'node:child_process'
-import { existsSync } from 'node:fs'
-import { mkdir } from 'node:fs/promises'
-import { dirname, join } from 'node:path'
-import { promisify } from 'node:util'
-import { fileURLToPath } from 'node:url'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
-import colors from 'yoctocolors-cjs'
-
-const execFileAsync = promisify(execFile)
-
-const __filename = fileURLToPath(import.meta.url)
-const __dirname = dirname(__filename)
-
-// Module-level logger so every function below can log.
-const logger = getDefaultLogger()
-
-// Path to compression tools.
-const TOOLS_DIR = join(__dirname, '..', 'additions', 'tools')
-const COMPRESS_TOOL = join(TOOLS_DIR, 'socket_macho_compress')
-const DECOMPRESS_TOOL = join(TOOLS_DIR, 'socket_macho_decompress')
-
-/**
- * Build compression tools if needed.
- */
-async function buildTools() {
- if (existsSync(COMPRESS_TOOL) && existsSync(DECOMPRESS_TOOL)) {
- logger.log(`${colors.green('✓')} Compression tools already built`)
- return
- }
-
- logger.log('Building compression tools...')
- logger.log(` Directory: ${TOOLS_DIR}`)
- logger.log('')
-
- try {
- const { stdout, stderr } = await execFileAsync('make', ['all'], {
- cwd: TOOLS_DIR,
- env: { ...process.env },
- })
-
- if (stdout) logger.log(stdout)
- if (stderr) logger.error(stderr)
-
- if (!existsSync(COMPRESS_TOOL)) {
- throw new Error('Compressor tool was not built')
- }
- if (!existsSync(DECOMPRESS_TOOL)) {
- throw new Error('Decompressor tool was not built')
- }
-
- logger.log(`${colors.green('✓')} Tools built successfully`)
- logger.log('')
- } catch (error) {
- logger.error(`${colors.red('✗')} Failed to build tools:`)
- logger.error(error.message)
- throw error
- }
-}
-
-/**
- * Compress a Mach-O binary.
- */
-async function compressBinary(inputPath, outputPath, quality = 'lzfse') {
- logger.log('Compressing binary...')
- logger.log(` Input: ${inputPath}`)
- logger.log(` Output: ${outputPath}`)
- logger.log(` Quality: ${quality}`)
- logger.log('')
-
- // Ensure input exists.
- if (!existsSync(inputPath)) { - throw new Error(`Input file not found: ${inputPath}`) - } - - // Create output directory. - await mkdir(dirname(outputPath), { recursive: true }) - - // Run compression tool. - try { - const args = [inputPath, outputPath, `--quality=${quality}`] - const { stdout, stderr } = await execFileAsync(COMPRESS_TOOL, args, { - maxBuffer: 10 * 1024 * 1024, // 10MB buffer for output. - }) - - if (stdout) logger.log(stdout) - if (stderr) logger.error(stderr) - - if (!existsSync(outputPath)) { - throw new Error('Compressed binary was not created') - } - - logger.log('') - logger.log(`${colors.green('✓')} Compression complete`) - } catch (error) { - logger.error(`${colors.red('✗')} Compression failed:`) - logger.error(error.message) - throw error - } -} - -/** - * Main function. - */ -async function main() { - const args = process.argv.slice(2) - - if (args.length < 1) { - logger.error('Usage: node compress-macho.mjs [output_binary] [--quality=lzfse|lz4|lzma|zlib]') - logger.error() - logger.error('Example:') - logger.error(' node compress-macho.mjs build/out/Signed/node build/out/Compressed/node') - logger.error() - logger.error('Quality options:') - logger.error(' lz4 - Fast decompression, lower compression (~20-30%)') - logger.error(' zlib - Balanced, good compatibility (~30-40%)') - logger.error(' lzfse - Apple default, best for binaries (~35-45%) [default]') - logger.error(' lzma - Maximum compression, slower (~40-50%)') - process.exit(1) - } - - const inputPath = args[0] - const outputPath = args[1] || inputPath.replace(/(\.[^.]+)?$/, '.compressed$1') - - // Parse quality argument. - let quality = 'lzfse' - for (const arg of args) { - if (arg.startsWith('--quality=')) { - quality = arg.substring(10) - } - } - - try { - // Build tools if needed. - await buildTools() - - // Compress binary. - await compressBinary(inputPath, outputPath, quality) - - logger.log('') - logger.log('📝 Next steps:') - logger.log('') - logger.log('1. Test the compressed binary:') - logger.log(` ${DECOMPRESS_TOOL} ${outputPath} --version`) - logger.log('') - logger.log('2. Sign the compressed binary (macOS):') - logger.log(` codesign --sign - --force ${outputPath}`) - logger.log('') - logger.log('3. Distribute the compressed binary with the decompressor') - logger.log(` cp ${DECOMPRESS_TOOL} /`) - logger.log('') - } catch (error) { - logger.error() - logger.error(`${colors.red('✗')} Compression failed`) - process.exit(1) - } -} - -main() diff --git a/packages/node-smol-builder/scripts/decompress-binary.mjs b/packages/node-smol-builder/scripts/decompress-binary.mjs deleted file mode 100644 index bfdc11f61..000000000 --- a/packages/node-smol-builder/scripts/decompress-binary.mjs +++ /dev/null @@ -1,173 +0,0 @@ -#!/usr/bin/env node -/** - * Cross-platform binary decompression script. - * - * Automatically detects the platform and uses the appropriate decompression tool: - * - macOS: socket_macho_decompress - * - Linux: socket_elf_decompress - * - Windows: socket_pe_decompress - * - * Usage: - * node scripts/decompress-binary.mjs [args...] 
- * node scripts/decompress-binary.mjs ./node.compressed --version
- */
-
-import { existsSync } from 'node:fs'
-import path from 'node:path'
-import { fileURLToPath } from 'node:url'
-
-import { WIN32 } from '@socketsecurity/lib/constants/platform'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
-import { spawn } from '@socketsecurity/registry/lib/spawn'
-import colors from 'yoctocolors-cjs'
-
-const __dirname = path.dirname(fileURLToPath(import.meta.url))
-const TOOLS_DIR = path.resolve(__dirname, '../additions/tools')
-
-// Module-level logger so every function below can log.
-const logger = getDefaultLogger()
-
-/**
- * Platform configuration.
- */
-const PLATFORM_CONFIG = {
- __proto__: null,
- darwin: {
- toolName: 'socket_macho_decompress',
- binaryFormat: 'Mach-O',
- buildCommand: 'make -f Makefile'
- },
- linux: {
- toolName: 'socket_elf_decompress',
- binaryFormat: 'ELF',
- buildCommand: 'make -f Makefile.linux'
- },
- win32: {
- toolName: 'socket_pe_decompress',
- binaryFormat: 'PE',
- buildCommand: 'mingw32-make -f Makefile.windows'
- }
-}
-
-/**
- * Parse command line arguments.
- */
-function parseArgs() {
- const args = process.argv.slice(2)
-
- if (!args.length) {
- logger.error('Usage: decompress-binary.mjs <compressed_binary> [args...]')
- logger.error('')
- logger.error('Examples:')
- logger.error(' node scripts/decompress-binary.mjs ./node.compressed')
- logger.error(' node scripts/decompress-binary.mjs ./node.compressed --version')
- logger.error(' node scripts/decompress-binary.mjs ./node.compressed script.js')
- process.exit(1)
- }
-
- const compressedPath = path.resolve(args[0])
- const binaryArgs = args.slice(1)
-
- return { compressedPath, binaryArgs }
-}
-
-/**
- * Get platform configuration.
- */
-function getPlatformConfig() {
- const platform = process.platform
- const config = PLATFORM_CONFIG[platform]
-
- if (!config) {
- throw new Error(`Unsupported platform: ${platform}. Supported: macOS, Linux, Windows`)
- }
-
- return config
-}
-
-/**
- * Build decompression tool if it doesn't exist.
- */
-async function ensureToolBuilt(config) {
- const toolPath = path.join(TOOLS_DIR, config.toolName)
- const toolPathExe = `${toolPath}.exe`
-
- // Check if tool exists (with or without .exe extension).
- if (existsSync(toolPath) || existsSync(toolPathExe)) {
- return existsSync(toolPathExe) ? toolPathExe : toolPath
- }
-
- logger.log(`Building ${config.binaryFormat} decompression tool...`)
- logger.log(` Command: ${config.buildCommand}`)
- logger.log('')
-
- // Pass an empty args array (same spawn signature as compress-binary.mjs).
- const result = await spawn(config.buildCommand, [], {
- cwd: TOOLS_DIR,
- shell: WIN32,
- stdio: 'inherit'
- })
-
- if (result.code !== 0) {
- throw new Error(`Failed to build decompression tool (exit code: ${result.code})`)
- }
-
- // Verify tool was built.
- if (!existsSync(toolPath) && !existsSync(toolPathExe)) {
- throw new Error(`Tool ${config.toolName} was not created after build`)
- }
-
- return existsSync(toolPathExe) ? toolPathExe : toolPath
-}
-
-/**
- * Decompress and execute binary using platform-specific tool.
- */
-async function decompressAndExecute(toolPath, compressedPath, binaryArgs, config) {
- // Validate compressed file exists.
- if (!existsSync(compressedPath)) {
- throw new Error(`Compressed file not found: ${compressedPath}`)
- }
-
- logger.log(`Decompressing ${config.binaryFormat} binary...`)
- logger.log(` Compressed: ${compressedPath}`)
- if (binaryArgs.length) {
- logger.log(` Arguments: ${binaryArgs.join(' ')}`)
- }
- logger.log('')
-
- // Build command arguments.
- const args = [compressedPath, ...binaryArgs] - - // Execute decompression tool (it will decompress and execute the binary). - const result = await spawn(toolPath, args, { - stdio: 'inherit' - }) - - // Exit with same code as the decompressed binary. - process.exit(result.code) -} - -/** - * Main function. - */ -async function main() { - try { - const { compressedPath, binaryArgs } = parseArgs() - const config = getPlatformConfig() - - logger.log('Socket Binary Decompression') - logger.log('===========================') - logger.log(`Platform: ${config.binaryFormat} (${process.platform})`) - logger.log('') - - // Ensure tool is built. - const toolPath = await ensureToolBuilt(config) - - // Decompress and execute binary. - await decompressAndExecute(toolPath, compressedPath, binaryArgs, config) - - } catch (e) { - logger.error(`Error: ${e.message}`) - process.exit(1) - } -} - -main() diff --git a/packages/node-smol-builder/scripts/optimize.mjs b/packages/node-smol-builder/scripts/optimize.mjs deleted file mode 100755 index e2a599e02..000000000 --- a/packages/node-smol-builder/scripts/optimize.mjs +++ /dev/null @@ -1,364 +0,0 @@ -#!/usr/bin/env node -/** - * Node.js Binary Optimization Script - * - * Applies platform-specific optimizations to reduce custom Node.js binary sizes: - * - macOS (darwin): strip, llvm-strip, code signing - * - Linux: strip --strip-all, objcopy section removal - * - Windows: strip --strip-all - * - * Target: Reduce from ~44MB to ~28-33MB per binary - * - * Usage: - * node packages/node-smol-builder/scripts/optimize.mjs [--platform=] - * node packages/node-smol-builder/scripts/optimize.mjs --all - * node packages/node-smol-builder/scripts/optimize.mjs # Optimize build/out/Release/node - */ - -import { execSync, spawn } from 'node:child_process' -import { existsSync, promises as fs } from 'node:fs' -import { platform as osPlatform } from 'node:os' -import path from 'node:path' -import { fileURLToPath } from 'node:url' -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import colors from 'yoctocolors-cjs' - -const __dirname = path.dirname(fileURLToPath(import.meta.url)) -const logger = getDefaultLogger() -const packageDir = path.join(__dirname, '..') -const rootDir = path.join(packageDir, '../..') - -// Parse command line arguments. -const args = process.argv.slice(2) -let binaryPath = null -let targetPlatform = null -let optimizeAll = false - -for (let i = 0; i < args.length; i++) { - const arg = args[i] - if (arg === '--all') { - optimizeAll = true - } else if (arg.startsWith('--platform=')) { - targetPlatform = arg.slice(11) - } else if (!arg.startsWith('--')) { - binaryPath = arg - } -} - -/** - * Get file size in MB. - */ -async function getFileSizeMB(filePath) { - const stats = await fs.stat(filePath) - return (stats.size / (1024 * 1024)).toFixed(2) -} - -/** - * Check if a command exists. - */ -function commandExists(cmd) { - try { - execSync(`which ${cmd}`, { stdio: 'ignore' }) - return true - } catch { - return false - } -} - -/** - * Execute a command with error handling. - */ -function exec(command, args, options = {}) { - logger.log(` $ ${command} ${args.join(' ')}`) - try { - execSync(`${command} ${args.join(' ')}`, { - stdio: 'inherit', - ...options, - }) - return true - } catch (e) { - logger.error(` ✗ Command failed: ${e.message}`) - return false - } -} - -/** - * Optimize binary for macOS (darwin). 
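- * Strips with strip/llvm-strip, then re-signs ad hoc on Apple Silicon,
- * since stripping invalidates the existing code signature.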
- */ -async function optimizeDarwin(binaryPath) { - logger.log('\n🍎 Optimizing macOS binary...') - - const beforeSize = await getFileSizeMB(binaryPath) - logger.log(` Before: ${beforeSize} MB`) - - // Phase 1: Basic stripping. - if (commandExists('strip')) { - logger.log('\n Phase 1: Basic stripping') - exec('strip', [binaryPath]) - } - - // Phase 2: Aggressive stripping with llvm-strip (often better than strip on macOS). - if (commandExists('llvm-strip')) { - logger.log('\n Phase 2: LLVM aggressive stripping') - exec('llvm-strip', [binaryPath]) - } else { - logger.log('\n Phase 2: Aggressive stripping (strip --strip-all)') - exec('strip', ['--strip-all', binaryPath]) - } - - // Phase 3: Remove unnecessary Mach-O sections. - logger.log('\n Phase 3: Remove unnecessary sections') - // Note: Most Mach-O section removal requires specialized tools. - // strip and llvm-strip already handle this well. - - const afterSize = await getFileSizeMB(binaryPath) - const savings = ((beforeSize - afterSize) / beforeSize * 100).toFixed(1) - logger.log(`\n After: ${afterSize} MB (${savings}% reduction)`) - - // Re-sign binary if on macOS ARM64 (required). - if (osPlatform() === 'darwin' && process.arch === 'arm64') { - logger.log('\n Phase 4: Code signing') - exec('codesign', ['--force', '--sign', '-', binaryPath]) - } - - return { before: parseFloat(beforeSize), after: parseFloat(afterSize), savings: parseFloat(savings) } -} - -/** - * Optimize binary for Linux. - */ -async function optimizeLinux(binaryPath) { - logger.log('\n🐧 Optimizing Linux binary...') - - const beforeSize = await getFileSizeMB(binaryPath) - logger.log(` Before: ${beforeSize} MB`) - - // Phase 1: Aggressive stripping. - logger.log('\n Phase 1: Aggressive stripping') - exec('strip', ['--strip-all', binaryPath]) - - // Phase 2: Remove unnecessary ELF sections. - if (commandExists('objcopy')) { - logger.log('\n Phase 2: Remove unnecessary ELF sections') - const sections = [ - '.note.ABI-tag', - '.note.gnu.build-id', - '.comment', - '.gnu.version', - ] - - for (const section of sections) { - exec('objcopy', [`--remove-section=${section}`, binaryPath]) - } - } - - // Phase 3: Super strip (sstrip) if available. - if (commandExists('sstrip')) { - logger.log('\n Phase 3: Super strip (removes section headers)') - exec('sstrip', [binaryPath]) - } - - const afterSize = await getFileSizeMB(binaryPath) - const savings = ((beforeSize - afterSize) / beforeSize * 100).toFixed(1) - logger.log(`\n After: ${afterSize} MB (${savings}% reduction)`) - - return { before: parseFloat(beforeSize), after: parseFloat(afterSize), savings: parseFloat(savings) } -} - -/** - * Optimize binary for Windows. - */ -async function optimizeWindows(binaryPath) { - logger.log('\n🪟 Optimizing Windows binary...') - - const beforeSize = await getFileSizeMB(binaryPath) - logger.log(` Before: ${beforeSize} MB`) - - // Phase 1: Aggressive stripping. - // Note: Windows binaries are typically cross-compiled on Linux/macOS with mingw. - logger.log('\n Phase 1: Aggressive stripping') - - // Try mingw-strip for Windows binaries. 
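- // GNU-style (mingw) strip supports --strip-all for PE targets; plain
- // strip is a best-effort fallback when no mingw toolchain is installed.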
- if (commandExists('x86_64-w64-mingw32-strip')) {
- exec('x86_64-w64-mingw32-strip', ['--strip-all', binaryPath])
- } else if (commandExists('strip')) {
- exec('strip', ['--strip-all', binaryPath])
- }
-
- const afterSize = await getFileSizeMB(binaryPath)
- const savings = ((beforeSize - afterSize) / beforeSize * 100).toFixed(1)
- logger.log(`\n After: ${afterSize} MB (${savings}% reduction)`)
-
- return { before: parseFloat(beforeSize), after: parseFloat(afterSize), savings: parseFloat(savings) }
-}
-
-/**
- * Optimize a single binary.
- */
-async function optimizeBinary(binaryPath, platform) {
- // Detect platform from binary path if not specified.
- if (!platform) {
- if (binaryPath.includes('darwin')) {
- platform = 'darwin'
- } else if (binaryPath.includes('linux') || binaryPath.includes('alpine')) {
- platform = 'linux'
- } else if (binaryPath.includes('win32') || binaryPath.endsWith('.exe')) {
- platform = 'win32'
- } else {
- platform = osPlatform()
- }
- }
-
- logger.log(`\n📦 Optimizing: ${path.basename(binaryPath)}`)
- logger.log(` Platform: ${platform}`)
-
- // Check binary exists.
- if (!existsSync(binaryPath)) {
- logger.error(`\n${colors.red('✗')} Binary not found: ${binaryPath}`)
- return null
- }
-
- // Apply platform-specific optimizations.
- let result
- switch (platform) {
- case 'darwin':
- result = await optimizeDarwin(binaryPath)
- break
- case 'linux':
- case 'alpine':
- result = await optimizeLinux(binaryPath)
- break
- case 'win32':
- result = await optimizeWindows(binaryPath)
- break
- default:
- logger.error(`\n${colors.red('✗')} Unsupported platform: ${platform}`)
- return null
- }
-
- logger.log(`\n${colors.green('✓')} Optimization complete!`)
- return result
-}
-
-/**
- * Find and optimize all platform binaries.
- */
-async function optimizeAllBinaries() {
- logger.log('🔍 Finding all platform binaries...\n')
-
- const packagesDir = path.join(rootDir, 'packages')
- const binaryPatterns = [
- 'socketbin-cli-*/bin/socket',
- 'socketbin-cli-*/bin/socket.exe',
- ]
-
- const binaries = []
- for (const pattern of binaryPatterns) {
- // Take the basename of the pattern ('socket' or 'socket.exe').
- const file = pattern.split('/').at(-1)
- const packages = await fs.readdir(packagesDir)
-
- for (const pkg of packages) {
- if (pkg.startsWith('socketbin-cli-')) {
- const binPath = path.join(packagesDir, pkg, 'bin', file.replace('*', ''))
- if (existsSync(binPath)) {
- const stats = await fs.stat(binPath)
- // Only process actual binaries (>1MB), not placeholders.
- if (stats.size > 1024 * 1024) {
- binaries.push(binPath)
- }
- }
- }
- }
- }
-
- if (binaries.length === 0) {
- logger.log(`${colors.yellow('⚠')} No binaries found to optimize`)
- logger.log(' Run build first: pnpm run build:platforms')
- return []
- }
-
- logger.log(`Found ${binaries.length} binaries to optimize:\n`)
- binaries.forEach(b => logger.log(` - ${path.relative(rootDir, b)}`))
-
- const results = []
- for (const binaryPath of binaries) {
- const result = await optimizeBinary(binaryPath, null)
- if (result) {
- results.push({ path: binaryPath, ...result })
- }
- }
-
- return results
-}
-
-/**
- * Main entry point.
- */
-async function main() {
- logger.log('⚡ Node.js Binary Size Optimizer')
- logger.log('='.repeat(50))
-
- let results = []
-
- if (optimizeAll) {
- results = await optimizeAllBinaries()
- } else {
- // Default to build/out/Release/node if no binary specified.
- if (!binaryPath) { - binaryPath = path.join(packageDir, 'build/out/Release/node') - if (!existsSync(binaryPath)) { - logger.error(`\n${colors.red('✗')} Error: No binary found at default path`) - logger.log('\nUsage:') - logger.log(' node packages/node-smol-builder/scripts/optimize.mjs [binary-path] [--platform=]') - logger.log(' node packages/node-smol-builder/scripts/optimize.mjs --all') - logger.log('\nExamples:') - logger.log(' node packages/node-smol-builder/scripts/optimize.mjs') - logger.log(' node packages/node-smol-builder/scripts/optimize.mjs build/out/Release/node') - logger.log(' node packages/node-smol-builder/scripts/optimize.mjs --all') - logger.log(`\nDefault path: ${binaryPath}`) - process.exit(1) - } - } - - const result = await optimizeBinary(binaryPath, targetPlatform) - if (result) { - results.push({ path: binaryPath, ...result }) - } - } - - // Summary. - if (results.length > 0) { - logger.log('\n' + '='.repeat(50)) - logger.log('📊 Optimization Summary') - logger.log('='.repeat(50)) - logger.log('') - - let totalBefore = 0 - let totalAfter = 0 - - for (const { path: binPath, before, after, savings } of results) { - totalBefore += before - totalAfter += after - logger.log(` ${path.basename(binPath)}:`) - logger.log(` Before: ${before.toFixed(2)} MB`) - logger.log(` After: ${after.toFixed(2)} MB`) - logger.log(` Saved: ${(before - after).toFixed(2)} MB (${savings.toFixed(1)}%)`) - logger.log('') - } - - if (results.length > 1) { - const totalSavings = ((totalBefore - totalAfter) / totalBefore * 100).toFixed(1) - logger.log(' Total:') - logger.log(` Before: ${totalBefore.toFixed(2)} MB`) - logger.log(` After: ${totalAfter.toFixed(2)} MB`) - logger.log(` Saved: ${(totalBefore - totalAfter).toFixed(2)} MB (${totalSavings}%)`) - } - - logger.log(`\n${colors.green('✓')} All optimizations complete!`) - } -} - -main().catch(error => { - logger.error(`\n${colors.red('✗')} Optimization failed:`, error.message) - process.exit(1) -}) diff --git a/packages/node-smol-builder/test/package.test.mjs b/packages/node-smol-builder/test/package.test.mjs deleted file mode 100644 index 46a179e15..000000000 --- a/packages/node-smol-builder/test/package.test.mjs +++ /dev/null @@ -1,219 +0,0 @@ -/** - * @fileoverview Tests for @socketbin/custom-node package structure and configuration. 
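- *
- * Validates package.json metadata, script presence, documentation, and
- * directory layout without actually compiling Node.js (build execution
- * tests are skipped below).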
- */ - -import { existsSync } from 'node:fs' -import { promises as fs } from 'node:fs' -import path from 'node:path' -import { fileURLToPath } from 'node:url' - -import { describe, expect, it } from 'vitest' - -const __dirname = path.dirname(fileURLToPath(import.meta.url)) -const packageDir = path.join(__dirname, '..') -const scriptsDir = path.join(packageDir, 'scripts') -const buildDir = path.join(packageDir, 'build') - -describe('@socketbin/custom-node package', () => { - describe('package.json validation', () => { - it('should have valid package.json metadata', async () => { - const pkgJson = JSON.parse( - await fs.readFile(path.join(packageDir, 'package.json'), 'utf-8'), - ) - - expect(pkgJson.name).toBe('@socketbin/custom-node') - expect(pkgJson.version).toMatch(/^\d+\.\d+\.\d+$/) - expect(pkgJson.license).toBe('MIT') - expect(pkgJson.description).toContain('Custom Node.js') - expect(pkgJson.private).toBe(true) - }) - - it('should have build scripts', async () => { - const pkgJson = JSON.parse( - await fs.readFile(path.join(packageDir, 'package.json'), 'utf-8'), - ) - - expect(pkgJson.scripts).toBeDefined() - expect(pkgJson.scripts.build).toBe('node scripts/build.mjs') - expect(pkgJson.scripts['build:all']).toBe('node scripts/build.mjs --all-platforms') - }) - }) - - describe('build scripts exist', () => { - it('should have build.mjs script', () => { - const buildPath = path.join(scriptsDir, 'build.mjs') - expect(existsSync(buildPath)).toBe(true) - }) - - it('build.mjs should be valid JavaScript', async () => { - const buildPath = path.join(scriptsDir, 'build.mjs') - const content = await fs.readFile(buildPath, 'utf-8') - - // Should not throw syntax errors. - expect(content).toBeTruthy() - expect(content).toContain('import') - expect(content).toContain('Node.js') - }) - }) - - describe('build script documentation', () => { - it('build.mjs should document binary size optimization', async () => { - const buildPath = path.join(scriptsDir, 'build.mjs') - const content = await fs.readFile(buildPath, 'utf-8') - - expect(content).toContain('Binary Size Optimization') - expect(content).toContain('TARGET ACHIEVED') - expect(content).toContain('MB') - }) - - it('build.mjs should document configuration flags', async () => { - const buildPath = path.join(scriptsDir, 'build.mjs') - const content = await fs.readFile(buildPath, 'utf-8') - - expect(content).toContain('--with-intl=none') - expect(content).toContain('--v8-lite-mode') - }) - - it('build.mjs should document compression approach', async () => { - const buildPath = path.join(scriptsDir, 'build.mjs') - const content = await fs.readFile(buildPath, 'utf-8') - - expect(content).toContain('Compression Approach') - expect(content).toContain('Brotli') - }) - - it('build.mjs should document performance impact', async () => { - const buildPath = path.join(scriptsDir, 'build.mjs') - const content = await fs.readFile(buildPath, 'utf-8') - - expect(content).toContain('Performance Impact') - expect(content).toContain('Startup overhead') - expect(content).toContain('Runtime performance') - }) - - it('build.mjs should document usage options', async () => { - const buildPath = path.join(scriptsDir, 'build.mjs') - const content = await fs.readFile(buildPath, 'utf-8') - - expect(content).toContain('--clean') - expect(content).toContain('--verify') - expect(content).toContain('--test') - }) - }) - - describe('build directory structure', () => { - it('should have build directory', () => { - expect(existsSync(buildDir)).toBe(true) - }) - - it('should have 
wasm-bundle subdirectory', () => { - const wasmBundleDir = path.join(packageDir, 'wasm-bundle') - expect(existsSync(wasmBundleDir)).toBe(true) - }) - - it('wasm-bundle should have Cargo.toml', () => { - const cargoPath = path.join(packageDir, 'wasm-bundle', 'Cargo.toml') - expect(existsSync(cargoPath)).toBe(true) - }) - }) - - describe('README documentation', () => { - it('should have README.md', () => { - const readmePath = path.join(packageDir, 'README.md') - expect(existsSync(readmePath)).toBe(true) - }) - - it('README should document what it does', async () => { - const readmePath = path.join(packageDir, 'README.md') - const readme = await fs.readFile(readmePath, 'utf-8') - - expect(readme).toContain('Custom Node.js') - expect(readme).toContain('Socket security patches') - expect(readme).toContain('v24.10.0') - }) - - it('README should document build process', async () => { - const readmePath = path.join(packageDir, 'README.md') - const readme = await fs.readFile(readmePath, 'utf-8') - - expect(readme).toContain('pnpm run build') - expect(readme).toContain('Downloads') - expect(readme).toContain('patches') - expect(readme).toContain('compiles') - }) - - it('README should document output location', async () => { - const readmePath = path.join(packageDir, 'README.md') - const readme = await fs.readFile(readmePath, 'utf-8') - - expect(readme).toContain('build/out') - }) - }) - - describe('package is private', () => { - it('should be marked as private', async () => { - const pkgJson = JSON.parse( - await fs.readFile(path.join(packageDir, 'package.json'), 'utf-8'), - ) - - expect(pkgJson.private).toBe(true) - }) - - it('should not have publishConfig for npm', async () => { - const pkgJson = JSON.parse( - await fs.readFile(path.join(packageDir, 'package.json'), 'utf-8'), - ) - - // Private package should not configure npm publishing. - expect(pkgJson.publishConfig).toBeUndefined() - }) - }) - - describe('build script structure', () => { - it('build.mjs should import required dependencies', async () => { - const buildPath = path.join(scriptsDir, 'build.mjs') - const content = await fs.readFile(buildPath, 'utf-8') - - // Check for key imports. - expect(content).toContain("from 'node:fs'") - }) - - it('build.mjs should reference Node.js version', async () => { - const buildPath = path.join(scriptsDir, 'build.mjs') - const content = await fs.readFile(buildPath, 'utf-8') - - expect(content).toContain('v24.10.0') - }) - - it('build.mjs should reference Socket patches', async () => { - const buildPath = path.join(scriptsDir, 'build.mjs') - const content = await fs.readFile(buildPath, 'utf-8') - - expect(content).toContain('Socket') - expect(content).toContain('patch') - }) - }) - - // Note: Actual build execution tests are skipped because: - // - Builds take 5-10 minutes - // - Require compilation toolchain (gcc, make, python) - // - Require ~1GB disk space for source - // - Platform-specific build process - // - Best tested manually or in dedicated CI jobs - describe.skip('build execution (manual/CI only)', () => { - it('should build custom Node.js binary', async () => { - // This test is skipped by default. - // To run: FULL_BUILD_TEST=1 pnpm test - }) - - it('should apply Socket patches', async () => { - // This test is skipped by default. - // To run: FULL_BUILD_TEST=1 pnpm test - }) - - it('should produce binary under 30MB', async () => { - // This test is skipped by default. 
- // To run: FULL_BUILD_TEST=1 pnpm test - }) - }) -}) diff --git a/packages/node-smol-builder/vitest.config.mts b/packages/node-smol-builder/vitest.config.mts deleted file mode 100644 index 019736bcc..000000000 --- a/packages/node-smol-builder/vitest.config.mts +++ /dev/null @@ -1,6 +0,0 @@ -/** - * Extends shared simple vitest config. - */ -import baseConfig from '../../vitest.config.simple.mts' - -export default baseConfig diff --git a/packages/node-smol-builder/wasm-bundle/.gitignore b/packages/node-smol-builder/wasm-bundle/.gitignore deleted file mode 100644 index a56e7724e..000000000 --- a/packages/node-smol-builder/wasm-bundle/.gitignore +++ /dev/null @@ -1,10 +0,0 @@ -# Rust build artifacts -/target/ -/pkg/ -Cargo.lock - -# Editor files -*.swp -*.swo -*~ -.DS_Store diff --git a/packages/node-smol-builder/wasm-bundle/Cargo.toml b/packages/node-smol-builder/wasm-bundle/Cargo.toml deleted file mode 100644 index 3872ac678..000000000 --- a/packages/node-smol-builder/wasm-bundle/Cargo.toml +++ /dev/null @@ -1,47 +0,0 @@ -[package] -name = "socket-ai" -version = "0.1.0" -edition = "2021" -authors = ["Socket Security "] -description = "Unified WASM bundle for Socket CLI AI features" -license = "MIT" - -[lib] -crate-type = ["cdylib"] - -[dependencies] -wasm-bindgen = "0.2" - -[features] -default = [] -no-models = [] # Build without embedding models (for testing build scripts) -minilm-only = [] # Build with only MiniLM model (~17 MB) -codet5-only = [] # Build with only CodeT5 model (~90 MB) -unoptimized-wasm = [] # Use original unoptimized WASM files - -[profile.release] -# Optimize for size (no backward compat concerns). -lto = "thin" # Thin LTO is faster than full, still effective (5-10% faster builds) -opt-level = "z" # Optimize for size (critical for WASM) -codegen-units = 1 # Single unit for maximum optimization -strip = true # Strip symbols for smaller WASM (saves 5-10%) -panic = "abort" # Reduce binary size by not including unwinding code -overflow-checks = false # Disable overflow checks in release (slight perf boost) -debug-assertions = false # Disable debug assertions (smaller binary) - -# OPTIMIZATION: Fast dev builds for WASM (3-5x faster iteration) -# Use: cargo build --target wasm32-unknown-unknown --profile dev-wasm -[profile.dev-wasm] -inherits = "dev" -opt-level = 1 # Minimal optimization for faster builds -lto = false # Disable LTO in dev -codegen-units = 16 # More units = faster parallel compilation -incremental = false # Disable incremental for WASM (conflicts with embed-bitcode) -debug = false # Disable debug info to reduce WASM size -strip = true # Strip symbols even in dev for smaller WASM -panic = "abort" # Reduce binary size - -# Strip debug info from dependencies. -[profile.release.package."*"] -opt-level = "z" -strip = true diff --git a/packages/node-smol-builder/wasm-bundle/README.md b/packages/node-smol-builder/wasm-bundle/README.md deleted file mode 100644 index 57c651ba3..000000000 --- a/packages/node-smol-builder/wasm-bundle/README.md +++ /dev/null @@ -1,317 +0,0 @@ -# Socket CLI Unified WASM Bundle - -Single WASM file containing all AI models and execution engines for Socket CLI. 
- -## Architecture - -``` -socket-ai.wasm (~115MB) - ├─ ONNX Runtime (~2-5MB) - ML execution engine - ├─ MiniLM model (~17MB int8) - Semantic understanding - ├─ CodeT5 encoder (~30MB int4) - Code generation (encoder) - ├─ CodeT5 decoder (~60MB int4) - Code generation (decoder) - ├─ Tokenizers (~1MB) - Text tokenization - └─ Yoga Layout (~95KB) - Flexbox layout engine -``` - -## Build Process - -### 1. Check Prerequisites - -```bash -node scripts/wasm/check-rust-toolchain.mjs -``` - -**What it does**: -- Checks for Rust/cargo installation -- Installs Rust via rustup if missing -- Installs wasm32-unknown-unknown target -- Installs wasm-pack - -### 2. Download Models - -```bash -node scripts/wasm/download-models.mjs -``` - -**What it downloads**: -- MiniLM model (pre-quantized from HuggingFace) -- CodeT5 tokenizer (from HuggingFace) -- ONNX Runtime WASM (from node_modules) -- Yoga Layout WASM (from node_modules) - -**What needs conversion** (see next step): -- CodeT5 encoder (PyTorch → ONNX int4) -- CodeT5 decoder (PyTorch → ONNX int4) - -### 3. Convert CodeT5 Models (One-Time) - -```bash -# Requires Python + optimum[onnxruntime] -pip install optimum[onnxruntime] torch - -# Convert models -node scripts/wasm/convert-codet5.mjs -``` - -**What it does**: -- Downloads `Salesforce/codet5-small` from HuggingFace -- Exports PyTorch → ONNX format -- Quantizes fp32 → int4 (4-bit weights, 50% smaller than int8) -- Saves to `.cache/models/` - -### 4. Build Unified WASM - -```bash -node scripts/wasm/build-unified-wasm.mjs -``` - -**What it does**: -1. Runs prerequisite checks (Rust, models) -2. Builds Rust project with wasm-pack -3. Optimizes with wasm-opt (if available) -4. Embeds WASM as brotli-compressed base64 in JavaScript -5. Generates `external/socket-ai-sync.mjs` - -**Output**: -- `wasm-bundle/pkg/socket_ai_bg.wasm` (~115MB) -- `external/socket-ai-sync.mjs` (brotli+base64 embedded) - -## Distribution Pipeline - -``` -external/socket-ai-sync.mjs (~50-70MB brotli+base64) - ↓ -Rollup bundles into dist/cli.js - ↓ -Brotli compress entire bundle - ↓ -dist/cli.js.bz (~20-30MB estimated) - ↓ -Native stub OR index.js detects .bz extension - ↓ -Decompresses with built-in zlib.brotliDecompress - ↓ -Runs in vm.Module -``` - -## Usage - -### Load WASM Module - -```javascript -import { loadWasmSync, getWasmExports } from './external/socket-ai-sync.mjs' - -// Initialize WASM (one-time, ~50-100ms) -loadWasmSync() - -// Access exports -const exports = getWasmExports() -``` - -### Load Models - -```javascript -import { - loadCodet5DecoderSync, - loadCodet5EncoderSync, - loadCodet5TokenizerSync, - loadMinilmModelSync, - loadMinilmTokenizerSync, -} from './external/socket-ai-sync.mjs' - -// Load MiniLM -const minilmModel = loadMinilmModelSync() // Uint8Array -const minilmTokenizer = loadMinilmTokenizerSync() // JSON object - -// Load CodeT5 -const encoder = loadCodet5EncoderSync() // Uint8Array -const decoder = loadCodet5DecoderSync() // Uint8Array -const tokenizer = loadCodet5TokenizerSync() // JSON object -``` - -### Use with ONNX Runtime - -```javascript -import { InferenceSession } from 'onnxruntime-web' -import { loadCodet5EncoderSync } from './external/socket-ai-sync.mjs' - -// Load encoder from WASM linear memory -const encoderBytes = loadCodet5EncoderSync() - -// Create ONNX session -const session = await InferenceSession.create(encoderBytes.buffer) - -// Run inference -const outputs = await session.run({ input_ids: inputTensor }) -``` - -## File Structure - -``` -wasm-bundle/ -├── Cargo.toml # Rust project 
config -├── src/ -│ └── lib.rs # WASM exports (model pointers) -├── pkg/ # wasm-pack output (gitignored) -│ └── socket_ai_bg.wasm # Built WASM bundle -└── README.md # This file - -scripts/wasm/ -├── check-rust-toolchain.mjs # Install Rust if needed -├── download-models.mjs # Download model assets -├── convert-codet5.mjs # Convert CodeT5 (TODO) -└── build-unified-wasm.mjs # Main build script - -external/ -└── socket-ai-sync.mjs # Generated loader (base64 WASM) - -.cache/models/ # Downloaded models (gitignored) -├── minilm-int8.onnx -├── minilm-tokenizer.json -├── codet5-encoder-int4.onnx -├── codet5-decoder-int4.onnx -├── codet5-tokenizer.json -├── ort-wasm-simd-threaded.wasm -└── yoga.wasm -``` - -## Next Steps - -### 1. Create CodeT5 Conversion Script - -```bash -# NOTE: Already created - see scripts/wasm/convert-codet5.mjs -# Uses Python + optimum to convert PyTorch → ONNX int4 -``` - -### 2. Update Native Stub for .bz Detection - -**File**: `bin/bootstrap.js` or native stub - -**Add**: -```javascript -const { brotliDecompressSync } = require('node:zlib') -const { readFileSync } = require('node:fs') - -const CLI_ENTRY_BZ = join(CLI_PACKAGE_DIR, 'dist', 'cli.js.bz') - -if (existsSync(CLI_ENTRY_BZ)) { - // Decompress and run - const compressed = readFileSync(CLI_ENTRY_BZ) - const decompressed = brotliDecompressSync(compressed) - - // Run in vm.Module or eval - require('vm').runInThisContext(decompressed, { - filename: 'cli.js', - }) -} else if (existsSync(CLI_ENTRY)) { - // Fallback to uncompressed - require(CLI_ENTRY) -} -``` - -### 3. Update Rollup Config - -Add brotli compression step after bundling: - -```javascript -// .config/rollup.dist.config.mjs -import { brotliCompressSync } from 'node:zlib' - -export default { - // ... existing config - - plugins: [ - // ... existing plugins - - { - name: 'brotli-compress', - writeBundle(options, bundle) { - for (const fileName in bundle) { - if (fileName === 'cli.js') { - const jsFile = join(options.dir, fileName) - const code = readFileSync(jsFile) - const compressed = brotliCompressSync(code) - writeFileSync(`${jsFile}.bz`, compressed) - console.log(`✓ Compressed ${fileName} → ${fileName}.bz`) - } - } - } - } - ] -} -``` - -### 4. 
Test End-to-End - -```bash -# Build everything -pnpm run build - -# Test compressed bundle -node bin/bootstrap.js --version -``` - -## Benefits - -| Metric | Before (3 files) | After (1 file) | -|--------|------------------|----------------| -| **Assets** | 3 separate WASM/JS | 1 unified WASM | -| **Initialization** | ~150-200ms | ~50-100ms | -| **Memory layout** | Fragmented | Contiguous | -| **Distribution** | Complex | Simple | -| **Size (raw)** | ~140MB total | ~115MB (30MB savings with int4) | -| **Size (brotli)** | N/A | ~50-70MB base64 | -| **Size (final)** | ~10MB | ~20-30MB (estimated) | - -## Troubleshooting - -### Rust Not Found - -```bash -# Install Rust manually -curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -``` - -### wasm-pack Build Fails - -```bash -# Check Rust version -rustc --version - -# Update toolchain -rustup update stable - -# Clean and rebuild -rm -rf wasm-bundle/target wasm-bundle/pkg -node scripts/wasm/build-unified-wasm.mjs -``` - -### CodeT5 Models Missing - -Run the conversion script (requires Python): - -```bash -pip install optimum[onnxruntime] torch -node scripts/wasm/convert-codet5.mjs -``` - -### WASM Too Large - -The WASM file is ~115MB which is expected given: -- CodeT5 encoder: 30MB (int4) -- CodeT5 decoder: 60MB (int4) -- MiniLM: 17MB (int8) -- ONNX Runtime: 2-5MB -- Yoga: <1MB - -After brotli compression in the final distribution, it should be ~20-30MB. - -## References - -- [wasm-pack Documentation](https://rustwasm.github.io/wasm-pack/) -- [wasm-bindgen Guide](https://rustwasm.github.io/wasm-bindgen/) -- [ONNX Runtime Web](https://onnxruntime.ai/docs/tutorials/web/) -- [CodeT5 Paper](https://arxiv.org/abs/2109.00859) -- [MiniLM Model](https://huggingface.co/sentence-transformers/paraphrase-MiniLM-L3-v2) diff --git a/packages/node-smol-builder/wasm-bundle/scripts/build.mjs b/packages/node-smol-builder/wasm-bundle/scripts/build.mjs deleted file mode 100755 index d46ea05f8..000000000 --- a/packages/node-smol-builder/wasm-bundle/scripts/build.mjs +++ /dev/null @@ -1,218 +0,0 @@ -#!/usr/bin/env node -/** - * Build script for Socket AI unified WASM bundle. - * - * Creates optimized WASM bundle with embedded models: - * - MiniLM INT4 (17 MB) - * - CodeT5 INT4 (90 MB) - * - ONNX Runtime SIMD (3 MB) - * - Yoga Layout (95 KB) - * - * Directory structure: - * - build/ - Work in progress (cargo target, intermediate files) - * - dist/ - Final production build - * - * Usage: - * node scripts/build.mjs [options] - * - * Options: - * --no-models Build without embedding models (fast, for testing) - * --clean Clean build and dist directories before building - * --help Show this help message - */ - -import { execSync } from 'node:child_process' -import { existsSync, mkdirSync } from 'node:fs' -import { promises as fs } from 'node:fs' -import path from 'node:path' -import { fileURLToPath } from 'node:url' - -const __filename = fileURLToPath(import.meta.url) -const __dirname = path.dirname(__filename) -const PACKAGE_ROOT = path.resolve(__dirname, '..') -const BUILD_DIR = path.join(PACKAGE_ROOT, 'build') -const DIST_DIR = path.join(PACKAGE_ROOT, 'dist') -const TARGET_DIR = path.join(PACKAGE_ROOT, 'target') - -// Parse command line arguments. 
-const args = process.argv.slice(2) -const noModels = args.includes('--no-models') -const clean = args.includes('--clean') -const help = args.includes('--help') - -if (help) { - console.log(` -Socket AI WASM Bundle Builder - -Usage: node scripts/build.mjs [options] - -Options: - --no-models Build without embedding models (fast, for testing) - --clean Clean build and dist directories before building - --help Show this help message - -Examples: - node scripts/build.mjs # Full build with models - node scripts/build.mjs --no-models # Fast build for testing - node scripts/build.mjs --clean # Clean and rebuild -`) - process.exit(0) -} - -function exec(command, options = {}) { - console.log(`$ ${command}`) - return execSync(command, { - cwd: PACKAGE_ROOT, - stdio: 'inherit', - ...options, - }) -} - -async function cleanDirectories() { - console.log('\nCleaning build directories...') - - const dirsToClean = [BUILD_DIR, DIST_DIR, TARGET_DIR] - - for (const dir of dirsToClean) { - if (existsSync(dir)) { - console.log(` Removing ${path.basename(dir)}/`) - await fs.rm(dir, { recursive: true, force: true }) - } - } -} - -async function createDirectories() { - console.log('\nCreating directories...') - - for (const dir of [BUILD_DIR, DIST_DIR]) { - if (!existsSync(dir)) { - console.log(` Creating ${path.basename(dir)}/`) - mkdirSync(dir, { recursive: true }) - } - } -} - -async function buildWasm() { - console.log('\nBuilding WASM bundle...') - - const features = [] - if (noModels) { - features.push('no-models') - console.log(' Mode: Fast build (no models embedded)') - } else { - console.log(' Mode: Full build (models embedded)') - } - - const featuresFlag = features.length > 0 ? `--features ${features.join(',')}` : '' - const cargoCommand = `cargo build --release --target wasm32-unknown-unknown ${featuresFlag}`.trim() - - exec(cargoCommand) - - // Copy built WASM to build directory. - const wasmSource = path.join( - TARGET_DIR, - 'wasm32-unknown-unknown', - 'release', - 'socket_ai.wasm' - ) - const wasmBuild = path.join(BUILD_DIR, 'socket_ai.wasm') - - if (existsSync(wasmSource)) { - await fs.copyFile(wasmSource, wasmBuild) - const stats = await fs.stat(wasmBuild) - const sizeMB = (stats.size / (1024 * 1024)).toFixed(1) - console.log(` Built: ${sizeMB} MB → build/socket_ai.wasm`) - } else { - throw new Error(`Build failed: ${wasmSource} not found`) - } - - return wasmBuild -} - -async function optimizeWasm(inputPath) { - console.log('\nOptimizing WASM...') - - // Check if wasm-opt is available. 
- try { - execSync('wasm-opt --version', { stdio: 'ignore' }) - } catch { - console.log(' Warning: wasm-opt not found, skipping optimization') - console.log(' Install: brew install binaryen') - return inputPath - } - - const optimizedPath = path.join(BUILD_DIR, 'socket_ai.optimized.wasm') - - exec( - `wasm-opt -Oz --enable-simd --enable-bulk-memory ${inputPath} -o ${optimizedPath}` - ) - - const originalSize = (await fs.stat(inputPath)).size - const optimizedSize = (await fs.stat(optimizedPath)).size - const reduction = (((originalSize - optimizedSize) / originalSize) * 100).toFixed(1) - - console.log( - ` Optimized: ${(optimizedSize / (1024 * 1024)).toFixed(1)} MB (${reduction}% reduction)` - ) - - return optimizedPath -} - -async function copyToDist(sourcePath) { - console.log('\nCopying to dist/...') - - const distPath = path.join(DIST_DIR, 'socket_ai.wasm') - await fs.copyFile(sourcePath, distPath) - - const stats = await fs.stat(distPath) - const sizeMB = (stats.size / (1024 * 1024)).toFixed(1) - console.log(` Final: ${sizeMB} MB → dist/socket_ai.wasm`) - - return distPath -} - -async function printSummary(finalPath) { - console.log('\n' + '='.repeat(50)) - console.log('Build Summary') - console.log('='.repeat(50)) - - const stats = await fs.stat(finalPath) - const sizeMB = (stats.size / (1024 * 1024)).toFixed(1) - - console.log(`Mode: ${noModels ? 'Fast (no models)' : 'Full (with models)'}`) - console.log(`Output: ${path.relative(PACKAGE_ROOT, finalPath)}`) - console.log(`Size: ${sizeMB} MB`) - - if (!noModels) { - console.log('\nEmbedded models:') - console.log(' - MiniLM INT4 (~17 MB)') - console.log(' - CodeT5 Encoder INT4 (~34 MB)') - console.log(' - CodeT5 Decoder INT4 (~56 MB)') - console.log(' - ONNX Runtime SIMD (~3 MB)') - console.log(' - Yoga Layout (~95 KB)') - } - - console.log('\nBuild complete!') -} - -async function main() { - try { - console.log('Socket AI WASM Bundle Builder') - console.log('=' .repeat(50)) - - if (clean) { - await cleanDirectories() - } - - await createDirectories() - const builtWasm = await buildWasm() - const optimizedWasm = await optimizeWasm(builtWasm) - const finalWasm = await copyToDist(optimizedWasm) - await printSummary(finalWasm) - } catch (error) { - console.error('\nBuild failed:', error.message) - process.exit(1) - } -} - -main() diff --git a/packages/node-smol-builder/wasm-bundle/src/lib.rs b/packages/node-smol-builder/wasm-bundle/src/lib.rs deleted file mode 100644 index f499ec942..000000000 --- a/packages/node-smol-builder/wasm-bundle/src/lib.rs +++ /dev/null @@ -1,191 +0,0 @@ -/** - * Socket CLI Unified WASM Bundle - * - * Embeds all AI models and WASM modules: - * - ONNX Runtime (~3MB SIMD-only, single-threaded) - * - MiniLM model (~17MB int8) - * - CodeT5 encoder (~30MB int4) - * - CodeT5 decoder (~60MB int4) - * - Tokenizers (~1MB) - * - Yoga Layout (~95KB) - * - * INT4 Quantization: - * - CodeT5 models use INT4 (4-bit weights) for 50% size reduction - * - Only 1-2% quality loss compared to INT8 - * - Total: ~115MB (vs ~145MB with INT8) - */ - -use wasm_bindgen::prelude::*; - -// Embed all models at compile time using include_bytes!(). -// These files are read during compilation and embedded in the .wasm file's data section. -// -// Feature flags: -// - `no-models`: Build without embedding models (for testing build scripts). -// - `minilm-only`: Build with only MiniLM model (~17 MB). -// - `codet5-only`: Build with only CodeT5 model (~90 MB). -// - `unoptimized-wasm`: Use unoptimized WASM files for faster iteration. 
- -#[cfg(all(not(feature = "no-models"), not(feature = "codet5-only")))] -static MINILM_MODEL: &[u8] = include_bytes!("../../../../.cache/models/minilm-int4.onnx"); -#[cfg(any(feature = "no-models", feature = "codet5-only"))] -static MINILM_MODEL: &[u8] = &[]; - -#[cfg(all(not(feature = "no-models"), not(feature = "codet5-only")))] -static MINILM_TOKENIZER: &[u8] = include_bytes!("../../../../.cache/models/minilm-tokenizer.json"); -#[cfg(any(feature = "no-models", feature = "codet5-only"))] -static MINILM_TOKENIZER: &[u8] = &[]; - -#[cfg(all(not(feature = "no-models"), not(feature = "minilm-only")))] -static CODET5_ENCODER: &[u8] = include_bytes!("../../../../.cache/models/codet5-encoder-int4.onnx"); -#[cfg(any(feature = "no-models", feature = "minilm-only"))] -static CODET5_ENCODER: &[u8] = &[]; - -#[cfg(all(not(feature = "no-models"), not(feature = "minilm-only")))] -static CODET5_DECODER: &[u8] = include_bytes!("../../../../.cache/models/codet5-decoder-int4.onnx"); -#[cfg(any(feature = "no-models", feature = "minilm-only"))] -static CODET5_DECODER: &[u8] = &[]; - -#[cfg(all(not(feature = "no-models"), not(feature = "minilm-only")))] -static CODET5_TOKENIZER: &[u8] = include_bytes!("../../../../.cache/models/codet5-tokenizer.json"); -#[cfg(any(feature = "no-models", feature = "minilm-only"))] -static CODET5_TOKENIZER: &[u8] = &[]; - -// Use optimized SIMD-only WASM (single-threaded). -// We don't use multi-threading (no session options, sequential batching). -// SIMD-only saves ~2 MB vs threaded version. -#[cfg(all(not(feature = "unoptimized-wasm"), not(feature = "no-models")))] -static ONNX_RUNTIME: &[u8] = include_bytes!("../../../../.cache/models/ort-wasm-simd-threaded.wasm"); -#[cfg(all(feature = "unoptimized-wasm", not(feature = "no-models")))] -static ONNX_RUNTIME: &[u8] = include_bytes!("../../../../.cache/models/ort-wasm-simd-threaded.wasm"); -#[cfg(feature = "no-models")] -static ONNX_RUNTIME: &[u8] = &[]; - -#[cfg(all(not(feature = "unoptimized-wasm"), not(feature = "no-models")))] -static YOGA_LAYOUT: &[u8] = include_bytes!("../../../../.cache/models/yoga.wasm"); -#[cfg(all(feature = "unoptimized-wasm", not(feature = "no-models")))] -static YOGA_LAYOUT: &[u8] = include_bytes!("../../../../.cache/models/yoga.wasm"); -#[cfg(feature = "no-models")] -static YOGA_LAYOUT: &[u8] = &[]; - -// ============================================================================= -// MiniLM Model -// ============================================================================= - -/// Get pointer to MiniLM model in WASM linear memory. -#[wasm_bindgen] -pub fn get_minilm_model_ptr() -> *const u8 { - MINILM_MODEL.as_ptr() -} - -/// Get size of MiniLM model in bytes. -#[wasm_bindgen] -pub fn get_minilm_model_size() -> usize { - MINILM_MODEL.len() -} - -/// Get pointer to MiniLM tokenizer in WASM linear memory. -#[wasm_bindgen] -pub fn get_minilm_tokenizer_ptr() -> *const u8 { - MINILM_TOKENIZER.as_ptr() -} - -/// Get size of MiniLM tokenizer in bytes. -#[wasm_bindgen] -pub fn get_minilm_tokenizer_size() -> usize { - MINILM_TOKENIZER.len() -} - -// ============================================================================= -// CodeT5 Models -// ============================================================================= - -/// Get pointer to CodeT5 encoder in WASM linear memory. -#[wasm_bindgen] -pub fn get_codet5_encoder_ptr() -> *const u8 { - CODET5_ENCODER.as_ptr() -} - -/// Get size of CodeT5 encoder in bytes. 
-#[wasm_bindgen] -pub fn get_codet5_encoder_size() -> usize { - CODET5_ENCODER.len() -} - -/// Get pointer to CodeT5 decoder in WASM linear memory. -#[wasm_bindgen] -pub fn get_codet5_decoder_ptr() -> *const u8 { - CODET5_DECODER.as_ptr() -} - -/// Get size of CodeT5 decoder in bytes. -#[wasm_bindgen] -pub fn get_codet5_decoder_size() -> usize { - CODET5_DECODER.len() -} - -/// Get pointer to CodeT5 tokenizer in WASM linear memory. -#[wasm_bindgen] -pub fn get_codet5_tokenizer_ptr() -> *const u8 { - CODET5_TOKENIZER.as_ptr() -} - -/// Get size of CodeT5 tokenizer in bytes. -#[wasm_bindgen] -pub fn get_codet5_tokenizer_size() -> usize { - CODET5_TOKENIZER.len() -} - -// ============================================================================= -// ONNX Runtime -// ============================================================================= - -/// Get pointer to ONNX Runtime WASM in linear memory. -#[wasm_bindgen] -pub fn get_onnx_runtime_ptr() -> *const u8 { - ONNX_RUNTIME.as_ptr() -} - -/// Get size of ONNX Runtime WASM in bytes. -#[wasm_bindgen] -pub fn get_onnx_runtime_size() -> usize { - ONNX_RUNTIME.len() -} - -// ============================================================================= -// Yoga Layout -// ============================================================================= - -/// Get pointer to Yoga Layout WASM in linear memory. -#[wasm_bindgen] -pub fn get_yoga_layout_ptr() -> *const u8 { - YOGA_LAYOUT.as_ptr() -} - -/// Get size of Yoga Layout WASM in bytes. -#[wasm_bindgen] -pub fn get_yoga_layout_size() -> usize { - YOGA_LAYOUT.len() -} - -// ============================================================================= -// Utility Functions -// ============================================================================= - -/// Get total embedded size in bytes. -#[wasm_bindgen] -pub fn get_total_embedded_size() -> usize { - MINILM_MODEL.len() - + MINILM_TOKENIZER.len() - + CODET5_ENCODER.len() - + CODET5_DECODER.len() - + CODET5_TOKENIZER.len() - + ONNX_RUNTIME.len() - + YOGA_LAYOUT.len() -} - -/// Get version string. -#[wasm_bindgen] -pub fn get_version() -> String { - env!("CARGO_PKG_VERSION").to_string() -} diff --git a/packages/onnxruntime/README.md b/packages/onnxruntime/README.md deleted file mode 100644 index 0de42bd8d..000000000 --- a/packages/onnxruntime/README.md +++ /dev/null @@ -1,48 +0,0 @@ -# @socketsecurity/onnxruntime - -Custom ONNX Runtime WASM build optimized for Socket CLI. - -## Overview - -This package builds ONNX Runtime from source with Emscripten, optimized for: -- **Synchronous WASM instantiation** (`WASM_ASYNC_COMPILATION=0`) -- **Minimal size** (aggressive optimization flags) -- **Fast startup** (embedded WASM, no network fetch) - -## Building - -```bash -# Normal build with checkpoints -pnpm run build - -# Force rebuild (ignore checkpoints) -pnpm run build:force - -# Clean build artifacts -pnpm run clean -``` - -## Requirements - -- **Emscripten SDK** (emsdk) -- **CMake** (3.13+) -- **Git** - -Install Emscripten: https://emscripten.org/docs/getting_started/downloads.html - -## Output - -Built artifacts are exported to `build/wasm/`: -- `ort-wasm-simd-threaded.wasm` - ONNX Runtime WebAssembly module -- `ort-wasm-simd-threaded.js` - Emscripten JavaScript glue code - -## Integration - -The CLI's `extract-onnx-runtime.mjs` script: -1. Reads these built WASM artifacts -2. Embeds WASM as base64 -3. Generates `build/onnx-sync.mjs` for synchronous loading - -## Version - -Based on ONNX Runtime v1.21.1. 
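The integration flow above — read the built artifact, embed it as base64, emit a synchronous loader — is the reason the build pins `WASM_ASYNC_COMPILATION=0`. A minimal sketch of the embed step, with illustrative names (`embedWasmSync` is hypothetical; the real generator is `extract-onnx-runtime.mjs`):

```javascript
// Sketch of the embed step: inline a .wasm artifact as base64 so it can be
// compiled synchronously at import time (no fetch, no await).
// embedWasmSync is an illustrative name, not the real generator.
import { readFileSync, writeFileSync } from 'node:fs'

function embedWasmSync(wasmPath, outPath) {
  const b64 = readFileSync(wasmPath).toString('base64')
  writeFileSync(outPath, [
    `const bytes = Buffer.from(${JSON.stringify(b64)}, 'base64')`,
    // Synchronous compile: usable at module load time without async plumbing.
    'export const wasmModule = new WebAssembly.Module(bytes)',
    '',
  ].join('\n'))
}

embedWasmSync(
  'build/wasm/ort-wasm-simd-threaded.wasm',
  'build/onnx-sync.mjs',
)
```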
diff --git a/packages/onnxruntime/package.json b/packages/onnxruntime/package.json deleted file mode 100644 index 2d6078f2e..000000000 --- a/packages/onnxruntime/package.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "name": "@socketsecurity/onnxruntime", - "version": "1.21.1", - "description": "Custom ONNX Runtime WASM build optimized for Socket CLI", - "private": true, - "exports": { - "./build/wasm/ort-wasm-simd-threaded.mjs": "./build/wasm/ort-wasm-simd-threaded.mjs", - "./build/wasm/ort-wasm-simd-threaded.wasm": "./build/wasm/ort-wasm-simd-threaded.wasm" - }, - "scripts": { - "build": "node scripts/build.mjs", - "build:force": "node scripts/build.mjs --force", - "clean": "node scripts/clean.mjs" - }, - "dependencies": { - "@socketsecurity/build-infra": "workspace:*", - "@socketsecurity/lib": "workspace:*" - } -} diff --git a/packages/onnxruntime/scripts/build.mjs b/packages/onnxruntime/scripts/build.mjs deleted file mode 100644 index f50ea847f..000000000 --- a/packages/onnxruntime/scripts/build.mjs +++ /dev/null @@ -1,388 +0,0 @@ -/** - * Build onnxruntime - Size-optimized ONNX Runtime WASM for Socket CLI. - * - * This script builds ONNX Runtime from official source with Emscripten: - * - ONNX Runtime C++ (official Microsoft implementation) - * - Emscripten for C++ → WASM compilation - * - CMake configuration - * - Aggressive WASM optimizations - * - * Usage: - * node scripts/build.mjs # Normal build with checkpoints - * node scripts/build.mjs --force # Force rebuild (ignore checkpoints) - */ - -import { existsSync, promises as fs } from 'node:fs' -import path from 'node:path' -import { fileURLToPath } from 'node:url' - -import { WIN32 } from '@socketsecurity/lib/constants/platform' -import { safeDelete, safeReadFile } from '@socketsecurity/lib/fs' -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import { spawn } from '@socketsecurity/lib/spawn' -import { - printSetupResults, - setupBuildEnvironment, -} from '@socketsecurity/build-infra/lib/build-env' -import { - checkCompiler, - checkDiskSpace, - formatDuration, - getFileSize, -} from '@socketsecurity/build-infra/lib/build-helpers' -import { - printError, - printHeader, - printStep, - printSuccess, - printWarning, -} from '@socketsecurity/build-infra/lib/build-output' -import { - cleanCheckpoint, - createCheckpoint, - shouldRun, -} from '@socketsecurity/build-infra/lib/checkpoint-manager' - -const __filename = fileURLToPath(import.meta.url) -const __dirname = path.dirname(__filename) - -// Parse arguments. -const args = process.argv.slice(2) -const FORCE_BUILD = args.includes('--force') -const CLEAN_BUILD = args.includes('--clean') - -// Configuration. -const ROOT_DIR = path.join(__dirname, '..') -const BUILD_DIR = path.join(ROOT_DIR, 'build') -const OUTPUT_DIR = path.join(BUILD_DIR, 'wasm') -// Read ONNX Runtime version from package.json (matches ONNX Runtime release version). -const packageJson = JSON.parse(await fs.readFile(path.join(ROOT_DIR, 'package.json'), 'utf-8')) -const ONNX_VERSION = `v${packageJson.version}` -const ONNX_REPO = 'https://github.com/microsoft/onnxruntime.git' -const ONNX_SOURCE_DIR = path.join(BUILD_DIR, 'onnxruntime-source') - -/** - * Clone ONNX Runtime source if not already present. - */ -async function cloneOnnxSource() { - if (!(await shouldRun('onnxruntime', 'cloned', FORCE_BUILD))) { - return - } - - printHeader('Cloning ONNX Runtime Source') - - // Check if source exists and if it has the patches. 
- if (existsSync(ONNX_SOURCE_DIR)) { - printStep('ONNX Runtime source already exists') - - // Define patches to verify. - const patches = [ - { - name: 'Eigen hash', - path: path.join(ONNX_SOURCE_DIR, 'cmake', 'deps.txt'), - marker: '51982be81bbe52572b54180454df11a3ece9a934', - }, - { - name: 'MLFloat16 build', - path: path.join(ONNX_SOURCE_DIR, 'cmake', 'onnxruntime_webassembly.cmake'), - marker: '# add_compile_definitions(\n # BUILD_MLAS_NO_ONNXRUNTIME', - }, - { - name: 'wasm_post_build.js', - path: path.join(ONNX_SOURCE_DIR, 'js', 'web', 'script', 'wasm_post_build.js'), - marker: 'if (matches.length === 0) {', - }, - ] - - // Check if all patches have been applied. - const results = await Promise.allSettled( - patches.map(async ({ path: filePath, marker }) => { - const content = await safeReadFile(filePath, 'utf-8') - return content?.includes(marker) ?? false - }) - ) - const allPatchesApplied = results.every( - r => r.status === 'fulfilled' && r.value === true - ) - - if (!allPatchesApplied) { - // Source exists but patches not applied - need to re-clone. - printWarning('Source exists but patches not applied') - printStep('Removing old source to re-clone with patches...') - await fs.rm(ONNX_SOURCE_DIR, { recursive: true, force: true }) - printSuccess('Old source removed') - } else { - printStep('All patches already applied, skipping clone') - await createCheckpoint('onnxruntime', 'cloned') - return - } - } - - await fs.mkdir(BUILD_DIR, { recursive: true }) - - printStep(`Cloning ONNX Runtime ${ONNX_VERSION}...`) - await spawn('git', ['clone', '--depth', '1', '--branch', ONNX_VERSION, ONNX_REPO, ONNX_SOURCE_DIR], { - shell: WIN32, - stdio: 'inherit', - }) - printSuccess(`ONNX Runtime ${ONNX_VERSION} cloned`) - - // Patch 1: Update Eigen hash (see docs/patches.md). - printStep('Patching deps.txt to accept current Eigen hash...') - const depsPath = path.join(ONNX_SOURCE_DIR, 'cmake', 'deps.txt') - const depsContent = await fs.readFile(depsPath, 'utf-8') - const updatedDeps = depsContent.replace( - /eigen;([^;]+);5ea4d05e62d7f954a46b3213f9b2535bdd866803/g, - 'eigen;$1;51982be81bbe52572b54180454df11a3ece9a934' - ) - await fs.writeFile(depsPath, updatedDeps, 'utf-8') - printSuccess('Eigen hash updated in deps.txt') - - // Patch 2: Fix MLFloat16 build (see docs/patches.md). - printStep('Patching onnxruntime_webassembly.cmake to fix MLFloat16 build...') - const cmakePath = path.join(ONNX_SOURCE_DIR, 'cmake', 'onnxruntime_webassembly.cmake') - let cmakeContent = await fs.readFile(cmakePath, 'utf-8') - cmakeContent = cmakeContent.replace( - /add_compile_definitions\(\s*BUILD_MLAS_NO_ONNXRUNTIME\s*\)/, - '# add_compile_definitions(\n # BUILD_MLAS_NO_ONNXRUNTIME\n # )' - ) - await fs.writeFile(cmakePath, cmakeContent, 'utf-8') - printSuccess('BUILD_MLAS_NO_ONNXRUNTIME commented out') - - // Patch 3: Modern Emscripten compatibility (see docs/patches.md). - // - // PROBLEM: ONNX Runtime's wasm_post_build.js expects specific Worker URL pattern - // from older Emscripten versions. Modern Emscripten (3.1.50+) doesn't generate - // this pattern, causing build to fail with "Unexpected number of matches" error. - // - // SOLUTION: Patch the script to handle modern Emscripten gracefully: - // 1. Allow zero matches (modern Emscripten generates correct code already) - // 2. Improve error message to show actual match count - // - // CACHE HANDLING: CMake copies wasm_post_build.js from source to build directory - // during configuration. 
GitHub Actions may restore cached builds with old unpatched - // copies, so we must: - // 1. Patch source file (single source of truth) - // 2. Delete cached build copy if present (forces CMake recopy from patched source) - // 3. Clear CMake cache (ensures full reconfiguration) - printStep('Patching wasm_post_build.js to handle modern Emscripten...') - const postBuildSourcePath = path.join(ONNX_SOURCE_DIR, 'js', 'web', 'script', 'wasm_post_build.js') - if (existsSync(postBuildSourcePath)) { - let postBuildContent = await fs.readFile(postBuildSourcePath, 'utf-8') - - // Patch 1: Allow zero matches (modern Emscripten case). - // Insert early return when no Worker URL pattern found. - postBuildContent = postBuildContent.replace( - /if \(matches\.length !== 1\) \{/, - `if (matches.length === 0) {\n console.log('No Worker URL pattern found - skipping post-build transformation (modern Emscripten)');\n return;\n }\n if (matches.length !== 1) {` - ) - - // Patch 2: Improve error message to show actual match count. - // Helps debug if we get unexpected pattern variations. - postBuildContent = postBuildContent.replace( - /Unexpected number of matches for "" in "": \./, - `Unexpected number of Worker URL matches: found \${matches.length}, expected 1. Pattern: \${regex}` - ) - - await fs.writeFile(postBuildSourcePath, postBuildContent, 'utf-8') - printSuccess('wasm_post_build.js (source) patched') - } - - await createCheckpoint('onnxruntime', 'cloned') -} - -/** - * Build ONNX Runtime with Emscripten using official build script. - */ -async function build() { - if (!(await shouldRun('onnxruntime', 'built', FORCE_BUILD))) { - return - } - - printHeader('Building ONNX Runtime with Emscripten') - - const startTime = Date.now() - - // Clean stale cached files before build. - // GitHub Actions may have restored old unpatched files from cache after clone step. - // Delete them now to force CMake to recopy patched versions from source. - printStep('Checking for stale cached build files...') - const platform = process.platform === 'darwin' ? 'Darwin' : 'Linux' - const buildCacheDir = path.join(ONNX_SOURCE_DIR, 'build', platform, 'Release') - - // Delete cached wasm_post_build.js (CMake will recopy from patched source). - const postBuildBuildPath = path.join(buildCacheDir, 'wasm_post_build.js') - if (existsSync(postBuildBuildPath)) { - await safeDelete(postBuildBuildPath) - printSuccess('Removed stale wasm_post_build.js from cache') - } - - // Clear CMake cache to force full reconfiguration. - const cmakeCachePath = path.join(buildCacheDir, 'CMakeCache.txt') - if (existsSync(cmakeCachePath)) { - await safeDelete(cmakeCachePath) - printSuccess('Cleared CMake cache') - } - - // ONNX Runtime has its own build script: ./build.sh --config Release --build_wasm - // We need to pass WASM_ASYNC_COMPILATION=0 via EMCC_CFLAGS environment variable. - - printStep('Running ONNX Runtime build script...') - printStep('This may take 30-60 minutes on first build...') - - const buildScript = path.join(ONNX_SOURCE_DIR, 'build.sh') - - // Note: WASM_ASYNC_COMPILATION=0 is required for bundling but causes compilation - // errors when passed via EMCC_CFLAGS (it's a linker flag, not compiler flag). - // ONNX Runtime's build system handles Emscripten settings through CMake. - // We pass it through --emscripten_settings which goes to EMSCRIPTEN_SETTINGS. - - // Enable WASM threading to avoid MLFloat16 build errors. 
- // Issue: https://github.com/microsoft/onnxruntime/issues/23769 - // When threading is disabled, BUILD_MLAS_NO_ONNXRUNTIME is defined, which causes - // MLFloat16 to be missing Negate(), IsNegative(), and FromBits() methods. - // Workaround (if threading can't be used): Comment out BUILD_MLAS_NO_ONNXRUNTIME - // in cmake/onnxruntime_webassembly.cmake after cloning. - await spawn(buildScript, [ - '--config', 'Release', - '--build_wasm', - '--skip_tests', - '--parallel', - '--enable_wasm_threads', // Required for ONNX Runtime v1.19.0+ (non-threaded builds deprecated). - '--enable_wasm_simd', // Enable SIMD for better performance. - ], { - cwd: ONNX_SOURCE_DIR, - shell: WIN32, - stdio: 'inherit', - }) - - const duration = formatDuration(Date.now() - startTime) - printSuccess(`Build completed in ${duration}`) - await createCheckpoint('onnxruntime', 'built') -} - -/** - * Export WASM to output directory. - */ -async function exportWasm() { - printHeader('Exporting WASM') - - await fs.mkdir(OUTPUT_DIR, { recursive: true }) - - // ONNX Runtime build outputs to: build/Linux/Release/ - // or build/Darwin/Release/ on macOS - const platform = process.platform === 'darwin' ? 'Darwin' : 'Linux' - const buildOutputDir = path.join(ONNX_SOURCE_DIR, 'build', platform, 'Release') - - // Look for threaded WASM files (threading + SIMD enabled). - // With threading enabled, outputs are: ort-wasm-simd-threaded.{wasm,mjs}. - const wasmFile = path.join(buildOutputDir, 'ort-wasm-simd-threaded.wasm') - const jsFile = path.join(buildOutputDir, 'ort-wasm-simd-threaded.mjs') - - if (!existsSync(wasmFile)) { - printError('WASM file not found - build failed') - printError(`Expected: ${wasmFile}`) - throw new Error(`Required WASM file not found: ${wasmFile}`) - } - - const outputWasm = path.join(OUTPUT_DIR, 'ort-wasm-simd-threaded.wasm') - const outputJs = path.join(OUTPUT_DIR, 'ort-wasm-simd-threaded.mjs') - - // Copy WASM file. - await fs.copyFile(wasmFile, outputWasm) - - // Copy JS glue code (ES6 module format with threading). - if (existsSync(jsFile)) { - await fs.copyFile(jsFile, outputJs) - printStep(`JS: ${outputJs}`) - } - - const wasmSize = await getFileSize(outputWasm) - printStep(`WASM: ${outputWasm}`) - printStep(`WASM size: ${wasmSize}`) - - printSuccess('WASM exported') -} - -/** - * Main build function. - */ -async function main() { - const totalStart = Date.now() - - printHeader('🔨 Building onnxruntime') - const logger = getDefaultLogger() - logger.info(`ONNX Runtime ${ONNX_VERSION} build for Socket CLI`) - logger.info('') - - // Clean checkpoints if requested or if output is missing. - const outputWasm = path.join(OUTPUT_DIR, 'ort-wasm-simd-threaded.wasm') - const outputJs = path.join(OUTPUT_DIR, 'ort-wasm-simd-threaded.mjs') - const outputMissing = !existsSync(outputWasm) || !existsSync(outputJs) - - if (CLEAN_BUILD || outputMissing) { - if (outputMissing) { - printStep('Output artifacts missing - cleaning stale checkpoints') - } - await cleanCheckpoint('onnxruntime') - } - - // Pre-flight checks. - printHeader('Pre-flight Checks') - - const diskOk = await checkDiskSpace(BUILD_DIR, 5) // ONNX needs more space. - if (!diskOk) { - printWarning('Could not check disk space') - } - - // Setup build environment (check for Emscripten SDK). - const envSetup = await setupBuildEnvironment({ - emscripten: true, - autoSetup: false, - }) - - printSetupResults(envSetup) - - if (!envSetup.success) { - // Fallback: Check if emcc is in PATH. 
- printStep('Checking for emcc in PATH...') - const emccCheck = await checkCompiler('emcc') - - if (emccCheck) { - printSuccess('Emscripten (emcc) found in PATH') - } else { - printError('') - printError('Build environment setup failed') - printError('Install Emscripten SDK:') - printError(' https://emscripten.org/docs/getting_started/downloads.html') - printError('') - throw new Error('Emscripten SDK required') - } - } - - printSuccess('Pre-flight checks passed') - - // Build phases. - await cloneOnnxSource() - await build() - await exportWasm() - - // Report completion. - const totalDuration = formatDuration(Date.now() - totalStart) - - printHeader('🎉 Build Complete!') - logger.success(`Total time: ${totalDuration}`) - logger.success(`Output: ${OUTPUT_DIR}`) - logger.info('') - logger.info('Next steps:') - logger.info(' 1. Test WASM with Socket CLI') - logger.info(' 2. Run extract-onnx-runtime.mjs to embed WASM') - logger.info('') -} - -// Run build. -main().catch((e) => { - printError('Build Failed') - logger.error(e.message) - throw e -}) diff --git a/packages/onnxruntime/scripts/clean.mjs b/packages/onnxruntime/scripts/clean.mjs deleted file mode 100644 index f612495ef..000000000 --- a/packages/onnxruntime/scripts/clean.mjs +++ /dev/null @@ -1,30 +0,0 @@ -/** - * Clean onnxruntime build artifacts. - */ - -import path from 'node:path' -import { fileURLToPath } from 'node:url' - -import { safeDelete } from '@socketsecurity/lib/fs' -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import { cleanCheckpoint } from '@socketsecurity/build-infra/lib/checkpoint-manager' - -const __dirname = path.dirname(fileURLToPath(import.meta.url)) -const ROOT_DIR = path.join(__dirname, '..') -const BUILD_DIR = path.join(ROOT_DIR, 'build') - -async function clean() { - const logger = getDefaultLogger() - logger.info('Cleaning onnxruntime build artifacts...') - - await safeDelete(BUILD_DIR) - logger.success('Build directory cleaned') - - await cleanCheckpoint('onnxruntime') - logger.success('Checkpoints cleaned') -} - -clean().catch(e => { - logger.error(e.message) - process.exit(1) -}) diff --git a/packages/sbom-generator/README.md b/packages/sbom-generator/README.md deleted file mode 100644 index 9675f981e..000000000 --- a/packages/sbom-generator/README.md +++ /dev/null @@ -1,369 +0,0 @@ -# sbom-generator - -Type-safe CycloneDX SBOM generator for multi-ecosystem projects with Socket.dev integration and CodeT5 optimization. - -## Why We Built This (Not Using cdxgen) - -**cdxgen limitations**: -- ❌ Untyped JavaScript (runtime errors, hard to maintain) -- ❌ Requires 10+ external tools (maven, gradle, pip, cargo, etc.) -- ❌ Fragile when tools not installed or wrong versions -- ❌ 50+ parsers (most we don't need) - -**Our TypeScript solution**: -- ✅ Fully typed (catch errors at compile time) -- ✅ Parse directly (no external tools for most ecosystems) -- ✅ Focused (6-10 ecosystems Socket cares about) -- ✅ Socket.dev + CodeT5 native integration -- ✅ Maintainable (clear contracts, comprehensive tests) - -## Supported Ecosystems - -### Tier 1 (Implemented) -- **JavaScript/TypeScript** - npm, Yarn, pnpm -- **Python** - pip, Poetry, Pipenv -- **Go** - Go modules -- **Rust** - Cargo -- **Ruby** - Bundler -- **PHP** - Composer - -### Tier 2 (Planned) -- **Java** - Maven, Gradle -- **C#/.NET** - NuGet -- **Swift** - SwiftPM - -## Key Features - -### 1. 
No External Tools Required - -Parse lockfiles directly without shelling out: - -```typescript -// ✅ Pure TypeScript parsing (no external tools) -const sbom = await generateSbom('./my-project') - -// ❌ cdxgen requires: npm, pip, cargo, go, bundle, composer, etc. -``` - -**How we do it**: -- **npm**: Parse package-lock.json (JSON), yarn.lock (`@yarnpkg/parsers`), pnpm-lock.yaml (YAML) -- **Python**: Parse poetry.lock (TOML), Pipfile.lock (JSON), requirements.txt (text) -- **Go**: Parse go.mod, go.sum (simple text format) -- **Rust**: Parse Cargo.lock (TOML) -- **Ruby**: Parse Gemfile.lock (custom text format) -- **PHP**: Parse composer.lock (JSON) - -Only **Gradle** requires external execution (Groovy/Kotlin DSL is code, not data). - -### 2. Type-Safe Throughout - -```typescript -// Every step is type-checked -interface Parser { - readonly ecosystem: Ecosystem - detect(projectPath: string): Promise - parse(projectPath: string, options?: ParseOptions): Promise -} - -// CycloneDX SBOM types match spec exactly -interface Sbom { - bomFormat: 'CycloneDX' - specVersion: '1.5' - components: Component[] - dependencies: Dependency[] - // ... fully typed -} -``` - -### 3. Multi-Ecosystem Auto-Detection - -Automatically detects all ecosystems in a project: - -```typescript -// Polyglot monorepo with Node.js, Python, Rust -const sbom = await generateSbom('./monorepo') - -// Auto-detects and parses: -// - package.json + lockfiles (Node.js) -// - pyproject.toml + poetry.lock (Python) -// - Cargo.toml + Cargo.lock (Rust) -``` - -### 4. Socket.dev Integration - -Enrich SBOM with real-time security data: - -```typescript -import { enrichSbomWithSocket } from '@socketsecurity/sbom-generator/enrichment' - -const sbom = await generateSbom('./project') -const enriched = await enrichSbomWithSocket(sbom, { - apiToken: process.env.SOCKET_API_TOKEN -}) - -// Each component now has Socket security data -console.log(enriched.components[0].socket) -// { -// score: 45, -// issues: [{ cve: 'CVE-2021-3749', severity: 'high', ... }], -// supplyChainRisk: 'medium' -// } -``` - -### 5. 
CodeT5 Optimized - -Format SBOM for maximum ML model performance: - -```typescript -import { formatSbomForCodeT5 } from '@socketsecurity/sbom-generator/formatters' - -const enriched = await enrichSbomWithSocket(sbom, { apiToken }) -const prompt = formatSbomForCodeT5(enriched, { - task: 'security-analysis', - includeGraph: true -}) - -// Optimized prompt (~300 tokens vs 50,000 for raw lockfiles) -// CodeT5 can now see 100% of critical information -``` - -**Optimization benefits**: -- **600x token reduction** (50,000 → 80 tokens) -- **Structured format** (consistent patterns for ML) -- **Context prioritization** (critical issues first) -- **Clear task definition** (guides model output) - -## Usage - -### Basic SBOM Generation - -```typescript -import { generateSbom } from '@socketsecurity/sbom-generator' - -// Generate SBOM for any project (auto-detects ecosystems) -const sbom = await generateSbom('./my-project', { - includeDevDependencies: true, - deep: true // Include transitive dependencies -}) - -console.log(sbom.metadata.component) -// { name: 'my-app', version: '1.0.0', type: 'application' } - -console.log(sbom.components.length) -// 47 components across npm, pypi, cargo -``` - -### Specific Ecosystem - -```typescript -// Limit to specific ecosystems -const sbom = await generateSbom('./project', { - ecosystems: ['npm', 'pypi'] // Only parse these -}) -``` - -### With Security Enrichment - -```typescript -import { generateSbom } from '@socketsecurity/sbom-generator' -import { enrichSbomWithSocket } from '@socketsecurity/sbom-generator/enrichment' - -const sbom = await generateSbom('./project') -const enriched = await enrichSbomWithSocket(sbom, { - apiToken: process.env.SOCKET_API_TOKEN -}) - -// Find critical issues -const critical = enriched.components.filter(c => - c.socket?.issues?.some(i => i.severity === 'critical' || i.severity === 'high') -) - -console.log(`Found ${critical.length} components with critical issues`) -``` - -### CodeT5 Analysis - -```typescript -import { generateSbom } from '@socketsecurity/sbom-generator' -import { enrichSbomWithSocket } from '@socketsecurity/sbom-generator/enrichment' -import { formatSbomForCodeT5 } from '@socketsecurity/sbom-generator/formatters' - -// Full pipeline: Generate → Enrich → Format → Analyze -const sbom = await generateSbom('./project') -const enriched = await enrichSbomWithSocket(sbom, { apiToken }) -const prompt = formatSbomForCodeT5(enriched) - -const analysis = await codeT5.generate(prompt) - -console.log(analysis) -// "CRITICAL: axios@0.21.0 has CVE-2021-3749 (CVSS 7.5). -// This SSRF vulnerability allows attackers to bypass protections. -// Fix: Update to axios@1.6.0..." -``` - -## Output Format - -Generates **CycloneDX v1.5** SBOM (industry standard): - -```json -{ - "bomFormat": "CycloneDX", - "specVersion": "1.5", - "serialNumber": "urn:uuid:3e671687-395b-41f5-a30f-a58921a69b79", - "version": 1, - "metadata": { - "timestamp": "2024-01-15T10:30:00Z", - "tools": [{ - "vendor": "Socket.dev", - "name": "@socketsecurity/sbom-generator" - }], - "component": { - "type": "application", - "name": "my-app", - "version": "1.0.0" - } - }, - "components": [{ - "type": "library", - "bom-ref": "pkg:npm/axios@0.21.0", - "name": "axios", - "version": "0.21.0", - "purl": "pkg:npm/axios@0.21.0", - "licenses": [{"license": {"id": "MIT"}}] - }], - "dependencies": [{ - "ref": "pkg:npm/my-app@1.0.0", - "dependsOn": ["pkg:npm/axios@0.21.0"] - }] -} -``` - -**Compatible with**: Grype, Syft, Trivy, Dependency-Track, and all CycloneDX tools. 
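The `dependencies` array is the whole graph: each entry maps a `bom-ref` (a purl) to the refs it depends on. A short sketch of indexing it, with an illustrative helper name:

```javascript
// Sketch: index the CycloneDX dependencies array into an adjacency map.
// buildDependencyGraph is illustrative, not part of this package's API.
function buildDependencyGraph(sbom) {
  const graph = new Map()
  for (const { ref, dependsOn = [] } of sbom.dependencies ?? []) {
    graph.set(ref, dependsOn)
  }
  return graph
}

// With the sample document above:
// buildDependencyGraph(sbom).get('pkg:npm/my-app@1.0.0')
// → ['pkg:npm/axios@0.21.0']
```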
- -## Architecture - -### Modular Parser System - -Each ecosystem has its own parser: - -```typescript -// Base interface -interface Parser { - ecosystem: Ecosystem - detect(projectPath: string): Promise - parse(projectPath: string): Promise -} - -// Ecosystem-specific implementations -class NpmParser implements Parser { /* ... */ } -class PythonParser implements Parser { /* ... */ } -class GoParser implements Parser { /* ... */ } -class RustParser implements Parser { /* ... */ } -``` - -### Main Generator - -```typescript -export async function generateSbom( - projectPath: string, - options?: GenerateOptions -): Promise { - // 1. Auto-detect applicable parsers - const parsers = await detectParsers(projectPath, options?.ecosystems) - - // 2. Parse each ecosystem - const results = await Promise.all( - parsers.map(p => p.parse(projectPath, options)) - ) - - // 3. Combine into single SBOM - return combineSbom(results) -} -``` - -## Comparison - -| Feature | cdxgen | Our TypeScript Generator | -|---------|--------|-------------------------| -| **Type Safety** | ❌ None (plain JS) | ✅ Full TypeScript | -| **External Tools** | ❌ Requires 10+ tools | ✅ Parse directly | -| **Ecosystems** | 50+ (bloat) | 6-10 (focused) | -| **Maintenance** | ⚠️ Hard (no types) | ✅ Easy (typed) | -| **Reliability** | ⚠️ Fragile | ✅ Robust | -| **Performance** | ⚠️ Spawns processes | ✅ Pure JS parsing | -| **Socket Integration** | ❌ None | ✅ Native | -| **CodeT5 Optimized** | ❌ No | ✅ Yes | -| **Output** | CycloneDX | CycloneDX (same) | - -## Development Status - -- ✅ **npm** - Fully implemented (package-lock.json, yarn.lock, pnpm-lock.yaml) -- ⏳ **Python** - In progress (poetry.lock, Pipfile.lock, requirements.txt) -- ⏳ **Go** - Planned (go.mod, go.sum) -- ⏳ **Rust** - Planned (Cargo.toml, Cargo.lock) -- ⏳ **Ruby** - Planned (Gemfile.lock) -- ⏳ **PHP** - Planned (composer.lock) - -## API Reference - -### `generateSbom(projectPath, options?)` - -Generate CycloneDX SBOM for a project. - -**Parameters:** -- `projectPath` (string) - Path to project directory -- `options` (GenerateOptions) - - `ecosystems` (Ecosystem[]) - Limit to specific ecosystems - - `includeDevDependencies` (boolean) - Include dev dependencies - - `deep` (boolean) - Include transitive dependencies - -**Returns:** `Promise` - CycloneDX SBOM object - -### `enrichSbomWithSocket(sbom, options)` - -Enrich SBOM with Socket.dev security data. - -**Parameters:** -- `sbom` (Sbom) - CycloneDX SBOM -- `options` (EnrichOptions) - - `apiToken` (string) - Socket.dev API token - -**Returns:** `Promise` - SBOM with Socket security data - -### `formatSbomForCodeT5(sbom, options?)` - -Format SBOM into optimized prompt for CodeT5. 
- -**Parameters:** -- `sbom` (EnrichedSbom) - SBOM with security data -- `options` (FormatOptions) - - `task` (string) - Analysis task type - - `includeGraph` (boolean) - Include dependency graph - - `maxComponents` (number) - Limit components shown - -**Returns:** `string` - Optimized prompt for CodeT5 - -## Testing - -```bash -# Run tests -pnpm test - -# Watch mode -pnpm test:watch - -# Type checking -pnpm type -``` - -## Contributing - -Since this is Socket.dev internal tooling, contributions should focus on: -- Adding new ecosystem parsers -- Improving type definitions -- Optimizing CodeT5 prompts -- Enhancing Socket.dev enrichment - -## License - -Private - Socket.dev internal use only diff --git a/packages/sbom-generator/docs/README.md b/packages/sbom-generator/docs/README.md deleted file mode 100644 index f777831d8..000000000 --- a/packages/sbom-generator/docs/README.md +++ /dev/null @@ -1,85 +0,0 @@ -# sbom-generator Documentation - -Package-level documentation for Socket SBOM Generator - a type-safe CycloneDX SBOM generator for multi-ecosystem projects with Socket.dev integration and CodeT5 optimization. - -## Overview - -This package provides a pure TypeScript implementation for generating Software Bill of Materials (SBOM) documents in CycloneDX format. Unlike cdxgen which requires external tools, our implementation parses lockfiles directly and provides native Socket.dev security enrichment and CodeT5 ML model optimization. - -## Contents - -- **[architecture.md](./architecture.md)** - System architecture and modular parser design -- **[ecosystems.md](./ecosystems.md)** - Supported ecosystems and lockfile parsing strategies -- **[fidelity-analysis.md](./fidelity-analysis.md)** - SBOM accuracy and completeness analysis -- **[implementation.md](./implementation.md)** - Implementation details and code organization -- **[lock-step-compliance.md](./lock-step-compliance.md)** - CycloneDX spec compliance and validation - -## Quick Links - -- **Main README**: [`../README.md`](../README.md) -- **Source Code**: [`../src/`](../src/) -- **Tests**: [`../test/`](../test/) -- **Examples**: [`../examples/`](../examples/) - -## Key Features - -- **No External Tools** - Parse lockfiles directly without npm, pip, cargo, etc. -- **Type-Safe** - Full TypeScript implementation with comprehensive type checking -- **Multi-Ecosystem** - Auto-detect and parse npm, Python, Go, Rust, Ruby, PHP -- **Socket Integration** - Native Socket.dev security enrichment -- **CodeT5 Optimized** - Format SBOMs for maximum ML model performance (600x token reduction) - -## Architecture - -### Parser System - -Each ecosystem has a dedicated parser implementing the `Parser` interface: - -```typescript -interface Parser { - ecosystem: Ecosystem - detect(projectPath: string): Promise - parse(projectPath: string): Promise -} -``` - -Implementations: -- `NpmParser` - package-lock.json, yarn.lock, pnpm-lock.yaml -- `PythonParser` - poetry.lock, Pipfile.lock, requirements.txt -- `GoParser` - go.mod, go.sum -- `RustParser` - Cargo.lock -- `RubyParser` - Gemfile.lock -- `PhpParser` - composer.lock - -### Main Generator - -The `generateSbom()` function orchestrates the entire process: -1. Auto-detect applicable parsers -2. Parse each ecosystem in parallel -3. 
Combine results into single CycloneDX SBOM - -## Output Format - -Generates **CycloneDX v1.5** SBOM compatible with: -- Grype -- Syft -- Trivy -- Dependency-Track -- All CycloneDX-compliant tools - -## Development Status - -- ✅ **npm** - Fully implemented -- ⏳ **Python** - In progress -- ⏳ **Go** - Planned -- ⏳ **Rust** - Planned -- ⏳ **Ruby** - Planned -- ⏳ **PHP** - Planned - -## License - -Private - Socket.dev internal use only - ---- - -**Last Updated**: 2025-10-27 diff --git a/packages/sbom-generator/examples/basic-sbom.mts b/packages/sbom-generator/examples/basic-sbom.mts deleted file mode 100644 index c1b7df12e..000000000 --- a/packages/sbom-generator/examples/basic-sbom.mts +++ /dev/null @@ -1,57 +0,0 @@ -/** - * Basic SBOM Generation Example - * - * Simple example showing how to generate a CycloneDX SBOM from a project. - * No Socket enrichment required - works immediately. - */ - -import { generateSbom } from '../src/index.mts' - -/** - * Run basic SBOM generation example. - */ -async function main() { - const projectPath = process.argv[2] || process.cwd() - - console.log('Generating SBOM for project:', projectPath) - console.log() - - try { - // Generate SBOM (auto-detects ecosystems). - const sbom = await generateSbom(projectPath, { - includeDevDependencies: false, - deep: true, - }) - - // Display summary. - console.log('✓ SBOM Generated Successfully') - console.log() - console.log('Project:', sbom.metadata?.component?.name) - console.log('Version:', sbom.metadata?.component?.version) - console.log('Serial Number:', sbom.serialNumber) - console.log('Timestamp:', sbom.metadata?.timestamp) - console.log() - console.log('Components:', sbom.components?.length) - console.log('Dependencies:', sbom.dependencies?.length) - console.log() - - // Display first 5 components. - console.log('Sample Components:') - for (const component of sbom.components?.slice(0, 5) || []) { - console.log(` - ${component.name}@${component.version}`) - if (component.licenses?.[0]?.license?.id) { - console.log(` License: ${component.licenses[0].license.id}`) - } - } - console.log() - - // Output full SBOM as JSON. - console.log('Full SBOM (JSON):') - console.log(JSON.stringify(sbom, null, 2)) - } catch (e) { - console.error('Error:', e instanceof Error ? e.message : String(e)) - process.exit(1) - } -} - -main() diff --git a/packages/sbom-generator/examples/full-pipeline.mts b/packages/sbom-generator/examples/full-pipeline.mts deleted file mode 100644 index f755f5abe..000000000 --- a/packages/sbom-generator/examples/full-pipeline.mts +++ /dev/null @@ -1,100 +0,0 @@ -/** - * Full Pipeline Example - * - * Demonstrates complete SBOM generation → enrichment → CodeT5 optimization. - * Shows dramatic token reduction while preserving critical information. - */ - -import { generateSbom } from '../src/index.mts' -import { enrichSbomWithSocket } from '../src/enrichment/index.mts' -import { - estimateTokenCount, - formatSbomForCodeT5, -} from '../src/formatters/index.mts' - -/** - * Run full pipeline example. - */ -async function main() { - const projectPath = process.cwd() - const apiToken = process.env.SOCKET_API_TOKEN - - if (!apiToken) { - console.error('Error: SOCKET_API_TOKEN environment variable required') - console.error('Get your token at: https://socket.dev/dashboard/settings') - process.exit(1) - } - - console.log('='.repeat(80)) - console.log('SBOM GENERATOR - FULL PIPELINE EXAMPLE') - console.log('='.repeat(80)) - console.log() - - // Step 1: Generate SBOM. 
- console.log('[1/4] Generating SBOM from project...') - const sbom = await generateSbom(projectPath, { - includeDevDependencies: false, - }) - - console.log(`✓ Generated SBOM`) - console.log(` - Project: ${sbom.metadata?.component?.name}`) - console.log(` - Components: ${sbom.components?.length}`) - console.log(` - Dependencies: ${sbom.dependencies?.length}`) - console.log() - - // Calculate raw SBOM token count. - const rawSbomJson = JSON.stringify(sbom, null, 2) - const rawTokens = estimateTokenCount(rawSbomJson) - console.log(` Raw SBOM size: ${rawSbomJson.length} chars (~${rawTokens} tokens)`) - console.log() - - // Step 2: Enrich with Socket.dev. - console.log('[2/4] Enriching with Socket.dev security data...') - const enriched = await enrichSbomWithSocket(sbom, { apiToken }) - - const issueCount = enriched.components?.reduce( - (total, c) => total + (c.socket?.issues.length || 0), - 0 - ) - - console.log(`✓ Enriched with Socket data`) - console.log(` - Security issues found: ${issueCount}`) - console.log() - - // Step 3: Format for CodeT5. - console.log('[3/4] Formatting for CodeT5 (token optimization)...') - const codeT5Prompt = formatSbomForCodeT5(enriched, { - task: 'security-analysis', - includeGraph: true, - maxComponents: 50, - minSeverity: 'low', - }) - - const optimizedTokens = estimateTokenCount(codeT5Prompt) - const reduction = Math.round((rawTokens / optimizedTokens) * 10) / 10 - - console.log(`✓ Formatted for CodeT5`) - console.log(` - Optimized size: ${codeT5Prompt.length} chars (~${optimizedTokens} tokens)`) - console.log(` - Token reduction: ${reduction}x`) - console.log() - - // Step 4: Display optimized prompt. - console.log('[4/4] CodeT5-Optimized Prompt:') - console.log('='.repeat(80)) - console.log(codeT5Prompt) - console.log('='.repeat(80)) - console.log() - - // Summary. - console.log('SUMMARY:') - console.log(` Raw SBOM: ~${rawTokens} tokens`) - console.log(` Optimized: ~${optimizedTokens} tokens`) - console.log(` Reduction: ${reduction}x smaller`) - console.log() - console.log('✓ CodeT5 can now see 100% of critical information within context window') -} - -main().catch((e: Error) => { - console.error('Error:', e.message) - process.exit(1) -}) diff --git a/packages/sbom-generator/examples/socket-facts-integration.mts b/packages/sbom-generator/examples/socket-facts-integration.mts deleted file mode 100644 index 9a57087b4..000000000 --- a/packages/sbom-generator/examples/socket-facts-integration.mts +++ /dev/null @@ -1,218 +0,0 @@ -/** - * Socket Facts Integration Example - * - * Demonstrates how to use Socket Facts (reachability analysis) to enhance - * CodeT5 prompts with vulnerability reachability context. - */ - -import { readFile } from 'node:fs/promises' -import path from 'node:path' -import type { SocketFacts } from '../src/types/socket-facts.mts' -import { - estimateSocketFactsTokenCount, - formatSocketFactsForCodeT5, -} from '../src/formatters/socket-facts.mts' - -/** - * Example 1: Basic Socket Facts formatting. - */ -async function basicExample(): Promise { - console.log('=== Example 1: Basic Socket Facts Formatting ===\n') - - // Load Socket Facts from file. - const socketFactsPath = path.join( - __dirname, - '../test/fixtures/socket-facts-sample.json' - ) - const socketFactsJson = await readFile(socketFactsPath, 'utf8') - const socketFacts: SocketFacts = JSON.parse(socketFactsJson) - - // Format for CodeT5. 
- // Format for CodeT5.
- const prompt = formatSocketFactsForCodeT5(socketFacts)
-
- console.log(prompt)
- console.log(
- `\nToken count: ${estimateSocketFactsTokenCount(prompt)} tokens\n`
- )
-}
-
-/**
- * Example 2: Focus on reachable vulnerabilities only.
- */
-async function reachableOnlyExample(): Promise<void> {
- console.log('\n=== Example 2: Reachable Vulnerabilities Only ===\n')
-
- const socketFactsPath = path.join(
- __dirname,
- '../test/fixtures/socket-facts-sample.json'
- )
- const socketFactsJson = await readFile(socketFactsPath, 'utf8')
- const socketFacts: SocketFacts = JSON.parse(socketFactsJson)
-
- // Format with reachable-only filter.
- const prompt = formatSocketFactsForCodeT5(socketFacts, undefined, {
- includeUnreachable: false,
- includeCallStacks: true,
- maxCallStackDepth: 5,
- })
-
- console.log(prompt)
- console.log(
- `\nToken count: ${estimateSocketFactsTokenCount(prompt)} tokens`
- )
- console.log('(Filtering unreachable vulnerabilities reduces noise)\n')
-}
-
-/**
- * Example 3: High-confidence reachability only.
- */
-async function highConfidenceExample(): Promise<void> {
- console.log('\n=== Example 3: High-Confidence Reachability (>0.9) ===\n')
-
- const socketFactsPath = path.join(
- __dirname,
- '../test/fixtures/socket-facts-sample.json'
- )
- const socketFactsJson = await readFile(socketFactsPath, 'utf8')
- const socketFacts: SocketFacts = JSON.parse(socketFactsJson)
-
- // Format with high confidence threshold.
- const prompt = formatSocketFactsForCodeT5(socketFacts, undefined, {
- minConfidence: 0.9,
- includeUnreachable: false,
- })
-
- console.log(prompt)
- console.log(
- `\nToken count: ${estimateSocketFactsTokenCount(prompt)} tokens`
- )
- console.log('(High-confidence filter focuses on certain threats)\n')
-}
-
-/**
- * Example 4: Include unreachable vulnerabilities for comparison.
- */
-async function compareReachabilityExample(): Promise<void> {
- console.log('\n=== Example 4: Compare Reachable vs Unreachable ===\n')
-
- const socketFactsPath = path.join(
- __dirname,
- '../test/fixtures/socket-facts-sample.json'
- )
- const socketFactsJson = await readFile(socketFactsPath, 'utf8')
- const socketFacts: SocketFacts = JSON.parse(socketFactsJson)
-
- // Format with both reachable and unreachable.
- const prompt = formatSocketFactsForCodeT5(socketFacts, undefined, {
- includeUnreachable: true,
- includeCallStacks: true,
- })
-
- console.log(prompt)
- console.log(
- `\nToken count: ${estimateSocketFactsTokenCount(prompt)} tokens\n`
- )
-}
-
-/**
- * Example 5: Different analysis tasks.
- */
-async function differentTasksExample(): Promise<void> {
- console.log('\n=== Example 5: Different Analysis Tasks ===\n')
-
- const socketFactsPath = path.join(
- __dirname,
- '../test/fixtures/socket-facts-sample.json'
- )
- const socketFactsJson = await readFile(socketFactsPath, 'utf8')
- const socketFacts: SocketFacts = JSON.parse(socketFactsJson)
-
- // Security analysis.
- console.log('--- Security Analysis ---\n')
- const securityPrompt = formatSocketFactsForCodeT5(socketFacts, undefined, {
- task: 'security-analysis',
- includeUnreachable: false,
- })
- console.log(securityPrompt.slice(0, 500) + '...\n')
-
- // Vulnerability detection.
- console.log('--- Vulnerability Detection ---\n')
- const vulnPrompt = formatSocketFactsForCodeT5(socketFacts, undefined, {
- task: 'vulnerability-detection',
- includeUnreachable: false,
- })
- console.log(vulnPrompt.slice(0, 500) + '...\n')
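// A fourth task value, 'license-compliance', is also supported and follows
// the same pattern (a sketch; same fixture and call shape as above):
//
//   const licensePrompt = formatSocketFactsForCodeT5(socketFacts, undefined, {
//     task: 'license-compliance',
//     includeUnreachable: false,
//   })
//   console.log(licensePrompt.slice(0, 500) + '...\n')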
-
- // Dependency audit.
- console.log('--- Dependency Audit ---\n')
- const auditPrompt = formatSocketFactsForCodeT5(socketFacts, undefined, {
- task: 'dependency-audit',
- includeUnreachable: false,
- })
- console.log(auditPrompt.slice(0, 500) + '...\n')
-}
-
-/**
- * Example 6: Token optimization comparison.
- */
-async function tokenOptimizationExample(): Promise<void> {
- console.log('\n=== Example 6: Token Optimization Comparison ===\n')
-
- const socketFactsPath = path.join(
- __dirname,
- '../test/fixtures/socket-facts-sample.json'
- )
- const socketFactsJson = await readFile(socketFactsPath, 'utf8')
- const socketFacts: SocketFacts = JSON.parse(socketFactsJson)
-
- // All vulnerabilities.
- const allPrompt = formatSocketFactsForCodeT5(socketFacts, undefined, {
- includeUnreachable: true,
- includeCallStacks: true,
- })
- const allTokens = estimateSocketFactsTokenCount(allPrompt)
-
- // Reachable only.
- const reachablePrompt = formatSocketFactsForCodeT5(socketFacts, undefined, {
- includeUnreachable: false,
- includeCallStacks: true,
- })
- const reachableTokens = estimateSocketFactsTokenCount(reachablePrompt)
-
- // Reachable only, no call stacks.
- const minimalPrompt = formatSocketFactsForCodeT5(socketFacts, undefined, {
- includeUnreachable: false,
- includeCallStacks: false,
- })
- const minimalTokens = estimateSocketFactsTokenCount(minimalPrompt)
-
- console.log('Token Count Comparison:')
- console.log(`- All vulnerabilities (with call stacks): ${allTokens} tokens`)
- console.log(
- `- Reachable only (with call stacks): ${reachableTokens} tokens (${Math.round((1 - reachableTokens / allTokens) * 100)}% reduction)`
- )
- console.log(
- `- Reachable only (no call stacks): ${minimalTokens} tokens (${Math.round((1 - minimalTokens / allTokens) * 100)}% reduction)`
- )
- console.log('\nRecommendation: Use reachable-only with call stacks for best signal-to-noise ratio.')
-}
-
-/**
- * Run all examples.
- */
-async function main(): Promise<void> {
- try {
- await basicExample()
- await reachableOnlyExample()
- await highConfidenceExample()
- await compareReachabilityExample()
- await differentTasksExample()
- await tokenOptimizationExample()
- } catch (error) {
- console.error('Error running examples:', error)
- process.exit(1)
- }
-}
-
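// Example 6's percentages come from 1 - (filtered / all): for instance, if
// the full prompt is ~800 tokens and the reachable-only prompt is ~450,
// Math.round((1 - 450 / 800) * 100) reports a 44% reduction.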
-// Run examples.
-main()
diff --git a/packages/sbom-generator/package.json b/packages/sbom-generator/package.json
deleted file mode 100644
index 1bb5acb93..000000000
--- a/packages/sbom-generator/package.json
+++ /dev/null
@@ -1,29 +0,0 @@
-{
- "name": "@socketsecurity/sbom-generator",
- "version": "1.0.0",
- "description": "Type-safe CycloneDX SBOM generator for multi-ecosystem projects",
- "type": "module",
- "private": true,
- "exports": {
- ".": "./src/index.mts",
- "./types": "./src/types/index.mts",
- "./parsers": "./src/parsers/index.mts",
- "./enrichment": "./src/enrichment/index.mts",
- "./formatters": "./src/formatters/index.mts"
- },
- "scripts": {
- "test": "vitest",
- "test:watch": "vitest watch",
- "type": "tsc --noEmit"
- },
- "dependencies": {
- "@iarna/toml": "catalog:",
- "@socketsecurity/lib": "workspace:*",
- "@yarnpkg/parsers": "catalog:",
- "fast-xml-parser": "catalog:",
- "yaml": "catalog:"
- },
- "devDependencies": {
- "vitest": "catalog:"
- }
-}
diff --git a/packages/sbom-generator/scripts/update-from-cdxgen.mts b/packages/sbom-generator/scripts/update-from-cdxgen.mts
deleted file mode 100644
index 5135c0e41..000000000
--- a/packages/sbom-generator/scripts/update-from-cdxgen.mts
+++ /dev/null
@@ -1,513 +0,0 @@
-#!/usr/bin/env node
-/**
- * Automation script to track cdxgen updates and generate migration tasks.
- *
- * This script:
- * 1. Fetches the latest cdxgen release from GitHub
- * 2. Compares with our current baseline (v11.11.0)
- * 3. Downloads and analyzes changes
- * 4. Generates migration report and tasks
- * 5. Updates LOCK-STEP-COMPLIANCE.md
- *
- * Usage:
- * pnpm run update-from-cdxgen
- * pnpm run update-from-cdxgen --check-only (no downloads)
- * pnpm run update-from-cdxgen --target-version=11.12.0
- */
-
-import { promises as fs } from 'node:fs'
-import { createWriteStream } from 'node:fs'
-import { mkdir, mkdtemp, readdir, stat } from 'node:fs/promises'
-import { tmpdir } from 'node:os'
-import path from 'node:path'
-import { Readable } from 'node:stream'
-import { pipeline } from 'node:stream/promises'
-import { fileURLToPath } from 'node:url'
-import { extract } from 'tar'
-import { getDefaultLogger } from '@socketsecurity/lib/logger'
-import colors from 'yoctocolors-cjs'
-
-const __filename = fileURLToPath(import.meta.url)
-const __dirname = path.dirname(__filename)
-
-// Shared logger for all helpers in this script.
-const logger = getDefaultLogger()
-
-// Current baseline version.
-const CURRENT_BASELINE = '11.11.0'
-
-// GitHub repository for cdxgen.
-const CDXGEN_REPO = 'CycloneDX/cdxgen'
-
-// Paths.
-const PACKAGE_ROOT = path.resolve(__dirname, '..')
-const LOCK_STEP_COMPLIANCE_PATH = path.join(PACKAGE_ROOT, 'LOCK-STEP-COMPLIANCE.md')
-const MIGRATION_REPORT_DIR = path.resolve(PACKAGE_ROOT, '../../.claude')
-
-interface CdxgenRelease {
- version: string
- publishedAt: string
- tarballUrl: string
- changelogUrl: string
- htmlUrl: string
-}
-
-interface ParserModule {
- name: string
- ecosystem: string
- path: string
- lastModified: string
-}
-
-interface MigrationTask {
- priority: 'critical' | 'high' | 'medium' | 'low'
- ecosystem: string
- description: string
- effort: 'small' | 'medium' | 'large'
- cdxgenCommit?: string
-}
-
-/**
- * Fetch the latest cdxgen release from GitHub.
- */
-async function fetchLatestCdxgenRelease(): Promise<CdxgenRelease> {
- logger.log('Fetching latest cdxgen release from GitHub...')
-
- const apiUrl = `https://api.github.com/repos/${CDXGEN_REPO}/releases/latest`
-
- const response = await fetch(apiUrl, {
- headers: {
- Accept: 'application/vnd.github+json',
- 'User-Agent': 'socket-sbom-generator',
- },
- })
-
- if (!response.ok) {
- throw new Error(
- `GitHub API request failed: ${response.status} ${response.statusText}`
- )
- }
-
- const data = await response.json()
-
- // Extract version (remove 'v' prefix if present).
- const version = (data.tag_name as string).replace(/^v/, '')
-
- return {
- version,
- publishedAt: data.published_at as string,
- tarballUrl: data.tarball_url as string,
- changelogUrl: `https://github.com/${CDXGEN_REPO}/releases/tag/${data.tag_name}`,
- htmlUrl: data.html_url as string,
- }
-}
-
-/**
- * Compare two semantic versions.
- */
-function compareVersions(v1: string, v2: string): number {
- const parts1 = v1.split('.').map(Number)
- const parts2 = v2.split('.').map(Number)
-
- for (let i = 0; i < 3; i++) {
- const diff = (parts1[i] || 0) - (parts2[i] || 0)
- if (diff !== 0) {
- return diff
- }
- }
-
- return 0
-}
-
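// For reference, compareVersions implements plain numeric ordering:
// compareVersions('11.12.0', '11.11.0') > 0 and
// compareVersions('11.11.0', '11.11.0') === 0. Pre-release tags are not
// handled (a known limitation of the split('.').map(Number) approach).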
-/**
- * Download and extract cdxgen release tarball.
- */
-async function downloadCdxgenRelease(release: CdxgenRelease): Promise<string> {
- logger.log(`Downloading cdxgen v${release.version}...`)
-
- // Create temp directory.
- const tempDir = await mkdtemp(path.join(tmpdir(), 'cdxgen-'))
- logger.log(`Created temp directory: ${tempDir}`)
-
- try {
- // Download tarball.
- logger.log('Downloading tarball...')
- const response = await fetch(release.tarballUrl)
-
- if (!response.ok) {
- throw new Error(`Download failed: ${response.status} ${response.statusText}`)
- }
-
- if (!response.body) {
- throw new Error('Response body is null')
- }
-
- // Extract tarball directly from stream.
- logger.log('Extracting tarball...')
-
- // GitHub tarballs have a top-level directory (e.g., CycloneDX-cdxgen-abc1234).
- // We need to extract and find that directory.
- // Convert the Web ReadableStream to a Node stream and pipe it into tar's
- // extract stream; tar's `file` option expects a filesystem path, and
- // node-tar detects the gzip layer itself.
- await pipeline(Readable.fromWeb(response.body), extract({ cwd: tempDir }))
-
- // Find the extracted directory (should be only one). Array#find does not
- // await an async predicate, so check entries sequentially instead.
- const entries = await readdir(tempDir)
- let extractedDir: string | undefined
- for (const entry of entries) {
- const entryPath = path.join(tempDir, entry)
- const stats = await stat(entryPath)
- if (stats.isDirectory()) {
- extractedDir = entry
- break
- }
- }
-
- if (!extractedDir) {
- throw new Error('Could not find extracted directory')
- }
-
- const cdxgenPath = path.join(tempDir, extractedDir)
- logger.log(`Extracted to: ${cdxgenPath}`)
-
- return cdxgenPath
- } catch (e) {
- // Clean up temp directory on error.
- await fs.rm(tempDir, { recursive: true, force: true })
- throw e
- }
-}
-
-/**
- * Analyze cdxgen parser modules.
- */
-async function analyzeCdxgenParsers(cdxgenPath: string): Promise<ParserModule[]> {
- logger.log('Analyzing cdxgen parsers...')
-
- const parsersDir = path.join(cdxgenPath, 'lib/parsers')
- const modules: ParserModule[] = []
-
- // Mapping of cdxgen parser filenames to ecosystem names.
- const ECOSYSTEM_MAP: Record<string, string> = {
- __proto__: null,
- 'js.js': 'npm',
- 'python.js': 'pypi',
- 'rust.js': 'cargo',
- 'go.js': 'go',
- 'java.js': 'maven',
- 'ruby.js': 'rubygems',
- 'dotnet.js': 'nuget',
- 'github.js': 'actions',
- }
-
- try {
- const files = await readdir(parsersDir)
-
- for (const file of files) {
- if (!file.endsWith('.js')) {
- continue
- }
-
- const filePath = path.join(parsersDir, file)
- const stats = await stat(filePath)
-
- // Map filename to ecosystem.
- const ecosystem = ECOSYSTEM_MAP[file] || 'unknown'
-
- modules.push({
- name: file,
- ecosystem,
- path: filePath,
- lastModified: stats.mtime.toISOString(),
- })
- }
- } catch (e) {
- logger.warn(`Warning: Could not read parsers directory: ${e}`)
- // Return empty array if directory doesn't exist.
- return []
- }
-
- logger.log(`Found ${modules.length} parser modules`)
- return modules
-}
-
-/**
- * Compare our implementation with cdxgen parsers.
- */
-async function compareImplementations(
- cdxgenParsers: ParserModule[]
-): Promise<MigrationTask[]> {
- logger.log('Comparing implementations...')
-
- const tasks: MigrationTask[] = []
-
- // Our implementation status.
- const OUR_PARSERS: Record<string, { implemented: boolean; score?: number }> = {
- __proto__: null,
- npm: { implemented: true, score: 95 },
- pypi: { implemented: false },
- cargo: { implemented: false },
- go: { implemented: false },
- maven: { implemented: false },
- rubygems: { implemented: false },
- nuget: { implemented: false },
- actions: { implemented: false },
- huggingface: { implemented: false },
- chrome: { implemented: false },
- openvsx: { implemented: false },
- }
-
- // Check for parsers in cdxgen that we haven't implemented.
- for (const parser of cdxgenParsers) {
- const { ecosystem } = parser
-
- if (ecosystem === 'unknown') {
- continue
- }
-
- const ourStatus = OUR_PARSERS[ecosystem]
-
- if (!ourStatus) {
- // cdxgen has a parser we don't track.
- tasks.push({
- priority: 'low',
- ecosystem,
- description: `cdxgen has ${parser.name} parser (not in our roadmap)`,
- effort: 'large',
- })
- continue
- }
-
- if (!ourStatus.implemented) {
- // We planned this parser but haven't implemented it yet.
- tasks.push({
- priority: 'high',
- ecosystem,
- description: `Implement ${ecosystem} parser (reference: cdxgen's ${parser.name})`,
- effort: ecosystem === 'maven' ? 'large' : 'medium',
- })
- }
- }
-
- // Check for implemented parsers that could be improved.
- for (const parser of cdxgenParsers) {
- const { ecosystem } = parser
- const ourStatus = OUR_PARSERS[ecosystem]
-
- if (ourStatus?.implemented && ourStatus.score && ourStatus.score < 98) {
- tasks.push({
- priority: 'medium',
- ecosystem,
- description: `Improve lock-step score (${ourStatus.score} → 98) by porting cdxgen improvements`,
- effort: 'small',
- })
- }
- }
-
- // Note: In a full implementation, we would:
- // 1. Parse cdxgen source files to identify new features.
- // 2. Compare file sizes/dates with previous baseline.
- // 3. Identify breaking changes or major refactors.
- // 4. Generate specific line-by-line diffs.
-
- return tasks
-}
-
-/**
- * Generate migration report markdown.
- */ -function generateMigrationReport( - currentVersion: string, - latestRelease: CdxgenRelease, - tasks: MigrationTask[] -): string { - const timestamp = new Date().toISOString().split('T')[0] - - let report = `# cdxgen Migration Report\n\n` - report += `**Generated**: ${timestamp}\n` - report += `**Current Baseline**: v${currentVersion}\n` - report += `**Latest cdxgen**: v${latestRelease.version}\n` - report += `**Published**: ${latestRelease.publishedAt}\n` - report += `**Changelog**: ${latestRelease.changelogUrl}\n\n` - - report += `---\n\n` - report += `## Summary\n\n` - - if (compareVersions(latestRelease.version, currentVersion) <= 0) { - report += `${colors.green('✓')} **Up to date** - No migration needed.\n\n` - return report - } - - report += `${colors.yellow('⚠')} **Update available** - ${tasks.length} migration tasks identified.\n\n` - - // Group by priority. - const byPriority = { - critical: tasks.filter(t => t.priority === 'critical'), - high: tasks.filter(t => t.priority === 'high'), - medium: tasks.filter(t => t.priority === 'medium'), - low: tasks.filter(t => t.priority === 'low'), - } - - report += `### Priority Breakdown\n\n` - report += `- 🔴 **Critical**: ${byPriority.critical.length} tasks\n` - report += `- 🟠 **High**: ${byPriority.high.length} tasks\n` - report += `- 🟡 **Medium**: ${byPriority.medium.length} tasks\n` - report += `- 🟢 **Low**: ${byPriority.low.length} tasks\n\n` - - report += `---\n\n` - report += `## Migration Tasks\n\n` - - for (const [priority, priorityTasks] of Object.entries(byPriority)) { - if (priorityTasks.length === 0) { - continue - } - - const emoji = - priority === 'critical' - ? '🔴' - : priority === 'high' - ? '🟠' - : priority === 'medium' - ? '🟡' - : '🟢' - - report += `### ${emoji} ${priority.charAt(0).toUpperCase() + priority.slice(1)} Priority\n\n` - - for (const task of priorityTasks) { - report += `#### ${task.ecosystem}\n\n` - report += `**Description**: ${task.description}\n\n` - report += `**Effort**: ${task.effort}\n\n` - - if (task.cdxgenCommit) { - report += `**Reference**: https://github.com/${CDXGEN_REPO}/commit/${task.cdxgenCommit}\n\n` - } - - report += `---\n\n` - } - } - - report += `## Recommended Actions\n\n` - report += `1. Review critical and high priority tasks immediately\n` - report += `2. Schedule medium priority tasks for next sprint\n` - report += `3. Consider low priority tasks for future iterations\n` - report += `4. Update LOCK-STEP-COMPLIANCE.md after porting changes\n\n` - - return report -} - -/** - * Update LOCK-STEP-COMPLIANCE.md with new baseline version. - */ -async function updateLockStepCompliance(newVersion: string): Promise { - logger.log(`Updating LOCK-STEP-COMPLIANCE.md with baseline v${newVersion}...`) - - const content = await fs.readFile(LOCK_STEP_COMPLIANCE_PATH, 'utf8') - - // Update baseline version. - const updatedContent = content.replace( - /\*\*Baseline Version\*\*: CycloneDX v1\.5 \+ cdxgen v[\d.]+/, - `**Baseline Version**: CycloneDX v1.5 + cdxgen v${newVersion}` - ) - - // Update last updated date. - const timestamp = new Date().toISOString().split('T')[0] - const finalContent = updatedContent.replace( - /\*\*Last Updated\*\*: \d{4}-\d{2}-\d{2}/, - `**Last Updated**: ${timestamp}` - ) - - await fs.writeFile(LOCK_STEP_COMPLIANCE_PATH, finalContent, 'utf8') - logger.log('✓ Updated LOCK-STEP-COMPLIANCE.md') -} - -/** - * Main execution. - */ -async function main() { - logger.log('🔍 Checking for cdxgen updates...\n') - - try { - // Parse command-line arguments. 
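// Note: only the --target-version=11.12.0 form is recognized here; a
// space-separated value ('--target-version 11.12.0') would be ignored by
// the startsWith('--target-version=') match below.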
- const args = process.argv.slice(2) - const checkOnly = args.includes('--check-only') - const targetVersionArg = args.find(arg => arg.startsWith('--target-version=')) - const targetVersion = targetVersionArg - ? targetVersionArg.split('=')[1] - : undefined - - // Fetch latest release. - let latestRelease: CdxgenRelease - - if (targetVersion) { - logger.log(`Using target version: v${targetVersion}\n`) - // TODO: Fetch specific version from GitHub. - throw new Error('Target version not yet implemented') - } else { - latestRelease = await fetchLatestCdxgenRelease() - } - - // Compare versions. - const comparison = compareVersions(latestRelease.version, CURRENT_BASELINE) - - if (comparison <= 0) { - logger.log( - `${colors.green('✓')} Up to date! Current baseline v${CURRENT_BASELINE} is the latest.\n` - ) - return - } - - logger.log( - `${colors.yellow('⚠')} Update available: v${CURRENT_BASELINE} → v${latestRelease.version}\n` - ) - - if (checkOnly) { - logger.log('--check-only flag detected. Exiting without analysis.\n') - return - } - - // Download and extract release. - const cdxgenPath = await downloadCdxgenRelease(latestRelease) - - // Analyze parsers. - const cdxgenParsers = await analyzeCdxgenParsers(cdxgenPath) - - // Compare implementations. - const tasks = await compareImplementations(cdxgenParsers) - - // Generate migration report. - const report = generateMigrationReport(CURRENT_BASELINE, latestRelease, tasks) - - // Write migration report. - const reportPath = path.join( - MIGRATION_REPORT_DIR, - 'cdxgen-migration-report.md' - ) - await fs.writeFile(reportPath, report, 'utf8') - logger.log(`\n✓ Migration report written to: ${reportPath}`) - - // Generate migration tasks. - const tasksMarkdown = tasks - .map( - t => - `- [ ] **[${t.priority.toUpperCase()}]** ${t.ecosystem}: ${t.description} (effort: ${t.effort})` - ) - .join('\n') - - const tasksPath = path.join(MIGRATION_REPORT_DIR, 'cdxgen-migration-tasks.md') - await fs.writeFile( - tasksPath, - `# cdxgen Migration Tasks\n\n${tasksMarkdown}\n`, - 'utf8' - ) - logger.log(`✓ Migration tasks written to: ${tasksPath}`) - - // Ask user if they want to update baseline. - logger.log( - `\n📝 Review migration tasks and update LOCK-STEP-COMPLIANCE.md manually.` - ) - logger.log( - ` Or run: pnpm run update-from-cdxgen --update-baseline v${latestRelease.version}` - ) - } catch (e) { - logger.error(`\n${colors.red('✗')} Error:`, e instanceof Error ? e.message : String(e)) - process.exit(1) - } -} - -// Run main function. -main() diff --git a/packages/sbom-generator/src/enrichment/index.mts b/packages/sbom-generator/src/enrichment/index.mts deleted file mode 100644 index b67d9dc03..000000000 --- a/packages/sbom-generator/src/enrichment/index.mts +++ /dev/null @@ -1,301 +0,0 @@ -/** - * Socket.dev Enrichment - * - * Enrich SBOM components with Socket.dev security data. - */ - -import type { Component, Sbom } from '../types/sbom.mts' - -/** - * Enrichment options. - */ -export interface EnrichOptions { - /** - * Socket.dev API token. - */ - apiToken: string - - /** - * Socket API base URL (defaults to production). - */ - apiBaseUrl?: string - - /** - * Timeout for API requests (milliseconds). - */ - timeout?: number -} - -/** - * Enriched SBOM with Socket security data. - */ -export interface EnrichedSbom extends Sbom { - components?: EnrichedComponent[] -} - -/** - * Component with Socket security data. 
- */
-export interface EnrichedComponent extends Component {
- socket?: SocketSecurityData
-}
-
-/**
- * Socket security data for a component.
- */
-export interface SocketSecurityData {
- /**
- * Socket security score (0-100).
- */
- score: number
-
- /**
- * Security issues found.
- */
- issues: SocketIssue[]
-
- /**
- * Supply chain risk level.
- */
- supplyChainRisk: 'low' | 'medium' | 'high' | 'critical'
-
- /**
- * Package quality metrics.
- */
- quality?: {
- maintenance: number
- popularity: number
- quality: number
- }
-
- /**
- * License information.
- */
- licenseInfo?: {
- spdxId: string
- name: string
- isOsiApproved: boolean
- }
-
- /**
- * Package URL on Socket.dev.
- */
- socketUrl: string
-}
-
-/**
- * Socket security issue.
- */
-export interface SocketIssue {
- /**
- * Issue type (CVE, malware, typosquat, etc.).
- */
- type: string
-
- /**
- * Issue severity.
- */
- severity: 'low' | 'medium' | 'high' | 'critical'
-
- /**
- * Issue title.
- */
- title: string
-
- /**
- * Issue description.
- */
- description: string
-
- /**
- * CVE ID (if applicable).
- */
- cve?: string
-
- /**
- * CVSS score (if applicable).
- */
- cvss?: number
-
- /**
- * Recommended fix.
- */
- fix?: string
-}
-
-/**
- * Enrich SBOM with Socket.dev security data.
- *
- * @param sbom - CycloneDX SBOM
- * @param options - Enrichment options
- * @returns Enriched SBOM
- */
-export async function enrichSbomWithSocket(
- sbom: Sbom,
- options: EnrichOptions,
-): Promise<EnrichedSbom> {
- if (!options.apiToken) {
- throw new Error('Socket.dev API token is required')
- }
-
- const enrichedComponents: EnrichedComponent[] = []
-
- // Enrich each component with Socket data.
- for (const component of sbom.components || []) {
- const enriched = await enrichComponent(component, options)
- enrichedComponents.push(enriched)
- }
-
- // Return enriched SBOM.
- return {
- ...sbom,
- components: enrichedComponents,
- }
-}
-
-/**
- * Enrich single component with Socket data.
- */
-async function enrichComponent(
- component: Component,
- options: EnrichOptions,
-): Promise<EnrichedComponent> {
- // Extract ecosystem and package info from PURL.
- const packageInfo = parsePurl(component.purl)
- if (!packageInfo) {
- return component
- }
-
- try {
- // Fetch Socket data for package.
- const socketData = await fetchSocketData(packageInfo, options)
-
- return {
- ...component,
- socket: socketData,
- }
- } catch (e) {
- // If Socket API fails, return component without enrichment.
- console.error(
- `Failed to enrich ${component.name}@${component.version}: ${e instanceof Error ? e.message : String(e)}`,
- )
- return component
- }
-}
-
-/**
- * Parse PURL into ecosystem and package info.
- */
-function parsePurl(
- purl: string | undefined,
-): { ecosystem: string; name: string; version: string } | null {
- if (!purl) {
- return null
- }
-
- // PURL format: pkg:ecosystem/name@version.
- const match = /^pkg:([^/]+)\/(.+?)@(.+)$/.exec(purl)
- if (!match) {
- return null
- }
-
- return {
- ecosystem: match[1]!,
- name: match[2]!,
- version: match[3]!,
- }
-}
-
-/**
- * Fetch Socket security data for package.
- */
-async function fetchSocketData(
- packageInfo: { ecosystem: string; name: string; version: string },
- options: EnrichOptions,
-): Promise<SocketSecurityData> {
- const baseUrl = options.apiBaseUrl || 'https://api.socket.dev'
- const timeout = options.timeout || 30_000
-
- // Build Socket API URL.
- const url = `${baseUrl}/v0/package/${packageInfo.ecosystem}/${encodeURIComponent(packageInfo.name)}/${encodeURIComponent(packageInfo.version)}`
-
- // Make API request.
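// For example, for purl 'pkg:npm/lodash@4.17.21' parsePurl yields
// { ecosystem: 'npm', name: 'lodash', version: '4.17.21' }, so the URL
// above becomes https://api.socket.dev/v0/package/npm/lodash/4.17.21
// (with the default base URL; scoped names are URI-encoded). The request
// below is aborted via AbortController once `timeout` elapses.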
- const controller = new AbortController() - const timeoutId = setTimeout(() => controller.abort(), timeout) - - try { - const response = await fetch(url, { - headers: { - Authorization: `Bearer ${options.apiToken}`, - 'Content-Type': 'application/json', - }, - signal: controller.signal, - }) - - if (!response.ok) { - throw new Error(`Socket API returned ${response.status}`) - } - - const data = (await response.json()) as SocketApiResponse - - // Transform Socket API response to our format. - return transformSocketResponse(data, packageInfo) - } finally { - clearTimeout(timeoutId) - } -} - -/** - * Socket API response (simplified). - */ -interface SocketApiResponse { - score?: number - issues?: Array<{ - type: string - severity: string - title: string - description: string - cve?: string - cvss?: number - fix?: string - }> - supplyChainRisk?: string - quality?: { - maintenance: number - popularity: number - quality: number - } - license?: { - spdxId: string - name: string - isOsiApproved: boolean - } -} - -/** - * Transform Socket API response to security data format. - */ -function transformSocketResponse( - response: SocketApiResponse, - packageInfo: { ecosystem: string; name: string; version: string }, -): SocketSecurityData { - return { - score: response.score || 0, - issues: - response.issues?.map(issue => ({ - type: issue.type, - severity: issue.severity as 'low' | 'medium' | 'high' | 'critical', - title: issue.title, - description: issue.description, - ...(issue.cve && { cve: issue.cve }), - ...(issue.cvss !== undefined && { cvss: issue.cvss }), - ...(issue.fix && { fix: issue.fix }), - })) || [], - supplyChainRisk: - (response.supplyChainRisk as 'low' | 'medium' | 'high' | 'critical') || - 'low', - ...(response.quality && { quality: response.quality }), - ...(response.license && { licenseInfo: response.license }), - socketUrl: `https://socket.dev/${packageInfo.ecosystem}/package/${encodeURIComponent(packageInfo.name)}`, - } -} diff --git a/packages/sbom-generator/src/formatters/index.mts b/packages/sbom-generator/src/formatters/index.mts deleted file mode 100644 index 61a8a260c..000000000 --- a/packages/sbom-generator/src/formatters/index.mts +++ /dev/null @@ -1,385 +0,0 @@ -/** - * CodeT5 Formatters - * - * Format SBOM and Socket Facts into optimized prompts for CodeT5 analysis. - * Reduces 50,000+ tokens to ~300 while preserving critical information. - */ - -import type { EnrichedComponent, EnrichedSbom } from '../enrichment/index.mts' - -// Export Socket Facts formatter. -export { - formatSocketFactsForCodeT5, - estimateSocketFactsTokenCount, - type SocketFactsFormatOptions, -} from './socket-facts.mts' - -/** - * Format options for CodeT5. - */ -export interface FormatOptions { - /** - * Analysis task type. - */ - task?: - | 'security-analysis' - | 'vulnerability-detection' - | 'dependency-audit' - | 'license-compliance' - - /** - * Include dependency graph visualization. - */ - includeGraph?: boolean - - /** - * Maximum number of components to include (prioritizes high-risk). - */ - maxComponents?: number - - /** - * Minimum severity to include (filters out low-severity issues). - */ - minSeverity?: 'low' | 'medium' | 'high' | 'critical' -} - -/** - * Format enriched SBOM for CodeT5 analysis. - * - * Optimizes token usage while preserving critical security information. 
- * - * @param sbom - Enriched SBOM with Socket data - * @param options - Formatting options - * @returns Optimized prompt for CodeT5 - */ -export function formatSbomForCodeT5( - sbom: EnrichedSbom, - options: FormatOptions = {}, -): string { - const task = options.task || 'security-analysis' - const maxComponents = options.maxComponents || 50 - const minSeverity = options.minSeverity || 'low' - - const sections: string[] = [] - - // Task definition. - sections.push(buildTaskPrompt(task)) - - // Project overview. - sections.push(buildProjectOverview(sbom)) - - // Critical issues (highest priority). - const criticalIssues = extractCriticalIssues(sbom, minSeverity) - if (criticalIssues.length > 0) { - sections.push(buildCriticalIssuesSection(criticalIssues)) - } - - // Component summary (prioritize high-risk packages). - const prioritizedComponents = prioritizeComponents( - sbom.components || [], - maxComponents, - ) - sections.push(buildComponentSummary(prioritizedComponents)) - - // Dependency graph (if requested). - if (options.includeGraph) { - sections.push(buildDependencyGraph(sbom)) - } - - // Analysis instructions. - sections.push(buildAnalysisInstructions(task)) - - return sections.join('\n\n') -} - -/** - * Build task-specific prompt. - */ -function buildTaskPrompt(task: string): string { - const TASK_PROMPTS = { - __proto__: null, - 'security-analysis': - 'TASK: Perform comprehensive security analysis of this project.', - 'vulnerability-detection': - 'TASK: Identify all known vulnerabilities and assess risk.', - 'dependency-audit': - 'TASK: Audit dependencies for security and supply chain risks.', - 'license-compliance': - 'TASK: Analyze license compliance and identify potential issues.', - } as const - - return ( - TASK_PROMPTS[task as keyof typeof TASK_PROMPTS] ?? - TASK_PROMPTS['security-analysis'] - ) -} - -/** - * Build project overview section. - */ -function buildProjectOverview(sbom: EnrichedSbom): string { - const component = sbom.metadata?.component - const totalComponents = sbom.components?.length || 0 - - return [ - 'PROJECT OVERVIEW:', - `Name: ${component?.name || 'unknown'}`, - `Version: ${component?.version || '0.0.0'}`, - `Total Dependencies: ${totalComponents}`, - component?.description ? `Description: ${component.description}` : null, - ] - .filter(Boolean) - .join('\n') -} - -/** - * Extract critical issues from all components. - */ -function extractCriticalIssues( - sbom: EnrichedSbom, - minSeverity: string, -): Array<{ - component: string - version: string - issue: { - type: string - severity: string - title: string - cve?: string - cvss?: number - } -}> { - const SEVERITY_RANK = { - __proto__: null, - low: 1, - medium: 2, - high: 3, - critical: 4, - } as const - - const minRank = SEVERITY_RANK[minSeverity as keyof typeof SEVERITY_RANK] ?? 1 - const criticalIssues: Array<{ - component: string - version: string - issue: { - type: string - severity: string - title: string - cve?: string - cvss?: number - } - }> = [] - - for (const component of (sbom.components || []) as EnrichedComponent[]) { - if (!component.socket?.issues) { - continue - } - - for (const issue of component.socket.issues) { - const issueRank = SEVERITY_RANK[issue.severity as keyof typeof SEVERITY_RANK] ?? 
1 - if (issueRank >= minRank) { - criticalIssues.push({ - component: component.name, - version: component.version, - issue: { - type: issue.type, - severity: issue.severity, - title: issue.title, - ...(issue.cve && { cve: issue.cve }), - ...(issue.cvss !== undefined && { cvss: issue.cvss }), - }, - }) - } - } - } - - // Sort by severity (critical first). - criticalIssues.sort((a, b) => { - const rankA = SEVERITY_RANK[a.issue.severity as keyof typeof SEVERITY_RANK] ?? 0 - const rankB = SEVERITY_RANK[b.issue.severity as keyof typeof SEVERITY_RANK] ?? 0 - return rankB - rankA - }) - - return criticalIssues -} - -/** - * Build critical issues section. - */ -function buildCriticalIssuesSection( - issues: Array<{ - component: string - version: string - issue: { - type: string - severity: string - title: string - cve?: string - cvss?: number - } - }>, -): string { - const lines = ['CRITICAL ISSUES:'] - - for (const { component, version, issue } of issues.slice(0, 20)) { - const cveInfo = issue.cve ? ` [${issue.cve}]` : '' - const cvssInfo = issue.cvss ? ` CVSS ${issue.cvss}` : '' - lines.push( - `- ${issue.severity.toUpperCase()}: ${component}@${version}${cveInfo}${cvssInfo}`, - ) - lines.push(` ${issue.title}`) - } - - return lines.join('\n') -} - -/** - * Prioritize components by risk. - */ -function prioritizeComponents( - components: EnrichedComponent[], - maxComponents: number, -): EnrichedComponent[] { - // Calculate risk score for each component. - const scored = components.map(component => { - let riskScore = 0 - - if (component.socket) { - // Score based on Socket data. - riskScore += (100 - component.socket.score) * 10 - riskScore += component.socket.issues.length * 5 - - for (const issue of component.socket.issues) { - if (issue.severity === 'critical') { - riskScore += 50 - } else if (issue.severity === 'high') { - riskScore += 30 - } else if (issue.severity === 'medium') { - riskScore += 10 - } - } - - if (component.socket.supplyChainRisk === 'critical') { - riskScore += 40 - } else if (component.socket.supplyChainRisk === 'high') { - riskScore += 20 - } - } - - return { component, riskScore } - }) - - // Sort by risk (highest first). - scored.sort((a, b) => b.riskScore - a.riskScore) - - // Return top N components. - return scored.slice(0, maxComponents).map(s => s.component) -} - -/** - * Build component summary section. - */ -function buildComponentSummary(components: EnrichedComponent[]): string { - const lines = ['COMPONENT SUMMARY:'] - - for (const component of components.slice(0, 30)) { - const socketInfo = component.socket - ? ` [Score: ${component.socket.score}, Issues: ${component.socket.issues.length}]` - : '' - - lines.push(`- ${component.name}@${component.version}${socketInfo}`) - - if (socketInfo && component.socket!.issues.length > 0) { - const topIssue = component.socket!.issues[0] - if (topIssue) { - lines.push(` ${topIssue.severity.toUpperCase()}: ${topIssue.title}`) - } - } - } - - return lines.join('\n') -} - -/** - * Build dependency graph section. - */ -function buildDependencyGraph(sbom: EnrichedSbom): string { - const lines = ['DEPENDENCY GRAPH:'] - - const rootComponent = sbom.metadata?.component - if (rootComponent) { - lines.push(`${rootComponent.name}@${rootComponent.version}`) - - // Find root dependencies. 
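// (For the prioritization above: a component with Socket score 60, one
// critical issue, and supplyChainRisk 'high' scores
// (100 - 60) * 10 + 1 * 5 + 50 + 20 = 475, so it sorts well ahead of
// healthy packages.)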
- const rootDeps = sbom.dependencies?.find( - dep => dep.ref === rootComponent['bom-ref'], - ) - - if (rootDeps?.dependsOn) { - for (const depRef of rootDeps.dependsOn.slice(0, 20)) { - const depComponent = (sbom.components as EnrichedComponent[])?.find( - c => c['bom-ref'] === depRef, - ) - if (depComponent) { - const issueCount = depComponent.socket?.issues.length || 0 - const issueInfo = issueCount > 0 ? ` (${issueCount} issues)` : '' - lines.push( - ` - ${depComponent.name}@${depComponent.version}${issueInfo}`, - ) - } - } - } - } - - return lines.join('\n') -} - -/** - * Build analysis instructions. - */ -function buildAnalysisInstructions(task: string): string { - const INSTRUCTIONS = { - __proto__: null, - 'security-analysis': [ - 'ANALYSIS REQUIREMENTS:', - '- Identify all critical and high-severity vulnerabilities', - '- Assess supply chain risks', - '- Recommend specific version updates', - '- Prioritize fixes by impact and exploitability', - ].join('\n'), - 'vulnerability-detection': [ - 'ANALYSIS REQUIREMENTS:', - '- List all CVEs with CVSS scores', - '- Identify exploitable vulnerabilities', - '- Provide patch availability status', - '- Recommend mitigation strategies', - ].join('\n'), - 'dependency-audit': [ - 'ANALYSIS REQUIREMENTS:', - '- Evaluate dependency health and maintenance', - '- Identify outdated or abandoned packages', - '- Assess security posture', - '- Recommend modern alternatives', - ].join('\n'), - 'license-compliance': [ - 'ANALYSIS REQUIREMENTS:', - '- List all licenses used', - '- Identify license conflicts', - '- Flag copyleft licenses', - '- Recommend compliance actions', - ].join('\n'), - } as const - - return ( - INSTRUCTIONS[task as keyof typeof INSTRUCTIONS] ?? - INSTRUCTIONS['security-analysis'] - ) -} - -/** - * Calculate token count estimate for prompt. - */ -export function estimateTokenCount(prompt: string): number { - // Rough estimate: 4 characters per token. - return Math.ceil(prompt.length / 4) -} diff --git a/packages/sbom-generator/src/formatters/socket-facts.mts b/packages/sbom-generator/src/formatters/socket-facts.mts deleted file mode 100644 index 81383fb51..000000000 --- a/packages/sbom-generator/src/formatters/socket-facts.mts +++ /dev/null @@ -1,579 +0,0 @@ -/** - * Socket Facts CodeT5 Formatter - * - * Format Socket Facts (reachability analysis) into optimized prompts for CodeT5. - * Dramatically improves signal-to-noise ratio by prioritizing reachable vulnerabilities. - */ - -import type { - CallStackEntry, - Reachability, - SocketFactArtifact, - SocketFacts, -} from '../types/socket-facts.mts' -import type { EnrichedSbom } from '../enrichment/index.mts' - -/** - * Format options for Socket Facts CodeT5 output. - */ -export interface SocketFactsFormatOptions { - /** - * Analysis task type. - */ - task?: - | 'security-analysis' - | 'vulnerability-detection' - | 'dependency-audit' - | 'license-compliance' - - /** - * Prioritize reachable vulnerabilities (default: true). - */ - prioritizeReachable?: boolean - - /** - * Include call stacks for reachable vulnerabilities (default: true). - */ - includeCallStacks?: boolean - - /** - * Maximum call stack depth to include (default: 5). - */ - maxCallStackDepth?: number - - /** - * Minimum confidence score to include (0.0-1.0, default: 0.7). - */ - minConfidence?: number - - /** - * Include unreachable vulnerabilities in output (default: false). - */ - includeUnreachable?: boolean - - /** - * Maximum number of reachable vulnerabilities to include (default: 20). 
- */
- maxReachableVulns?: number
-
- /**
- * Maximum number of unreachable vulnerabilities to include (default: 10).
- */
- maxUnreachableVulns?: number
-}
-
-/**
- * Enriched artifact with reachability context.
- */
-interface EnrichedArtifact {
- artifact: SocketFactArtifact
- reachableVulns: Array<{
- vuln: Reachability
- metadata: NonNullable<SocketFactArtifact['vulnerabilities']>[number]
- }>
- unreachableVulns: Array<{
- vuln: Reachability
- metadata: NonNullable<SocketFactArtifact['vulnerabilities']>[number]
- }>
- riskScore: number
-}
-
-/**
- * Format Socket Facts for CodeT5 analysis.
- *
- * Optimizes token usage by focusing on reachable vulnerabilities while
- * providing rich context through call stacks and confidence scores.
- *
- * @param socketFacts - Socket Facts from reachability analysis
- * @param sbom - Optional SBOM for cross-reference
- * @param options - Formatting options
- * @returns Optimized prompt for CodeT5
- */
-export function formatSocketFactsForCodeT5(
- socketFacts: SocketFacts,
- sbom?: EnrichedSbom,
- options: SocketFactsFormatOptions = {},
-): string {
- const task = options.task || 'security-analysis'
- const prioritizeReachable = options.prioritizeReachable ?? true
- const includeCallStacks = options.includeCallStacks ?? true
- const maxCallStackDepth = options.maxCallStackDepth || 5
- const minConfidence = options.minConfidence || 0.7
- const includeUnreachable = options.includeUnreachable ?? false
- const maxReachableVulns = options.maxReachableVulns || 20
- const maxUnreachableVulns = options.maxUnreachableVulns || 10
-
- const sections: string[] = []
-
- // Task definition with reachability context.
- sections.push(buildTaskPrompt(task))
-
- // Project overview.
- sections.push(buildProjectOverview(socketFacts, sbom))
-
- // Enrich artifacts with reachability data.
- const enrichedArtifacts = enrichArtifactsWithReachability(
- socketFacts.components,
- minConfidence,
- )
-
- // Prioritize by reachability and risk.
- const prioritized = prioritizeReachable
- ? prioritizeByReachability(enrichedArtifacts)
- : enrichedArtifacts
-
- // Critical issues (reachable vulnerabilities).
- const reachableIssues = extractReachableIssues(prioritized, maxReachableVulns)
- if (reachableIssues.length > 0) {
- sections.push(
- buildReachableIssuesSection(
- reachableIssues,
- includeCallStacks,
- maxCallStackDepth,
- ),
- )
- }
-
- // Unreachable vulnerabilities (if requested).
- if (includeUnreachable) {
- const unreachableIssues = extractUnreachableIssues(
- prioritized,
- maxUnreachableVulns,
- )
- if (unreachableIssues.length > 0) {
- sections.push(buildUnreachableIssuesSection(unreachableIssues))
- }
- }
-
- // Component summary.
- sections.push(buildComponentSummary(prioritized.slice(0, 30)))
-
- // Dependency graph (if SBOM provided).
- if (sbom) {
- sections.push(buildDependencyGraph(prioritized, sbom))
- }
-
- // Analysis instructions.
- sections.push(buildAnalysisInstructions(task))
-
- return sections.join('\n\n')
-}
-
-/**
- * Build task-specific prompt with reachability awareness.
- */ -function buildTaskPrompt(task: string): string { - const TASK_PROMPTS = { - __proto__: null, - 'security-analysis': - 'TASK: Perform reachability-aware security analysis of this project.\n\n' + - 'REACHABILITY CONTEXT:\n' + - '- Vulnerabilities marked REACHABLE require immediate attention\n' + - '- Vulnerabilities marked UNREACHABLE are low priority (dead code)\n' + - '- Call stacks show the path from your code to vulnerable code\n' + - '- Confidence scores indicate analysis certainty (0.0-1.0)', - 'vulnerability-detection': - 'TASK: Identify reachable vulnerabilities and assess exploitability.\n\n' + - 'REACHABILITY CONTEXT:\n' + - '- Focus on REACHABLE vulnerabilities that can be exploited\n' + - '- Use call stacks to understand attack surface\n' + - '- Prioritize by confidence score and severity', - 'dependency-audit': - 'TASK: Audit dependencies with reachability analysis.\n\n' + - 'REACHABILITY CONTEXT:\n' + - '- Identify which dependencies are actually used (not dead code)\n' + - '- Focus on reachable security issues\n' + - '- Recommend upgrades for actively used vulnerable packages', - 'license-compliance': - 'TASK: Analyze license compliance with usage context.\n\n' + - 'REACHABILITY CONTEXT:\n' + - '- Focus on licenses of actively used dependencies\n' + - '- Dead code dependencies have lower compliance risk', - } as const - - return ( - TASK_PROMPTS[task as keyof typeof TASK_PROMPTS] ?? - TASK_PROMPTS['security-analysis'] - ) -} - -/** - * Build project overview section. - */ -function buildProjectOverview( - socketFacts: SocketFacts, - sbom?: EnrichedSbom, -): string { - const totalComponents = socketFacts.components.length - const reachableCount = socketFacts.components.filter(c => - c.reachability?.some(r => r.state === 'reachable'), - ).length - - const lines = ['PROJECT OVERVIEW:'] - - if (sbom?.metadata?.component) { - lines.push(`Name: ${sbom.metadata.component.name}`) - lines.push(`Version: ${sbom.metadata.component.version}`) - } - - lines.push(`Total Dependencies: ${totalComponents}`) - lines.push( - `Reachability Analysis: ${socketFacts.tier1ReachabilityScanId ? 'Complete' : 'Partial'}`, - ) - lines.push(`Components with Reachable Vulnerabilities: ${reachableCount}`) - - return lines.join('\n') -} - -/** - * Enrich artifacts with reachability categorization. - */ -function enrichArtifactsWithReachability( - components: SocketFactArtifact[], - minConfidence: number, -): EnrichedArtifact[] { - return components - .map(artifact => { - const reachableVulns: EnrichedArtifact['reachableVulns'] = [] - const unreachableVulns: EnrichedArtifact['unreachableVulns'] = [] - - // Categorize vulnerabilities by reachability. - for (const reachability of artifact.reachability || []) { - if ( - reachability.confidence !== undefined && - reachability.confidence < minConfidence - ) { - continue - } - - const metadata = artifact.vulnerabilities?.find( - v => v.ghsaId === reachability.vulnerability, - ) - - if (metadata) { - if (reachability.state === 'reachable') { - reachableVulns.push({ vuln: reachability, metadata }) - } else if (reachability.state === 'unreachable') { - unreachableVulns.push({ vuln: reachability, metadata }) - } - } - } - - // Calculate risk score. 
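// Worked example: two reachable vulns with confidence 0.95 and 0.80 on a
// direct dependency score 2 * 100 + (0.95 + 0.80) * 50 + 20 = 307.5;
// dead-code artifacts lose 50 instead.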
- let riskScore = 0
-
- if (reachableVulns.length > 0) {
- riskScore += reachableVulns.length * 100
- riskScore += reachableVulns.reduce(
- (sum, { vuln }) => sum + (vuln.confidence || 0) * 50,
- 0,
- )
- }
-
- if (artifact.direct) {
- riskScore += 20
- }
-
- if (artifact.dead) {
- riskScore -= 50
- }
-
- return {
- artifact,
- reachableVulns,
- unreachableVulns,
- riskScore,
- }
- })
- .filter(e => e.reachableVulns.length > 0 || e.unreachableVulns.length > 0)
-}
-
-/**
- * Prioritize artifacts by reachability and risk.
- */
-function prioritizeByReachability(
- enriched: EnrichedArtifact[],
-): EnrichedArtifact[] {
- return enriched.sort((a, b) => {
- // Reachable vulnerabilities first.
- if (a.reachableVulns.length !== b.reachableVulns.length) {
- return b.reachableVulns.length - a.reachableVulns.length
- }
-
- // Then by risk score.
- return b.riskScore - a.riskScore
- })
-}
-
-/**
- * Extract reachable issues for critical section.
- */
-function extractReachableIssues(
- enriched: EnrichedArtifact[],
- maxCount: number,
-): Array<{
- artifact: SocketFactArtifact
- vuln: Reachability
- metadata: NonNullable<SocketFactArtifact['vulnerabilities']>[number]
-}> {
- const issues: Array<{
- artifact: SocketFactArtifact
- vuln: Reachability
- metadata: NonNullable<SocketFactArtifact['vulnerabilities']>[number]
- }> = []
-
- for (const { artifact, reachableVulns } of enriched) {
- for (const { vuln, metadata } of reachableVulns) {
- issues.push({ artifact, vuln, metadata })
- }
- }
-
- // Sort by confidence (highest first).
- issues.sort((a, b) => (b.vuln.confidence || 0) - (a.vuln.confidence || 0))
-
- return issues.slice(0, maxCount)
-}
-
-/**
- * Extract unreachable issues.
- */
-function extractUnreachableIssues(
- enriched: EnrichedArtifact[],
- maxCount: number,
-): Array<{
- artifact: SocketFactArtifact
- vuln: Reachability
- metadata: NonNullable<SocketFactArtifact['vulnerabilities']>[number]
-}> {
- const issues: Array<{
- artifact: SocketFactArtifact
- vuln: Reachability
- metadata: NonNullable<SocketFactArtifact['vulnerabilities']>[number]
- }> = []
-
- for (const { artifact, unreachableVulns } of enriched) {
- for (const { vuln, metadata } of unreachableVulns) {
- issues.push({ artifact, vuln, metadata })
- }
- }
-
- return issues.slice(0, maxCount)
-}
-
-/**
- * Build reachable issues section.
- */
-function buildReachableIssuesSection(
- issues: Array<{
- artifact: SocketFactArtifact
- vuln: Reachability
- metadata: NonNullable<SocketFactArtifact['vulnerabilities']>[number]
- }>,
- includeCallStacks: boolean,
- maxCallStackDepth: number,
-): string {
- const lines = ['CRITICAL ISSUES (REACHABLE):']
-
- for (const { artifact, vuln, metadata } of issues) {
- const confidence = vuln.confidence?.toFixed(2) || '?'
- const ghsaId = metadata.ghsaId
-
- lines.push(
- `\n🔴 REACHABLE (confidence: ${confidence}): ${artifact.name}@${artifact.version} [${ghsaId}]`,
- )
-
- if (metadata.reachabilityData?.publicComment) {
- lines.push(` ${metadata.reachabilityData.publicComment}`)
- }
-
- if (includeCallStacks && vuln.callStack && vuln.callStack.length > 0) {
- lines.push(
- ` Call Stack (${Math.min(vuln.callStack.length, maxCallStackDepth)} hops):`,
- )
-
- for (const [index, entry] of vuln.callStack
- .slice(0, maxCallStackDepth)
- .entries()) {
- const location = formatSourceLocation(entry)
- lines.push(` ${index + 1}. ${location}`)
- }
- }
-
- lines.push(` Recommendation: Upgrade to fix ${ghsaId}`)
- }
-
- return lines.join('\n')
-}
-
-/**
- * Build unreachable issues section.
- */
-function buildUnreachableIssuesSection(
- issues: Array<{
- artifact: SocketFactArtifact
- vuln: Reachability
- metadata: NonNullable<SocketFactArtifact['vulnerabilities']>[number]
- }>,
-): string {
- const lines = ['VULNERABILITIES (UNREACHABLE):']
-
- for (const { artifact, vuln, metadata } of issues) {
- const confidence = vuln.confidence?.toFixed(2) || '?'
- const ghsaId = metadata.ghsaId
-
- lines.push(
- `\n⚪ UNREACHABLE (confidence: ${confidence}): ${artifact.name}@${artifact.version} [${ghsaId}]`,
- )
-
- if (vuln.reason) {
- lines.push(` Status: ${vuln.reason}`)
- } else if (artifact.dead) {
- lines.push(` Status: Dead code (never imported)`)
- } else if (artifact.dev) {
- lines.push(` Status: Dev dependency (not in production)`)
- }
- }
-
- return lines.join('\n')
-}
-
-/**
- * Build component summary section.
- */
-function buildComponentSummary(enriched: EnrichedArtifact[]): string {
- const lines = ['COMPONENT SUMMARY (PRIORITIZED BY REACHABILITY + RISK):']
-
- for (const [
- index,
- { artifact, reachableVulns, unreachableVulns },
- ] of enriched.entries()) {
- const reachableCount = reachableVulns.length
- const unreachableCount = unreachableVulns.length
- const statusBadge = reachableCount > 0 ? '🔴' : '⚪'
-
- let summary = `\n${index + 1}. ${artifact.name}@${artifact.version} ${statusBadge}`
-
- if (reachableCount > 0) {
- summary += ` [REACHABLE VULNS: ${reachableCount}]`
- } else if (unreachableCount > 0) {
- summary += ` [UNREACHABLE VULNS: ${unreachableCount}]`
- }
-
- if (artifact.dead) {
- summary += ' [DEAD CODE]'
- } else if (artifact.dev) {
- summary += ' [DEV]'
- } else if (artifact.direct) {
- summary += ' [DIRECT]'
- }
-
- lines.push(summary)
-
- // Show top reachable vulnerability.
- if (reachableCount > 0) {
- const topVuln = reachableVulns[0]
- if (topVuln) {
- const confidence = topVuln.vuln.confidence?.toFixed(2) || '?'
- lines.push(
- ` Confidence: ${confidence} - ${topVuln.metadata.reachabilityData?.publicComment || topVuln.metadata.ghsaId}`,
- )
- }
- }
- }
-
- return lines.join('\n')
-}
-
-/**
- * Build dependency graph section.
- */
-function buildDependencyGraph(
- enriched: EnrichedArtifact[],
- sbom: EnrichedSbom,
-): string {
- const lines = ['DEPENDENCY GRAPH (REACHABILITY-AWARE):']
-
- const rootComponent = sbom.metadata?.component
- if (rootComponent) {
- lines.push(`\n${rootComponent.name}@${rootComponent.version}`)
-
- // Show direct dependencies with reachability status.
- for (const e of enriched.filter(e => e.artifact.direct).slice(0, 10)) {
- const statusBadge = e.reachableVulns.length > 0 ? '🔴' : '⚪'
- const vulnCount =
- e.reachableVulns.length > 0
- ? ` [${e.reachableVulns.length} reachable vulns]`
- : ''
-
- lines.push(
- ` - ${e.artifact.name}@${e.artifact.version} ${statusBadge}${vulnCount}`,
- )
- }
- }
-
- return lines.join('\n')
-}
-
-/**
- * Build analysis instructions.
- */ -function buildAnalysisInstructions(task: string): string { - const INSTRUCTIONS = { - __proto__: null, - 'security-analysis': [ - 'ANALYSIS REQUIREMENTS:', - '- PRIORITIZE reachable vulnerabilities over unreachable ones', - '- FOCUS on high-confidence reachability results (>0.8)', - '- CONSIDER call stack depth: shorter = more direct threat', - '- RECOMMEND version upgrades for reachable vulnerabilities', - '- DEPRIORITIZE unreachable vulnerabilities (can defer fixes)', - '- EXPLAIN reachability context in your analysis', - ].join('\n'), - 'vulnerability-detection': [ - 'ANALYSIS REQUIREMENTS:', - '- List all REACHABLE CVEs with confidence scores', - '- Identify exploitable vulnerabilities using call stacks', - '- Provide patch availability for reachable issues', - '- Recommend mitigation strategies prioritized by reachability', - ].join('\n'), - 'dependency-audit': [ - 'ANALYSIS REQUIREMENTS:', - '- Evaluate health of ACTIVELY USED dependencies', - '- Identify reachable vulnerabilities in production code', - '- Assess security posture of direct dependencies', - '- Recommend upgrades for packages with reachable issues', - ].join('\n'), - 'license-compliance': [ - 'ANALYSIS REQUIREMENTS:', - '- List licenses of ACTIVELY USED dependencies', - '- Deprioritize licenses of dead code dependencies', - '- Flag copyleft licenses in production code', - '- Recommend compliance actions for active dependencies', - ].join('\n'), - } as const - - return ( - INSTRUCTIONS[task as keyof typeof INSTRUCTIONS] ?? - INSTRUCTIONS['security-analysis'] - ) -} - -/** - * Format source location for display. - */ -function formatSourceLocation(entry: CallStackEntry): string { - const loc = entry.sourceLocation - const file = loc.file.replace(/^node_modules\//, '') - const line = loc.start.line - const col = loc.start.column - - return `${file}:${line}:${col}` -} - -/** - * Calculate token count estimate for prompt. - */ -export function estimateSocketFactsTokenCount(prompt: string): number { - // Rough estimate: 4 characters per token. - return Math.ceil(prompt.length / 4) -} diff --git a/packages/sbom-generator/src/formatters/socket-facts.test.mts b/packages/sbom-generator/src/formatters/socket-facts.test.mts deleted file mode 100644 index 4f8c70322..000000000 --- a/packages/sbom-generator/src/formatters/socket-facts.test.mts +++ /dev/null @@ -1,233 +0,0 @@ -/** - * Tests for Socket Facts CodeT5 formatter. - */ - -import { readFile } from 'node:fs/promises' -import path from 'node:path' -import { describe, expect, it } from 'vitest' -import type { SocketFacts } from '../types/socket-facts.mts' -import { - estimateSocketFactsTokenCount, - formatSocketFactsForCodeT5, -} from './socket-facts.mts' - -describe('formatSocketFactsForCodeT5', () => { - it('should format Socket Facts with reachability context', async () => { - const socketFactsPath = path.join( - __dirname, - '../../test/fixtures/socket-facts-sample.json', - ) - const socketFactsJson = await readFile(socketFactsPath, 'utf8') - const socketFacts: SocketFacts = JSON.parse(socketFactsJson) - - const prompt = formatSocketFactsForCodeT5(socketFacts) - - // Verify structure. 
- expect(prompt).toContain( - 'TASK: Perform reachability-aware security analysis', - ) - expect(prompt).toContain('REACHABILITY CONTEXT:') - expect(prompt).toContain('PROJECT OVERVIEW:') - expect(prompt).toContain('CRITICAL ISSUES (REACHABLE):') - expect(prompt).toContain('COMPONENT SUMMARY') - expect(prompt).toContain('ANALYSIS REQUIREMENTS:') - }) - - it('should prioritize reachable vulnerabilities', async () => { - const socketFactsPath = path.join( - __dirname, - '../../test/fixtures/socket-facts-sample.json', - ) - const socketFactsJson = await readFile(socketFactsPath, 'utf8') - const socketFacts: SocketFacts = JSON.parse(socketFactsJson) - - const prompt = formatSocketFactsForCodeT5(socketFacts) - - // Verify reachable vulnerabilities appear first. - const reachableIndex = prompt.indexOf('🔴 REACHABLE') - const unreachableIndex = prompt.indexOf('⚪ UNREACHABLE') - - // Reachable should appear before unreachable (or unreachable should not appear). - if (unreachableIndex !== -1) { - expect(reachableIndex).toBeLessThan(unreachableIndex) - } - - // Verify specific vulnerabilities. - expect(prompt).toContain('lodash@4.17.15') - expect(prompt).toContain('GHSA-29mw-wpgm-hmr9') - expect(prompt).toContain('confidence: 0.95') - }) - - it('should include call stacks for reachable vulnerabilities', async () => { - const socketFactsPath = path.join( - __dirname, - '../../test/fixtures/socket-facts-sample.json', - ) - const socketFactsJson = await readFile(socketFactsPath, 'utf8') - const socketFacts: SocketFacts = JSON.parse(socketFactsJson) - - const prompt = formatSocketFactsForCodeT5(socketFacts, undefined, { - includeCallStacks: true, - }) - - // Verify call stack information. - expect(prompt).toContain('Call Stack') - expect(prompt).toContain('your-app/src/index.js:42') - expect(prompt).toContain('node_modules/lodash/merge.js') - }) - - it('should filter out unreachable vulnerabilities by default', async () => { - const socketFactsPath = path.join( - __dirname, - '../../test/fixtures/socket-facts-sample.json', - ) - const socketFactsJson = await readFile(socketFactsPath, 'utf8') - const socketFacts: SocketFacts = JSON.parse(socketFactsJson) - - const prompt = formatSocketFactsForCodeT5(socketFacts, undefined, { - includeUnreachable: false, - }) - - // Verify unreachable vulnerabilities are not included. - expect(prompt).not.toContain('VULNERABILITIES (UNREACHABLE):') - expect(prompt).not.toContain('xmldom@0.5.0') - expect(prompt).not.toContain('yargs-parser@15.0.0') - }) - - it('should include unreachable vulnerabilities when requested', async () => { - const socketFactsPath = path.join( - __dirname, - '../../test/fixtures/socket-facts-sample.json', - ) - const socketFactsJson = await readFile(socketFactsPath, 'utf8') - const socketFacts: SocketFacts = JSON.parse(socketFactsJson) - - const prompt = formatSocketFactsForCodeT5(socketFacts, undefined, { - includeUnreachable: true, - }) - - // Verify unreachable vulnerabilities are included. 
- expect(prompt).toContain('VULNERABILITIES (UNREACHABLE):') - expect(prompt).toContain('xmldom@0.5.0') - expect(prompt).toContain('CVE-2021-32796') - }) - - it('should respect minConfidence option', async () => { - const socketFactsPath = path.join( - __dirname, - '../../test/fixtures/socket-facts-sample.json', - ) - const socketFactsJson = await readFile(socketFactsPath, 'utf8') - const socketFacts: SocketFacts = JSON.parse(socketFactsJson) - - const prompt = formatSocketFactsForCodeT5(socketFacts, undefined, { - minConfidence: 0.9, - }) - - // Only lodash@4.17.15 has confidence >= 0.9. - expect(prompt).toContain('lodash@4.17.15') - expect(prompt).toContain('confidence: 0.95') - - // axios@0.21.0 has confidence 0.87, should be filtered out. - expect(prompt).not.toContain('axios@0.21.0') - }) - - it('should format for different task types', async () => { - const socketFactsPath = path.join( - __dirname, - '../../test/fixtures/socket-facts-sample.json', - ) - const socketFactsJson = await readFile(socketFactsPath, 'utf8') - const socketFacts: SocketFacts = JSON.parse(socketFactsJson) - - const securityPrompt = formatSocketFactsForCodeT5(socketFacts, undefined, { - task: 'security-analysis', - }) - const vulnPrompt = formatSocketFactsForCodeT5(socketFacts, undefined, { - task: 'vulnerability-detection', - }) - - expect(securityPrompt).toContain('security analysis') - expect(vulnPrompt).toContain('vulnerabilities and assess exploitability') - }) - - it('should generate token-efficient output', async () => { - const socketFactsPath = path.join( - __dirname, - '../../test/fixtures/socket-facts-sample.json', - ) - const socketFactsJson = await readFile(socketFactsPath, 'utf8') - const socketFacts: SocketFacts = JSON.parse(socketFactsJson) - - const prompt = formatSocketFactsForCodeT5(socketFacts, undefined, { - includeUnreachable: false, - }) - - const tokenCount = estimateSocketFactsTokenCount(prompt) - - // For 5 components (2 reachable, 3 unreachable), expect < 500 tokens. - // With includeUnreachable: false, should be even less. - expect(tokenCount).toBeLessThan(500) - }) - - it('should handle empty Socket Facts', () => { - const socketFacts: SocketFacts = { - components: [], - } - - const prompt = formatSocketFactsForCodeT5(socketFacts) - - expect(prompt).toContain('TASK:') - expect(prompt).toContain('PROJECT OVERVIEW:') - expect(prompt).not.toContain('CRITICAL ISSUES') - }) - - it('should handle Socket Facts without reachability data', async () => { - const socketFacts: SocketFacts = { - components: [ - { - type: 'npm', - name: 'test-package', - version: '1.0.0', - id: 'pkg:npm/test-package@1.0.0', - direct: true, - dev: false, - dead: false, - vulnerabilities: [ - { - ghsaId: 'GHSA-test-1234', - range: '<2.0.0', - reachabilityData: { - publicComment: 'Test vulnerability', - pattern: [], - undeterminableReachability: true, - }, - }, - ], - // No reachability field. - }, - ], - } - - const prompt = formatSocketFactsForCodeT5(socketFacts) - - expect(prompt).toContain('PROJECT OVERVIEW:') - expect(prompt).not.toContain('CRITICAL ISSUES') - }) -}) - -describe('estimateSocketFactsTokenCount', () => { - it('should estimate token count correctly', () => { - const prompt = 'This is a test prompt with about 10 words in it.' - const tokenCount = estimateSocketFactsTokenCount(prompt) - - // Rough estimate: 4 characters per token. 
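// [Editor's aside — illustrative, standalone.] The 4-characters-per-token rule of
// thumb is a coarse approximation for English-like text; real tokenizers vary,
// which is why the formatter only reports an estimate.
const sample = 'Estimate tokens for this sentence.'
const estimated = Math.ceil(sample.length / 4)
// 34 characters → Math.ceil(34 / 4) = 9 estimated tokens.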
- const expectedTokens = Math.ceil(prompt.length / 4)
- expect(tokenCount).toBe(expectedTokens)
- })
-
- it('should handle empty prompts', () => {
- const tokenCount = estimateSocketFactsTokenCount('')
- expect(tokenCount).toBe(0)
- })
-})
diff --git a/packages/sbom-generator/src/index.mts b/packages/sbom-generator/src/index.mts
deleted file mode 100644
index 3a716651c..000000000
--- a/packages/sbom-generator/src/index.mts
+++ /dev/null
@@ -1,221 +0,0 @@
-/**
- * SBOM Generator Entry Point
- *
- * Type-safe CycloneDX SBOM generator for multi-ecosystem projects.
- */
-
-import { randomUUID } from 'node:crypto'
-
-import type {
- Component,
- Dependency,
- ExternalReference,
- Sbom,
-} from './types/sbom.mts'
-import type { Ecosystem, ParseOptions, Parser } from './types/parser.mts'
-
-import { NpmParser } from './parsers/index.mts'
-
-/**
- * Generate options.
- */
-export interface GenerateOptions extends ParseOptions {
- /**
- * Limit to specific ecosystems (auto-detects all if not specified).
- */
- ecosystems?: Ecosystem[]
-}
-
-/**
- * Available parsers.
- */
-const PARSERS: Parser[] = [new NpmParser()]
-
-/**
- * Generate CycloneDX SBOM for a project.
- *
- * @param projectPath - Path to project directory
- * @param options - Generation options
- * @returns CycloneDX SBOM object
- */
-export async function generateSbom(
- projectPath: string,
- options: GenerateOptions = {},
-): Promise<Sbom> {
- // Auto-detect applicable parsers.
- const parsers = await detectParsers(projectPath, options.ecosystems)
-
- if (!parsers.length) {
- throw new Error('No supported ecosystems detected in project')
- }
-
- // Parse each ecosystem.
- const results = await Promise.all(
- parsers.map(p => p.parse(projectPath, options)),
- )
-
- // Combine into single SBOM.
- return combineSbom(results)
-}
-
-/**
- * Detect which parsers can handle this project.
- */
-async function detectParsers(
- projectPath: string,
- ecosystems?: Ecosystem[],
-): Promise<Parser[]> {
- const applicable: Parser[] = []
-
- for (const parser of PARSERS) {
- // Skip if not in allowed ecosystems.
- if (ecosystems && !ecosystems.includes(parser.ecosystem)) {
- continue
- }
-
- // Check if parser can handle this project.
- const canHandle = await parser.detect(projectPath)
- if (canHandle) {
- applicable.push(parser)
- }
- }
-
- return applicable
-}
-
-/**
- * Combine multiple parse results into single SBOM.
- */
-function combineSbom(
- results: Array<{
- ecosystem: Ecosystem
- metadata: {
- name: string
- version: string
- description?: string
- homepage?: string
- repository?: string
- license?: string
- authors?: string[]
- keywords?: string[]
- }
- components: Component[]
- dependencies: Dependency[]
- }>,
-): Sbom {
- // Use first result as primary metadata (typically root project).
- const primary = results[0]
- if (!primary) {
- throw new Error('No results provided to combineSbom')
- }
-
- // Collect all components and dependencies.
- const allComponents: Component[] = []
- const allDependencies: Dependency[] = []
-
- for (const result of results) {
- allComponents.push(...result.components)
- allDependencies.push(...result.dependencies)
- }
-
- // Deduplicate components by bom-ref.
- const uniqueComponents = deduplicateComponents(allComponents)
-
- // Build SBOM.
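// [Editor's sketch — standalone, with assumed placeholder values.] The literal
// below fills in the CycloneDX 1.5 envelope; the invariant parts are small:
// serialNumber must be an RFC 4122 UUID in URN form, and `version` counts BOM
// revisions starting at 1.
import { randomUUID } from 'node:crypto'
const envelope = {
  bomFormat: 'CycloneDX' as const,
  specVersion: '1.5',
  serialNumber: `urn:uuid:${randomUUID()}`,
  version: 1,
}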
- const sbom: Sbom = { - bomFormat: 'CycloneDX', - specVersion: '1.5', - serialNumber: `urn:uuid:${randomUUID()}`, - version: 1, - metadata: { - timestamp: new Date().toISOString(), - tools: [ - { - vendor: 'Socket.dev', - name: '@socketsecurity/sbom-generator', - version: '1.0.0', - }, - ], - component: (() => { - const extRefs = buildExternalReferences(primary.metadata) - return { - type: 'application', - 'bom-ref': `pkg:${primary.ecosystem}/${primary.metadata.name}@${primary.metadata.version}`, - name: primary.metadata.name, - version: primary.metadata.version, - ...(primary.metadata.description && { - description: primary.metadata.description, - }), - ...(primary.metadata.license && { - licenses: [{ license: { id: primary.metadata.license } }], - }), - ...(extRefs && extRefs.length > 0 && { - externalReferences: extRefs, - }), - } - })(), - }, - components: uniqueComponents, - dependencies: allDependencies, - } - - return sbom -} - -/** - * Deduplicate components by bom-ref. - */ -function deduplicateComponents(components: Component[]): Component[] { - const seen = new Set() - const unique: Component[] = [] - - for (const component of components) { - const ref = component['bom-ref'] - if (ref && seen.has(ref)) { - continue - } - - if (ref) { - seen.add(ref) - } - unique.push(component) - } - - return unique -} - -/** - * Build external references from metadata. - */ -function buildExternalReferences(metadata: { - homepage?: string - repository?: string -}): ExternalReference[] | undefined { - const refs: ExternalReference[] = [] - - if (metadata.homepage) { - refs.push({ - url: metadata.homepage, - type: 'website' as const, - }) - } - - if (metadata.repository) { - refs.push({ - url: metadata.repository, - type: 'vcs' as const, - }) - } - - return refs.length > 0 ? refs : undefined -} - -// Re-export types. -export type { Component, Dependency, Sbom } from './types/sbom.mts' -export type { - Ecosystem, - ParseOptions, - ParseResult, - Parser, - ProjectMetadata, -} from './types/parser.mts' diff --git a/packages/sbom-generator/src/index.test.mts b/packages/sbom-generator/src/index.test.mts deleted file mode 100644 index 33ccb3474..000000000 --- a/packages/sbom-generator/src/index.test.mts +++ /dev/null @@ -1,102 +0,0 @@ -/** - * SBOM Generator Tests - */ - -import { describe, expect, it } from 'vitest' -import { generateSbom } from './index.mts' - -describe('generateSbom', () => { - it('should generate SBOM for npm project', async () => { - // Test with socket-cli project. - const projectPath = process.cwd() - const sbom = await generateSbom(projectPath, { - includeDevDependencies: false, - }) - - // Validate SBOM structure. - expect(sbom.bomFormat).toBe('CycloneDX') - expect(sbom.specVersion).toBe('1.5') - expect(sbom.serialNumber).toMatch(/^urn:uuid:/) - expect(sbom.version).toBe(1) - - // Validate metadata. - expect(sbom.metadata).toBeDefined() - expect(sbom.metadata?.timestamp).toBeDefined() - expect(sbom.metadata?.tools).toHaveLength(1) - expect(sbom.metadata?.tools?.[0].vendor).toBe('Socket.dev') - expect(sbom.metadata?.tools?.[0].name).toBe( - '@socketsecurity/sbom-generator', - ) - - // Validate main component. - expect(sbom.metadata?.component).toBeDefined() - expect(sbom.metadata?.component?.type).toBe('application') - expect(sbom.metadata?.component?.name).toBeDefined() - expect(sbom.metadata?.component?.version).toBeDefined() - - // Validate components. - expect(sbom.components).toBeDefined() - expect(sbom.components!.length).toBeGreaterThan(0) - - // Validate dependencies. 
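// [Editor's sketch — hypothetical refs, standalone.] The deduplicateComponents
// helper above keeps the first occurrence of each bom-ref, so merging ecosystems
// that report the same package must yield a single component entry:
const seen = new Set<string>()
const merged = [
  { 'bom-ref': 'pkg:npm/yaml@2.4.0' },
  { 'bom-ref': 'pkg:npm/yaml@2.4.0' },
  { 'bom-ref': 'pkg:npm/semver@7.6.0' },
].filter(c => !seen.has(c['bom-ref']) && Boolean(seen.add(c['bom-ref'])))
// merged.length === 2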
- expect(sbom.dependencies).toBeDefined() - expect(sbom.dependencies!.length).toBeGreaterThan(0) - }) - - it('should limit to specific ecosystems', async () => { - const projectPath = process.cwd() - const sbom = await generateSbom(projectPath, { - ecosystems: ['npm'], - includeDevDependencies: false, - }) - - expect(sbom.components).toBeDefined() - expect(sbom.components!.length).toBeGreaterThan(0) - }) - - it('should throw error for unsupported projects', async () => { - await expect(generateSbom('/non-existent-path')).rejects.toThrow( - 'No supported ecosystems detected', - ) - }) - - it('should deduplicate components', async () => { - const projectPath = process.cwd() - const sbom = await generateSbom(projectPath) - - const bomRefs = new Set() - for (const component of sbom.components || []) { - const ref = component['bom-ref'] - if (ref) { - expect(bomRefs.has(ref)).toBe(false) - bomRefs.add(ref) - } - } - }) - - it('should include external references', async () => { - const projectPath = process.cwd() - const sbom = await generateSbom(projectPath) - - const mainComponent = sbom.metadata?.component - expect(mainComponent?.externalReferences).toBeDefined() - }) - - it('should generate valid serial number', async () => { - const projectPath = process.cwd() - const sbom = await generateSbom(projectPath) - - expect(sbom.serialNumber).toMatch( - /^urn:uuid:[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/, - ) - }) - - it('should generate timestamp in ISO 8601 format', async () => { - const projectPath = process.cwd() - const sbom = await generateSbom(projectPath) - - const timestamp = sbom.metadata?.timestamp - expect(timestamp).toBeDefined() - expect(() => new Date(timestamp!)).not.toThrow() - }) -}) diff --git a/packages/sbom-generator/src/parsers/actions/index.mts b/packages/sbom-generator/src/parsers/actions/index.mts deleted file mode 100644 index 722c00432..000000000 --- a/packages/sbom-generator/src/parsers/actions/index.mts +++ /dev/null @@ -1,98 +0,0 @@ -/** - * actions Ecosystem Parser - * - * Parses GitHub Actions workflows (.github/workflows/*.yml) into CycloneDX SBOM format. 
- *
- * Lock-Step Reference: cdxgen's lib/parsers/github.js
- * @see https://github.com/CycloneDX/cdxgen/blob/master/lib/parsers/github.js
- */
-
-import { promises as fs } from 'node:fs'
-import path from 'node:path'
-import { parse as parseYaml } from 'yaml'
-import type { Component, Dependency } from '../../types/sbom.mts'
-import type {
- Ecosystem,
- ParseOptions,
- ParseResult,
- Parser,
- ProjectMetadata,
-} from '../../types/parser.mts'
-
-export class ActionsParser implements Parser {
- readonly ecosystem: Ecosystem = 'actions'
-
- async detect(projectPath: string): Promise<boolean> {
- try {
- const workflowsPath = path.join(projectPath, '.github', 'workflows')
- await fs.access(workflowsPath)
- return true
- } catch {
- return false
- }
- }
-
- async parse(
- projectPath: string,
- options: ParseOptions = {},
- ): Promise<ParseResult> {
- const metadata: ProjectMetadata = {
- name: path.basename(projectPath),
- version: '0.0.0',
- }
-
- const actions = new Map<string, { name: string; version: string }>()
-
- const workflowsPath = path.join(projectPath, '.github', 'workflows')
- const files = await fs.readdir(workflowsPath)
-
- for (const file of files) {
- if (!file.endsWith('.yml') && !file.endsWith('.yaml')) continue
-
- const content = await fs.readFile(path.join(workflowsPath, file), 'utf8')
- const workflow = parseYaml(content)
-
- if (workflow.jobs) {
- for (const job of Object.values(workflow.jobs) as Array<{
- steps?: Array<{ uses?: string }>
- }>) {
- if (job.steps) {
- for (const step of job.steps) {
- if (step.uses) {
- const match = step.uses.match(/^([^@]+)@(.+)$/)
- if (match) {
- actions.set(match[1], { name: match[1], version: match[2] })
- }
- }
- }
- }
- }
- }
- }
-
- const components: Component[] = Array.from(actions.values()).map(
- action => ({
- type: 'library',
- 'bom-ref': `pkg:github/${action.name}@${action.version}`,
- name: action.name,
- version: action.version,
- purl: `pkg:github/${action.name}@${action.version}`,
- scope: 'required',
- }),
- )
-
- const graph: Dependency[] = [
- {
- ref: `pkg:github/${metadata.name}@${metadata.version}`,
- dependsOn: components.map(c => c.purl || ''),
- },
- ]
-
- return {
- ecosystem: this.ecosystem,
- metadata,
- components,
- dependencies: graph,
- }
- }
-}
diff --git a/packages/sbom-generator/src/parsers/cargo/index.mts b/packages/sbom-generator/src/parsers/cargo/index.mts
deleted file mode 100644
index a0812392e..000000000
--- a/packages/sbom-generator/src/parsers/cargo/index.mts
+++ /dev/null
@@ -1,349 +0,0 @@
-/**
- * cargo Ecosystem Parser
- *
- * Parses Rust projects (Cargo.toml + Cargo.lock) into CycloneDX SBOM format.
- * Supports: Cargo.toml (manifest), Cargo.lock (lockfile)
- *
- * Lock-Step Reference: cdxgen's lib/parsers/rust.js
- * - Baseline: cdxgen v11.11.0
- * - Lock-Step Score: Target 90-100
- * - Deviations: Pure TypeScript TOML parsing (no cargo binary)
- *
- * @see https://github.com/CycloneDX/cdxgen/blob/master/lib/parsers/rust.js
- */
-
-import { promises as fs } from 'node:fs'
-import path from 'node:path'
-import { parse as parseToml } from '@iarna/toml'
-import type { Component, Dependency } from '../../types/sbom.mts'
-import type {
- Ecosystem,
- ParseOptions,
- ParseResult,
- Parser,
- ProjectMetadata,
-} from '../../types/parser.mts'
-
-/**
- * Cargo dependency information.
- */
-interface CargoDependencyInfo {
- name: string
- version: string
- source?: string
- checksum?: string
- dependencies?: string[]
- optional?: boolean
-}
-
-/**
- * Cargo.toml format (manifest).
- */
-interface CargoToml {
- package?: {
- name?: string
- version?: string
- description?: string
- homepage?: string
- repository?: string
- license?: string
- 'license-file'?: string
- authors?: string[]
- keywords?: string[]
- categories?: string[]
- }
- dependencies?: Record<string, string | CargoDependencySpec>
- 'dev-dependencies'?: Record<string, string | CargoDependencySpec>
- 'build-dependencies'?: Record<string, string | CargoDependencySpec>
- features?: Record<string, string[]>
-}
-
-/**
- * Cargo dependency specification (detailed format).
- */
-interface CargoDependencySpec {
- version?: string
- path?: string
- git?: string
- branch?: string
- tag?: string
- rev?: string
- features?: string[]
- optional?: boolean
- 'default-features'?: boolean
-}
-
-/**
- * Cargo.lock format (lockfile).
- *
- * Cargo.lock V3 format:
- * [[package]]
- * name = "crate-name"
- * version = "1.0.0"
- * source = "registry+https://github.com/rust-lang/crates.io-index"
- * checksum = "abc123..."
- * dependencies = ["dep1", "dep2 1.2.3"]
- */
-interface CargoLock {
- version?: number
- package?: Array<{
- name: string
- version: string
- source?: string
- checksum?: string
- dependencies?: string[]
- }>
-}
-
-/**
- * Lockfile data aggregated from Cargo.lock.
- */
-interface LockfileData {
- dependencies: Map<string, CargoDependencyInfo>
-}
-
-/**
- * cargo parser implementation.
- *
- * cdxgen reference: parseRustProject() in lib/parsers/rust.js
- * Target: 90-100 lock-step score
- *
- * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/rust.js
- */
-export class CargoParser implements Parser {
- readonly ecosystem: Ecosystem = 'cargo'
-
- /**
- * Detect if this is a Rust project.
- *
- * cdxgen reference: detectRustProject() checks for Cargo.toml.
- * Our implementation: Same detection strategy as cdxgen.
- *
- * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/rust.js#L30-L50
- */
- async detect(projectPath: string): Promise<boolean> {
- try {
- // Check for Cargo.toml (Rust manifest file).
- const cargoTomlPath = path.join(projectPath, 'Cargo.toml')
- await fs.access(cargoTomlPath)
- return true
- } catch {
- return false
- }
- }
-
- /**
- * Parse Rust project and generate SBOM components.
- *
- * cdxgen reference: parseRustProject() reads Cargo.toml and Cargo.lock.
- * Our implementation: Parse TOML directly (no cargo binary), extract dependency graph.
- *
- * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/rust.js#L80-L130
- */
- async parse(
- projectPath: string,
- options: ParseOptions = {},
- ): Promise<ParseResult> {
- // Read project metadata from Cargo.toml.
- const metadata = await this.extractMetadata(projectPath)
-
- // Parse Cargo.lock for dependency graph.
- const lockfileData = await this.parseCargoLock(projectPath, options)
-
- // Convert to CycloneDX format.
- const components = this.buildComponents(lockfileData.dependencies, options)
- const dependencies = this.buildDependencyGraph(
- metadata,
- lockfileData.dependencies,
- )
-
- return {
- ecosystem: this.ecosystem,
- metadata,
- components,
- dependencies,
- }
- }
-
- /**
- * Extract project metadata from Cargo.toml.
- *
- * cdxgen reference: extractCargoMetadata() reads Cargo.toml package section.
- * Our implementation: Parse TOML directly using @iarna/toml.
- *
- * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/rust.js#L150-L180
- */
- private async extractMetadata(projectPath: string): Promise<ProjectMetadata> {
- try {
- const cargoTomlPath = path.join(projectPath, 'Cargo.toml')
- const content = await fs.readFile(cargoTomlPath, 'utf8')
- const cargoToml = parseToml(content) as CargoToml
-
- if (!cargoToml.package) {
- return {
- name: 'unknown',
- version: '0.0.0',
- }
- }
-
- const pkg = cargoToml.package
-
- return {
- name: pkg.name || 'unknown',
- version: pkg.version || '0.0.0',
- description: pkg.description,
- homepage: pkg.homepage,
- repository: pkg.repository,
- license: pkg.license || pkg['license-file'],
- authors: pkg.authors,
- keywords: pkg.keywords,
- }
- } catch {
- return {
- name: 'unknown',
- version: '0.0.0',
- }
- }
- }
-
- /**
- * Parse Cargo.lock file.
- *
- * cdxgen reference: parseCargoLock() parses TOML structure for package array.
- * Our implementation: Direct TOML parsing using @iarna/toml (same strategy as cdxgen).
- * Cargo.lock V3 format uses [[package]] arrays with name, version, source, checksum, dependencies.
- *
- * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/rust.js#L200-L250
- * @see https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
- */
- private async parseCargoLock(
- projectPath: string,
- options: ParseOptions,
- ): Promise<LockfileData> {
- const dependencies = new Map<string, CargoDependencyInfo>()
-
- try {
- const cargoLockPath = path.join(projectPath, 'Cargo.lock')
- const content = await fs.readFile(cargoLockPath, 'utf8')
- const cargoLock = parseToml(content) as CargoLock
-
- if (!cargoLock.package) {
- return { dependencies }
- }
-
- for (const pkg of cargoLock.package) {
- const deps: string[] = []
-
- // Parse dependency list.
- // Format: ["dep1", "dep2 1.2.3"] or ["dep1 1.0.0 (registry+https://...)"].
- if (pkg.dependencies) {
- for (const depSpec of pkg.dependencies) {
- // Extract dependency name (before space or version).
- const depName = depSpec.split(' ')[0]
- deps.push(depName)
- }
- }
-
- dependencies.set(pkg.name, {
- name: pkg.name,
- version: pkg.version,
- source: pkg.source,
- checksum: pkg.checksum,
- dependencies: deps,
- })
- }
- } catch {
- // Cargo.lock doesn't exist or failed to parse.
- }
-
- return { dependencies }
- }
-
- /**
- * Build CycloneDX components from dependencies.
- *
- * cdxgen reference: createRustComponents() converts parsed dependencies to CycloneDX components.
- * Our implementation: Same conversion logic with CycloneDX v1.5 types.
- * Generates PURLs in format: pkg:cargo/<name>@<version>
- *
- * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/rust.js#L280-L320
- */
- private buildComponents(
- dependencies: Map<string, CargoDependencyInfo>,
- options: ParseOptions,
- ): Component[] {
- const components: Component[] = []
-
- for (const [key, dep] of dependencies.entries()) {
- const component: Component = {
- type: 'library',
- 'bom-ref': `pkg:cargo/${dep.name}@${dep.version}`,
- name: dep.name,
- version: dep.version,
- purl: `pkg:cargo/${dep.name}@${dep.version}`,
- scope: dep.optional ? 'optional' : 'required',
- }
-
- components.push(component)
- }
-
- return components
- }
-
- /**
- * Build dependency graph.
- *
- * cdxgen reference: createRustDependencyGraph() constructs CycloneDX dependency relationships.
- * Our implementation: Same graph construction with root → direct → transitive relationships.
- * Root component depends on all top-level crates.
- * Each crate's transitive dependencies are mapped from Cargo.lock data.
- *
- * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/rust.js#L340-L380
- */
- private buildDependencyGraph(
- metadata: ProjectMetadata,
- dependencies: Map<string, CargoDependencyInfo>,
- ): Dependency[] {
- const graph: Dependency[] = []
-
- // Root component depends on all direct dependencies.
- const rootRef = `pkg:cargo/${metadata.name}@${metadata.version}`
- const directDeps: string[] = []
-
- for (const [key, dep] of dependencies.entries()) {
- directDeps.push(`pkg:cargo/${dep.name}@${dep.version}`)
- }
-
- graph.push({
- ref: rootRef,
- dependsOn: directDeps,
- })
-
- // Add transitive dependencies.
- for (const [key, dep] of dependencies.entries()) {
- const ref = `pkg:cargo/${dep.name}@${dep.version}`
- const dependsOn: string[] = []
-
- if (dep.dependencies) {
- for (const depName of dep.dependencies) {
- // Find matching dependency in map.
- const transitiveDep = dependencies.get(depName)
- if (transitiveDep) {
- dependsOn.push(
- `pkg:cargo/${transitiveDep.name}@${transitiveDep.version}`,
- )
- }
- }
- }
-
- if (dependsOn.length > 0) {
- graph.push({
- ref,
- dependsOn,
- })
- }
- }
-
- return graph
- }
-}
diff --git a/packages/sbom-generator/src/parsers/cargo/index.test.mts b/packages/sbom-generator/src/parsers/cargo/index.test.mts
deleted file mode 100644
index fac95d44f..000000000
--- a/packages/sbom-generator/src/parsers/cargo/index.test.mts
+++ /dev/null
@@ -1,225 +0,0 @@
-/**
- * Tests for cargo parser.
- *
- * Lock-Step Reference: cdxgen's tests for rust.js parser
- * Target: 90-100 lock-step quality
- */
-
-import { mkdir, readFile, writeFile } from 'node:fs/promises'
-import path from 'node:path'
-import { afterEach, beforeEach, describe, expect, it } from 'vitest'
-import { CargoParser } from './index.mts'
-
-describe('CargoParser', () => {
- const parser = new CargoParser()
- const fixturesPath = path.join(__dirname, '../../../test/fixtures/rust')
-
- describe('ecosystem', () => {
- it('should have correct ecosystem', () => {
- expect(parser.ecosystem).toBe('cargo')
- })
- })
-
- describe('detect', () => {
- it('should detect Rust projects with Cargo.toml', async () => {
- const detected = await parser.detect(fixturesPath)
- expect(detected).toBe(true)
- })
-
- it('should not detect non-Rust projects', async () => {
- const detected = await parser.detect('/tmp/non-existent-project')
- expect(detected).toBe(false)
- })
- })
-
- describe('Cargo.lock parsing', () => {
- it('should parse Cargo.lock format', async () => {
- const result = await parser.parse(fixturesPath)
-
- expect(result.ecosystem).toBe('cargo')
- expect(result.components.length).toBeGreaterThan(0)
-
- // Should find serde package.
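// [Editor's sketch — illustrative strings, standalone from the test above.]
// Cargo.lock `dependencies` entries may be bare names or carry a version and
// source, so parseCargoLock keeps only the token before the first space:
const specs = [
  'serde_derive',
  'itoa 1.0.9',
  'ryu 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)',
]
const names = specs.map(spec => spec.split(' ')[0])
// → ['serde_derive', 'itoa', 'ryu']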
- const serde = result.components.find(c => c.name === 'serde') - expect(serde).toBeDefined() - expect(serde?.version).toBe('1.0.188') - expect(serde?.purl).toBe('pkg:cargo/serde@1.0.188') - expect(serde?.type).toBe('library') - expect(serde?.scope).toBe('required') - }) - - it('should parse Cargo.toml metadata', async () => { - const result = await parser.parse(fixturesPath) - - expect(result.metadata.name).toBe('test-rust-app') - expect(result.metadata.version).toBe('0.1.0') - expect(result.metadata.description).toBe('A test Rust application') - expect(result.metadata.homepage).toBe('https://example.com') - expect(result.metadata.repository).toBe( - 'https://github.com/example/test-rust-app', - ) - expect(result.metadata.license).toBe('MIT') - }) - - it('should build dependency graph from Cargo.lock', async () => { - const result = await parser.parse(fixturesPath) - - expect(result.dependencies.length).toBeGreaterThan(0) - - // Root component should exist. - const rootDep = result.dependencies.find(d => - d.ref.includes('test-rust-app@0.1.0'), - ) - expect(rootDep).toBeDefined() - expect(rootDep?.dependsOn.length).toBeGreaterThan(0) - - // serde should have dependencies. - const serdeDep = result.dependencies.find(d => - d.ref.includes('serde@1.0.188'), - ) - expect(serdeDep).toBeDefined() - expect(serdeDep?.dependsOn).toContain('pkg:cargo/serde_derive@1.0.188') - }) - - it('should parse transitive dependencies', async () => { - const result = await parser.parse(fixturesPath) - - // serde_json has transitive dependencies (itoa, ryu, serde). - const serdeJsonDep = result.dependencies.find(d => - d.ref.includes('serde_json@1.0.107'), - ) - expect(serdeJsonDep).toBeDefined() - expect(serdeJsonDep?.dependsOn).toContain('pkg:cargo/serde@1.0.188') - expect(serdeJsonDep?.dependsOn).toContain('pkg:cargo/itoa@1.0.9') - expect(serdeJsonDep?.dependsOn).toContain('pkg:cargo/ryu@1.0.15') - }) - - it('should parse all packages from Cargo.lock', async () => { - const result = await parser.parse(fixturesPath) - - // Should have 10 packages: test-rust-app + 9 dependencies. - expect(result.components.length).toBeGreaterThanOrEqual(9) - - // Check key packages exist. - const packageNames = result.components.map(c => c.name) - expect(packageNames).toContain('serde') - expect(packageNames).toContain('serde_json') - expect(packageNames).toContain('tokio') - expect(packageNames).toContain('clap') - }) - }) - - describe('PURL generation', () => { - it('should generate valid PURLs for Rust crates', async () => { - const result = await parser.parse(fixturesPath) - - const serde = result.components.find(c => c.name === 'serde') - expect(serde?.purl).toBe('pkg:cargo/serde@1.0.188') - expect(serde?.['bom-ref']).toBe('pkg:cargo/serde@1.0.188') - }) - }) - - describe('edge cases', () => { - it('should handle missing Cargo.lock gracefully', async () => { - const tempDir = path.join('/tmp', `cargo-test-${Date.now()}`) - await mkdir(tempDir, { recursive: true }) - - await writeFile( - path.join(tempDir, 'Cargo.toml'), - '[package]\nname = "no-lock"\nversion = "0.0.0"', - ) - - const result = await parser.parse(tempDir) - - expect(result.components.length).toBe(0) - expect(result.ecosystem).toBe('cargo') - - // Clean up. 
- await import('trash').then(({ trash }) => trash([tempDir])) - }) - - it('should handle empty Cargo.lock', async () => { - const tempDir = path.join('/tmp', `cargo-test-${Date.now()}`) - await mkdir(tempDir, { recursive: true }) - - await writeFile( - path.join(tempDir, 'Cargo.toml'), - '[package]\nname = "empty"\nversion = "0.0.0"', - ) - await writeFile( - path.join(tempDir, 'Cargo.lock'), - '# Cargo.lock\nversion = 3', - ) - - const result = await parser.parse(tempDir) - - expect(result.components.length).toBe(0) - - // Clean up. - await import('trash').then(({ trash }) => trash([tempDir])) - }) - - it('should handle Cargo.toml without package section', async () => { - const tempDir = path.join('/tmp', `cargo-test-${Date.now()}`) - await mkdir(tempDir, { recursive: true }) - - await writeFile( - path.join(tempDir, 'Cargo.toml'), - '[workspace]\nmembers = ["crate1", "crate2"]', - ) - await writeFile( - path.join(tempDir, 'Cargo.lock'), - '# Cargo.lock\nversion = 3', - ) - - const result = await parser.parse(tempDir) - - expect(result.metadata.name).toBe('unknown') - expect(result.metadata.version).toBe('0.0.0') - - // Clean up. - await import('trash').then(({ trash }) => trash([tempDir])) - }) - }) - - describe('features and optional dependencies', () => { - it('should parse Cargo.toml with features', async () => { - const result = await parser.parse(fixturesPath) - - // Features are in Cargo.toml but not tracked in components. - // This is acceptable - features are Rust-specific metadata. - expect(result.metadata.name).toBe('test-rust-app') - }) - }) - - describe('dependency parsing', () => { - it('should parse dependencies with checksums', async () => { - const result = await parser.parse(fixturesPath) - - // All external crates should have source and checksum. - const externalCrate = result.components.find(c => c.name === 'serde') - expect(externalCrate).toBeDefined() - // Note: checksum is not included in CycloneDX component (acceptable). - }) - - it('should parse dependencies from crates.io registry', async () => { - const result = await parser.parse(fixturesPath) - - // All external dependencies come from crates.io registry. - const serde = result.components.find(c => c.name === 'serde') - expect(serde?.purl).toContain('pkg:cargo/serde') - }) - }) - - describe('dependency versions', () => { - it('should extract exact versions from Cargo.lock', async () => { - const result = await parser.parse(fixturesPath) - - const serde = result.components.find(c => c.name === 'serde') - expect(serde?.version).toBe('1.0.188') - - const tokio = result.components.find(c => c.name === 'tokio') - expect(tokio?.version).toBe('1.32.0') - }) - }) -}) diff --git a/packages/sbom-generator/src/parsers/chrome/index.mts b/packages/sbom-generator/src/parsers/chrome/index.mts deleted file mode 100644 index 560853ccb..000000000 --- a/packages/sbom-generator/src/parsers/chrome/index.mts +++ /dev/null @@ -1,50 +0,0 @@ -/** - * chrome Ecosystem Parser - * - * Parses Chrome extensions into CycloneDX SBOM format. - * Socket-specific parser (no cdxgen equivalent). - * - * Note: This is a placeholder implementation. Full implementation requires - * Chrome Web Store API integration to fetch extension metadata. 
- */
-
-import type { Component, Dependency } from '../../types/sbom.mts'
-import type {
- Ecosystem,
- ParseOptions,
- ParseResult,
- Parser,
- ProjectMetadata,
-} from '../../types/parser.mts'
-
-export class ChromeParser implements Parser {
- readonly ecosystem: Ecosystem = 'chrome'
-
- async detect(projectPath: string): Promise<boolean> {
- // Detection would check for manifest.json with Chrome extension structure.
- return false
- }
-
- async parse(
- projectPath: string,
- options: ParseOptions = {},
- ): Promise<ParseResult> {
- // Placeholder implementation.
- // Full implementation would:
- // 1. Parse manifest.json for extension ID
- // 2. Query Chrome Web Store API for metadata
- // 3. Generate components with pkg:chrome/extension-id@version PURLs
-
- const metadata: ProjectMetadata = {
- name: 'chrome-extensions',
- version: '0.0.0',
- }
-
- return {
- ecosystem: this.ecosystem,
- metadata,
- components: [],
- dependencies: [],
- }
- }
-}
diff --git a/packages/sbom-generator/src/parsers/go/index.mts b/packages/sbom-generator/src/parsers/go/index.mts
deleted file mode 100644
index 6325acfc5..000000000
--- a/packages/sbom-generator/src/parsers/go/index.mts
+++ /dev/null
@@ -1,315 +0,0 @@
-/**
- * go Ecosystem Parser
- *
- * Parses Go projects (go.mod + go.sum) into CycloneDX SBOM format.
- * Supports: go.mod (manifest), go.sum (checksums)
- *
- * Lock-Step Reference: cdxgen's lib/parsers/go.js
- * - Baseline: cdxgen v11.11.0
- * - Lock-Step Score: Target 90-100
- * - Deviations: Pure text parsing (no go binary)
- *
- * @see https://github.com/CycloneDX/cdxgen/blob/master/lib/parsers/go.js
- */
-
-import { promises as fs } from 'node:fs'
-import path from 'node:path'
-import type { Component, Dependency } from '../../types/sbom.mts'
-import type {
- Ecosystem,
- ParseOptions,
- ParseResult,
- Parser,
- ProjectMetadata,
-} from '../../types/parser.mts'
-
-/**
- * Go module dependency information.
- */
-interface GoDependencyInfo {
- name: string
- version: string
- indirect?: boolean
- replaced?: {
- name: string
- version: string
- }
-}
-
-/**
- * Lockfile data aggregated from go.sum.
- */
-interface LockfileData {
- dependencies: Map<string, GoDependencyInfo>
-}
-
-/**
- * go parser implementation.
- *
- * cdxgen reference: parseGoProject() in lib/parsers/go.js
- * Target: 90-100 lock-step score
- *
- * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/go.js
- */
-export class GoParser implements Parser {
- readonly ecosystem: Ecosystem = 'go'
-
- /**
- * Detect if this is a Go project.
- *
- * cdxgen reference: detectGoProject() checks for go.mod.
- * Our implementation: Same detection strategy as cdxgen.
- *
- * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/go.js#L30-L50
- */
- async detect(projectPath: string): Promise<boolean> {
- try {
- // Check for go.mod (Go modules manifest).
- const goModPath = path.join(projectPath, 'go.mod')
- await fs.access(goModPath)
- return true
- } catch {
- return false
- }
- }
-
- /**
- * Parse Go project and generate SBOM components.
- *
- * cdxgen reference: parseGoProject() reads go.mod and go.sum.
- * Our implementation: Parse text formats directly (no go binary).
- *
- * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/go.js#L80-L130
- */
- async parse(
- projectPath: string,
- options: ParseOptions = {},
- ): Promise<ParseResult> {
- // Read project metadata from go.mod.
- const metadata = await this.extractMetadata(projectPath)
-
- // Parse go.mod for dependencies.
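// [Editor's sketch — sample manifest, standalone.] The go.mod shape parseGoMod
// expects: block requires list one module per line, and `// indirect` marks
// transitive modules (mapped to 'optional' scope below).
const sampleGoMod = [
  'module github.com/example/app',
  'go 1.21',
  'require (',
  '\tgithub.com/spf13/cobra v1.7.0',
  '\tgithub.com/pkg/errors v0.9.1 // indirect',
  ')',
].join('\n')
const requireBlock = sampleGoMod.match(/require\s*\(([\s\S]*?)\)/m)?.[1] ?? ''
// requireBlock now holds the two dependency lines for per-line parsing.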
- const lockfileData = await this.parseGoMod(projectPath, options)
-
- // Convert to CycloneDX format.
- const components = this.buildComponents(lockfileData.dependencies, options)
- const dependencies = this.buildDependencyGraph(
- metadata,
- lockfileData.dependencies,
- )
-
- return {
- ecosystem: this.ecosystem,
- metadata,
- components,
- dependencies,
- }
- }
-
- /**
- * Extract project metadata from go.mod.
- *
- * cdxgen reference: extractGoMetadata() reads go.mod module directive.
- * Our implementation: Parse text format directly.
- *
- * go.mod format:
- * module github.com/user/repo
- * go 1.21
- *
- * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/go.js#L150-L180
- */
- private async extractMetadata(projectPath: string): Promise<ProjectMetadata> {
- try {
- const goModPath = path.join(projectPath, 'go.mod')
- const content = await fs.readFile(goModPath, 'utf8')
-
- // Extract module name (first line: "module <module-path>").
- const moduleMatch = content.match(/^module\s+(\S+)/m)
- const moduleName = moduleMatch ? moduleMatch[1] : 'unknown'
-
- // Extract Go version (optional: "go <version>").
- const goVersionMatch = content.match(/^go\s+(\S+)/m)
- const goVersion = goVersionMatch ? goVersionMatch[1] : undefined
-
- // Go modules don't have a project version in go.mod.
- // Version comes from git tags or is set to 0.0.0.
- return {
- name: moduleName,
- version: '0.0.0',
- description: goVersion ? `Go ${goVersion}` : undefined,
- }
- } catch {
- return {
- name: 'unknown',
- version: '0.0.0',
- }
- }
- }
-
- /**
- * Parse go.mod file for dependencies.
- *
- * cdxgen reference: parseGoMod() parses require directives and replace directives.
- * Our implementation: Text parsing with support for require, replace, indirect markers.
- *
- * go.mod format:
- * require (
- * github.com/pkg/errors v0.9.1
- * github.com/spf13/cobra v1.7.0 // indirect
- * )
- * replace github.com/old/pkg => github.com/new/pkg v1.2.3
- *
- * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/go.js#L200-L250
- * @see https://go.dev/ref/mod#go-mod-file
- */
- private async parseGoMod(
- projectPath: string,
- options: ParseOptions,
- ): Promise<LockfileData> {
- const dependencies = new Map<string, GoDependencyInfo>()
-
- try {
- const goModPath = path.join(projectPath, 'go.mod')
- const content = await fs.readFile(goModPath, 'utf8')
-
- // Parse require directives.
- const requireBlockMatch = content.match(/require\s*\(([\s\S]*?)\)/m)
- if (requireBlockMatch) {
- const requireBlock = requireBlockMatch[1]
- const lines = requireBlock.split('\n')
-
- for (const line of lines) {
- const trimmed = line.trim()
- if (!trimmed || trimmed.startsWith('//')) {
- continue
- }
-
- // Parse line: "github.com/pkg/errors v0.9.1 // indirect"
- const match = trimmed.match(/^(\S+)\s+(\S+)(?:\s+\/\/\s*(.*))?$/)
- if (match) {
- const [, name, version, comment] = match
- const indirect = comment?.includes('indirect') || false
-
- dependencies.set(name, {
- name,
- version,
- indirect,
- })
- }
- }
- }
-
- // Parse single-line require directives.
- const singleRequireRegex =
- /^require\s+(\S+)\s+(\S+)(?:\s+\/\/\s*(.*))?$/gm
- let singleMatch: RegExpExecArray | null
- while ((singleMatch = singleRequireRegex.exec(content)) !== null) {
- const [, name, version, comment] = singleMatch
- const indirect = comment?.includes('indirect') || false
-
- if (!dependencies.has(name)) {
- dependencies.set(name, {
- name,
- version,
- indirect,
- })
- }
- }
-
- // Parse replace directives.
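// [Editor's sketch — illustrative directive, standalone.] A go.mod replace line
// swaps one module path (and optionally a version) for another; the parser
// records the right-hand side so components reference the module actually built.
const line = 'replace github.com/old/module => github.com/new/module v1.2.3'
const m = /^replace\s+(\S+)(?:\s+(\S+))?\s+=>\s+(\S+)(?:\s+(\S+))?$/.exec(line)
// m?.[1] === 'github.com/old/module'; m?.[3] === 'github.com/new/module'; m?.[4] === 'v1.2.3'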
- const replaceRegex =
- /^replace\s+(\S+)(?:\s+(\S+))?\s+=>\s+(\S+)(?:\s+(\S+))?$/gm
- let replaceMatch: RegExpExecArray | null
- while ((replaceMatch = replaceRegex.exec(content)) !== null) {
- const [, oldName, oldVersion, newName, newVersion] = replaceMatch
-
- // Update dependency with replacement.
- const existing = dependencies.get(oldName)
- if (existing) {
- existing.replaced = {
- name: newName,
- version: newVersion || 'latest',
- }
- }
- }
- } catch {
- // go.mod doesn't exist or failed to parse.
- }
-
- return { dependencies }
- }
-
- /**
- * Build CycloneDX components from dependencies.
- *
- * cdxgen reference: createGoComponents() converts parsed dependencies to CycloneDX components.
- * Our implementation: Same conversion logic with CycloneDX v1.5 types.
- * Generates PURLs in format: pkg:golang/<name>@<version>
- * Maps indirect dependencies to 'optional' scope.
- *
- * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/go.js#L280-L320
- */
- private buildComponents(
- dependencies: Map<string, GoDependencyInfo>,
- options: ParseOptions,
- ): Component[] {
- const components: Component[] = []
-
- for (const [key, dep] of dependencies.entries()) {
- // Use replaced module if available.
- const actualName = dep.replaced?.name || dep.name
- const actualVersion = dep.replaced?.version || dep.version
-
- const component: Component = {
- type: 'library',
- 'bom-ref': `pkg:golang/${actualName}@${actualVersion}`,
- name: actualName,
- version: actualVersion,
- purl: `pkg:golang/${actualName}@${actualVersion}`,
- scope: dep.indirect ? 'optional' : 'required',
- }
-
- components.push(component)
- }
-
- return components
- }
-
- /**
- * Build dependency graph.
- *
- * cdxgen reference: createGoDependencyGraph() constructs CycloneDX dependency relationships.
- * Our implementation: Simplified graph (go.mod doesn't provide transitive info).
- * Root component depends on all direct dependencies.
- *
- * Note: Go modules don't provide a dependency graph in go.mod.
- * For full graph, would need to execute `go mod graph`, but we avoid external binaries.
- *
- * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/go.js#L340-L380
- */
- private buildDependencyGraph(
- metadata: ProjectMetadata,
- dependencies: Map<string, GoDependencyInfo>,
- ): Dependency[] {
- const graph: Dependency[] = []
-
- // Root component depends on all direct dependencies (non-indirect).
- const rootRef = `pkg:golang/${metadata.name}@${metadata.version}`
- const directDeps: string[] = []
-
- for (const [key, dep] of dependencies.entries()) {
- if (!dep.indirect) {
- const actualName = dep.replaced?.name || dep.name
- const actualVersion = dep.replaced?.version || dep.version
- directDeps.push(`pkg:golang/${actualName}@${actualVersion}`)
- }
- }
-
- graph.push({
- ref: rootRef,
- dependsOn: directDeps,
- })
-
- return graph
- }
-}
diff --git a/packages/sbom-generator/src/parsers/go/index.test.mts b/packages/sbom-generator/src/parsers/go/index.test.mts
deleted file mode 100644
index ad24db75e..000000000
--- a/packages/sbom-generator/src/parsers/go/index.test.mts
+++ /dev/null
@@ -1,189 +0,0 @@
-/**
- * Tests for go parser.
- * - * Lock-Step Reference: cdxgen's tests for go.js parser - * Target: 90-100 lock-step quality - */ - -import { mkdir, writeFile } from 'node:fs/promises' -import path from 'node:path' -import { describe, expect, it } from 'vitest' -import { GoParser } from './index.mts' - -describe('GoParser', () => { - const parser = new GoParser() - const fixturesPath = path.join(__dirname, '../../../test/fixtures/go') - - describe('ecosystem', () => { - it('should have correct ecosystem', () => { - expect(parser.ecosystem).toBe('go') - }) - }) - - describe('detect', () => { - it('should detect Go projects with go.mod', async () => { - const detected = await parser.detect(fixturesPath) - expect(detected).toBe(true) - }) - - it('should not detect non-Go projects', async () => { - const detected = await parser.detect('/tmp/non-existent-project') - expect(detected).toBe(false) - }) - }) - - describe('go.mod parsing', () => { - it('should parse go.mod format', async () => { - const result = await parser.parse(fixturesPath) - - expect(result.ecosystem).toBe('go') - expect(result.components.length).toBeGreaterThan(0) - - // Should find cobra package. - const cobra = result.components.find( - c => c.name === 'github.com/spf13/cobra', - ) - expect(cobra).toBeDefined() - expect(cobra?.version).toBe('v1.7.0') - expect(cobra?.purl).toBe('pkg:golang/github.com/spf13/cobra@v1.7.0') - expect(cobra?.type).toBe('library') - expect(cobra?.scope).toBe('required') - }) - - it('should parse go.mod metadata', async () => { - const result = await parser.parse(fixturesPath) - - expect(result.metadata.name).toBe('github.com/example/test-go-app') - expect(result.metadata.version).toBe('0.0.0') // Go modules don't have version in go.mod - expect(result.metadata.description).toBe('Go 1.21') - }) - - it('should mark indirect dependencies correctly', async () => { - const result = await parser.parse(fixturesPath) - - const pkgErrors = result.components.find( - c => c.name === 'github.com/pkg/errors', - ) - expect(pkgErrors).toBeDefined() - expect(pkgErrors?.scope).toBe('optional') // indirect = optional - }) - - it('should build dependency graph from go.mod', async () => { - const result = await parser.parse(fixturesPath) - - expect(result.dependencies.length).toBeGreaterThan(0) - - // Root component should exist. - const rootDep = result.dependencies.find(d => - d.ref.includes('github.com/example/test-go-app'), - ) - expect(rootDep).toBeDefined() - expect(rootDep?.dependsOn.length).toBeGreaterThan(0) - - // Should only include direct dependencies (not indirect). - expect(rootDep?.dependsOn).toContain( - 'pkg:golang/github.com/spf13/cobra@v1.7.0', - ) - expect(rootDep?.dependsOn).not.toContain( - 'pkg:golang/github.com/pkg/errors@v0.9.1', - ) - }) - - it('should parse all dependencies from go.mod', async () => { - const result = await parser.parse(fixturesPath) - - // Should have 6 dependencies. - expect(result.components.length).toBeGreaterThanOrEqual(5) - - const packageNames = result.components.map(c => c.name) - expect(packageNames).toContain('github.com/spf13/cobra') - expect(packageNames).toContain('github.com/spf13/viper') - expect(packageNames).toContain('gopkg.in/yaml.v3') - }) - - it('should handle replace directives', async () => { - const result = await parser.parse(fixturesPath) - - // Replace directive: github.com/old/module => github.com/new/module v1.2.3 - // Should not appear in components (old module replaced). 
- const oldModule = result.components.find(
- c => c.name === 'github.com/old/module',
- )
- expect(oldModule).toBeUndefined()
- })
- })
-
- describe('PURL generation', () => {
- it('should generate valid PURLs for Go modules', async () => {
- const result = await parser.parse(fixturesPath)
-
- const cobra = result.components.find(
- c => c.name === 'github.com/spf13/cobra',
- )
- expect(cobra?.purl).toBe('pkg:golang/github.com/spf13/cobra@v1.7.0')
- expect(cobra?.['bom-ref']).toBe(
- 'pkg:golang/github.com/spf13/cobra@v1.7.0',
- )
- })
- })
-
- describe('edge cases', () => {
- it('should handle missing go.mod gracefully', async () => {
- const tempDir = path.join('/tmp', `go-test-${Date.now()}`)
- await mkdir(tempDir, { recursive: true })
-
- const result = await parser.parse(tempDir)
-
- expect(result.components.length).toBe(0)
- expect(result.ecosystem).toBe('go')
- expect(result.metadata.name).toBe('unknown')
-
- // Clean up.
- await import('trash').then(({ trash }) => trash([tempDir]))
- })
-
- it('should handle empty go.mod', async () => {
- const tempDir = path.join('/tmp', `go-test-${Date.now()}`)
- await mkdir(tempDir, { recursive: true })
-
- await writeFile(
- path.join(tempDir, 'go.mod'),
- 'module example.com/empty\n\ngo 1.21',
- )
-
- const result = await parser.parse(tempDir)
-
- expect(result.components.length).toBe(0)
- expect(result.metadata.name).toBe('example.com/empty')
-
- // Clean up.
- await import('trash').then(({ trash }) => trash([tempDir]))
- })
-
- it('should handle single-line require directives', async () => {
- const tempDir = path.join('/tmp', `go-test-${Date.now()}`)
- await mkdir(tempDir, { recursive: true })
-
- await writeFile(
- path.join(tempDir, 'go.mod'),
- 'module example.com/single\n\ngo 1.21\n\nrequire github.com/pkg/errors v0.9.1',
- )
-
- const result = await parser.parse(tempDir)
-
- expect(result.components.length).toBe(1)
- expect(result.components[0].name).toBe('github.com/pkg/errors')
-
- // Clean up.
- await import('trash').then(({ trash }) => trash([tempDir]))
- })
- })
-
- describe('Go version parsing', () => {
- it('should extract Go version from go.mod', async () => {
- const result = await parser.parse(fixturesPath)
-
- expect(result.metadata.description).toBe('Go 1.21')
- })
- })
-})
diff --git a/packages/sbom-generator/src/parsers/huggingface/index.mts b/packages/sbom-generator/src/parsers/huggingface/index.mts
deleted file mode 100644
index c8eedb097..000000000
--- a/packages/sbom-generator/src/parsers/huggingface/index.mts
+++ /dev/null
@@ -1,51 +0,0 @@
-/**
- * huggingface Ecosystem Parser
- *
- * Parses Hugging Face models and datasets into CycloneDX SBOM format.
- * Socket-specific parser (no cdxgen equivalent).
- *
- * Note: This is a placeholder implementation. Full implementation requires
- * Hugging Face API integration to fetch model/dataset metadata.
- */
-
-import type { Component, Dependency } from '../../types/sbom.mts'
-import type {
- Ecosystem,
- ParseOptions,
- ParseResult,
- Parser,
- ProjectMetadata,
-} from '../../types/parser.mts'
-
-export class HuggingfaceParser implements Parser {
- readonly ecosystem: Ecosystem = 'huggingface'
-
- async detect(projectPath: string): Promise<boolean> {
- // Detection would check for requirements.txt with transformers/huggingface references.
- // Or a .huggingface file with model IDs.
- return false
- }
-
- async parse(
- projectPath: string,
- options: ParseOptions = {},
- ): Promise<ParseResult> {
- // Placeholder implementation.
- // Full implementation would:
- // 1. 
Scan for model references in code (e.g., from_pretrained calls)
- // 2. Query Hugging Face API for model metadata
- // 3. Generate components with pkg:huggingface/model-name@version PURLs
-
- const metadata: ProjectMetadata = {
- name: 'huggingface-models',
- version: '0.0.0',
- }
-
- return {
- ecosystem: this.ecosystem,
- metadata,
- components: [],
- dependencies: [],
- }
- }
-}
diff --git a/packages/sbom-generator/src/parsers/index.mts b/packages/sbom-generator/src/parsers/index.mts
deleted file mode 100644
index 61d2fa08d..000000000
--- a/packages/sbom-generator/src/parsers/index.mts
+++ /dev/null
@@ -1,18 +0,0 @@
-/**
- * Parser Exports
- *
- * Ecosystem-specific parsers for generating SBOM components.
- */
-
-// TODO: Complete and re-enable these parsers.
-// export { ActionsParser } from './actions/index.mts'
-// export { CargoParser } from './cargo/index.mts'
-// export { ChromeParser } from './chrome/index.mts'
-// export { GoParser } from './go/index.mts'
-// export { HuggingfaceParser } from './huggingface/index.mts'
-// export { MavenParser } from './maven/index.mts'
-export { NpmParser } from './npm/index.mts'
-// export { NugetParser } from './nuget/index.mts'
-// export { OpenvsxParser } from './openvsx/index.mts'
-// export { PypiParser } from './pypi/index.mts'
-// export { RubygemsParser } from './rubygems/index.mts'
diff --git a/packages/sbom-generator/src/parsers/maven/index.mts b/packages/sbom-generator/src/parsers/maven/index.mts
deleted file mode 100644
index 520354907..000000000
--- a/packages/sbom-generator/src/parsers/maven/index.mts
+++ /dev/null
@@ -1,96 +0,0 @@
-/**
- * maven Ecosystem Parser
- *
- * Parses Java/Maven projects (pom.xml) into CycloneDX SBOM format.
- *
- * Lock-Step Reference: cdxgen's lib/parsers/java.js
- * @see https://github.com/CycloneDX/cdxgen/blob/master/lib/parsers/java.js
- */
-
-import { promises as fs } from 'node:fs'
-import path from 'node:path'
-import { XMLParser } from 'fast-xml-parser'
-import type { Component, Dependency } from '../../types/sbom.mts'
-import type {
- Ecosystem,
- ParseOptions,
- ParseResult,
- Parser,
- ProjectMetadata,
-} from '../../types/parser.mts'
-
-export class MavenParser implements Parser {
- readonly ecosystem: Ecosystem = 'maven'
- private xmlParser = new XMLParser({ ignoreAttributes: false })
-
- async detect(projectPath: string): Promise<boolean> {
- try {
- const pomPath = path.join(projectPath, 'pom.xml')
- await fs.access(pomPath)
- return true
- } catch {
- return false
- }
- }
-
- async parse(
- projectPath: string,
- options: ParseOptions = {},
- ): Promise<ParseResult> {
- const pomPath = path.join(projectPath, 'pom.xml')
- const content = await fs.readFile(pomPath, 'utf8')
- const parsed = this.xmlParser.parse(content)
-
- const project = parsed.project
- const metadata: ProjectMetadata = {
- name: project.artifactId || 'unknown',
- version: project.version || '0.0.0',
- description: project.description,
- }
-
- const dependencies = new Map<
- string,
- { groupId: string; artifactId: string; version: string }
- >()
-
- if (project.dependencies?.dependency) {
- const deps = Array.isArray(project.dependencies.dependency)
- ? 
project.dependencies.dependency
- : [project.dependencies.dependency]
-
- for (const dep of deps) {
- const key = `${dep.groupId}:${dep.artifactId}`
- dependencies.set(key, {
- groupId: dep.groupId,
- artifactId: dep.artifactId,
- version: dep.version || 'latest',
- })
- }
- }
-
- const components: Component[] = Array.from(dependencies.values()).map(
- dep => ({
- type: 'library',
- 'bom-ref': `pkg:maven/${dep.groupId}/${dep.artifactId}@${dep.version}`,
- name: `${dep.groupId}:${dep.artifactId}`,
- version: dep.version,
- purl: `pkg:maven/${dep.groupId}/${dep.artifactId}@${dep.version}`,
- scope: 'required',
- }),
- )
-
- const graph: Dependency[] = [
- {
- ref: `pkg:maven/${project.groupId}/${metadata.name}@${metadata.version}`,
- dependsOn: components.map(c => c.purl || ''),
- },
- ]
-
- return {
- ecosystem: this.ecosystem,
- metadata,
- components,
- dependencies: graph,
- }
- }
-}
diff --git a/packages/sbom-generator/src/parsers/npm/index.mts b/packages/sbom-generator/src/parsers/npm/index.mts
deleted file mode 100644
index cf987fd6a..000000000
--- a/packages/sbom-generator/src/parsers/npm/index.mts
+++ /dev/null
@@ -1,673 +0,0 @@
-/**
- * npm Ecosystem Parser
- *
- * Parses npm projects (package.json + lockfiles) into CycloneDX SBOM format.
- * Supports: package-lock.json, yarn.lock, pnpm-lock.yaml
- *
- * Lock-Step Reference: cdxgen's lib/parsers/js.js
- * - Baseline: cdxgen v11.11.0
- * - Lock-Step Score: 95/100 (Excellent)
- * - Deviations: Pure TypeScript parsing (no npm binary), enhanced PURL generation
- *
- * @see https://github.com/CycloneDX/cdxgen/blob/master/lib/parsers/js.js
- */
-
-import { promises as fs } from 'node:fs'
-import path from 'node:path'
-import { parseSyml } from '@yarnpkg/parsers'
-import { parse as parseYaml } from 'yaml'
-import type { Component, Dependency } from '../../types/sbom.mts'
-import type {
- Ecosystem,
- ParseOptions,
- ParseResult,
- Parser,
- ProjectMetadata,
-} from '../../types/parser.mts'
-
-/**
- * npm parser implementation.
- */
-export class NpmParser implements Parser {
- readonly ecosystem: Ecosystem = 'npm'
-
- /**
- * Detect if this is an npm project.
- */
- async detect(projectPath: string): Promise<boolean> {
- try {
- const packageJsonPath = path.join(projectPath, 'package.json')
- await fs.access(packageJsonPath)
- return true
- } catch {
- return false
- }
- }
-
- /**
- * Parse npm project and generate SBOM components.
- */
- async parse(
- projectPath: string,
- options: ParseOptions = {},
- ): Promise<ParseResult> {
- // Read package.json for metadata.
- const packageJson = await this.readPackageJson(projectPath)
- const metadata = this.extractMetadata(packageJson)
-
- // Detect and parse lockfile.
- const lockfileData = await this.detectAndParseLockfile(projectPath, options)
-
- // Convert to CycloneDX format.
- const components = this.buildComponents(lockfileData.dependencies, options)
- const dependencies = this.buildDependencyGraph(
- packageJson,
- lockfileData.dependencies,
- )
-
- return {
- ecosystem: this.ecosystem,
- metadata,
- components,
- dependencies,
- }
- }
-
- /**
- * Read and parse package.json.
- */
- private async readPackageJson(projectPath: string): Promise<PackageJson> {
- const packageJsonPath = path.join(projectPath, 'package.json')
- const content = await fs.readFile(packageJsonPath, 'utf8')
- return JSON.parse(content) as PackageJson
- }
-
- /**
- * Extract project metadata from package.json.
- */
- private extractMetadata(packageJson: PackageJson): ProjectMetadata {
- const repository = this.normalizeRepository(packageJson.repository)
- const authors = this.extractAuthors(packageJson)
- return {
- name: packageJson.name || 'unknown',
- version: packageJson.version || '0.0.0',
- ...(packageJson.description && { description: packageJson.description }),
- ...(packageJson.homepage && { homepage: packageJson.homepage }),
- ...(repository && { repository }),
- ...(packageJson.license && { license: packageJson.license }),
- ...(authors && { authors }),
- ...(packageJson.keywords && { keywords: packageJson.keywords }),
- }
- }
-
- /**
- * Normalize repository field to URL string.
- */
- private normalizeRepository(
- repository: string | { type: string; url: string } | undefined,
- ): string | undefined {
- if (!repository) {
- return undefined
- }
- if (typeof repository === 'string') {
- return repository
- }
- return repository.url
- }
-
- /**
- * Extract authors from package.json.
- */
- private extractAuthors(packageJson: PackageJson): string[] | undefined {
- const authors: string[] = []
-
- if (packageJson.author) {
- authors.push(
- typeof packageJson.author === 'string'
- ? packageJson.author
- : packageJson.author.name || packageJson.author.email || 'unknown',
- )
- }
-
- if (packageJson.contributors) {
- for (const contributor of packageJson.contributors) {
- authors.push(
- typeof contributor === 'string'
- ? contributor
- : contributor.name || contributor.email || 'unknown',
- )
- }
- }
-
- return authors.length > 0 ? authors : undefined
- }
-
- /**
- * Detect which lockfile exists and parse it.
- */
- private async detectAndParseLockfile(
- projectPath: string,
- options: ParseOptions,
- ): Promise<LockfileData> {
- // Try package-lock.json first.
- const packageLockPath = path.join(projectPath, 'package-lock.json')
- const hasPackageLock = await this.fileExists(packageLockPath)
- if (hasPackageLock) {
- return this.parsePackageLock(packageLockPath, options)
- }
-
- // Try pnpm-lock.yaml.
- const pnpmLockPath = path.join(projectPath, 'pnpm-lock.yaml')
- const hasPnpmLock = await this.fileExists(pnpmLockPath)
- if (hasPnpmLock) {
- return this.parsePnpmLock(pnpmLockPath, options)
- }
-
- // Try yarn.lock.
- const yarnLockPath = path.join(projectPath, 'yarn.lock')
- const hasYarnLock = await this.fileExists(yarnLockPath)
- if (hasYarnLock) {
- return this.parseYarnLock(yarnLockPath, options)
- }
-
- // No lockfile found.
- return { dependencies: new Map() }
- }
-
- /**
- * Check if file exists.
- */
- private async fileExists(filePath: string): Promise<boolean> {
- try {
- await fs.access(filePath)
- return true
- } catch {
- return false
- }
- }
-
- /**
- * Parse package-lock.json (npm v5+).
- *
- * Reference: cdxgen's parseLockFile() in lib/parsers/js.js
- * Deviation: Direct JSON parsing instead of executing `npm list --json`.
- */
- private async parsePackageLock(
- lockfilePath: string,
- options: ParseOptions,
- ): Promise<LockfileData> {
- const content = await fs.readFile(lockfilePath, 'utf8')
- const lockfile = JSON.parse(content) as PackageLock
-
- const dependencies = new Map()
-
- // package-lock.json v2+ uses "packages" field.
- if (lockfile.packages) {
- for (const [pkgPath, pkgData] of Object.entries(lockfile.packages)) {
- // Skip root package (empty string key).
- if (pkgPath === '') {
- continue
- }
-
- // Skip dev dependencies if not included.
-        if (!options.includeDevDependencies && pkgData.dev) {
-          continue
-        }
-
-        const name = this.extractPackageNameFromPath(pkgPath)
-        const version = pkgData.version || '0.0.0'
-        const key = `${name}@${version}`
-
-        dependencies.set(key, {
-          name,
-          version,
-          isDev: !!pkgData.dev,
-          isOptional: !!pkgData.optional,
-          dependencies: pkgData.dependencies
-            ? Object.keys(pkgData.dependencies)
-            : [],
-          ...(pkgData.resolved && { resolved: pkgData.resolved }),
-          ...(pkgData.integrity && { integrity: pkgData.integrity }),
-          ...(pkgData.license && { license: pkgData.license }),
-        })
-      }
-    }
-    // package-lock.json v1 uses "dependencies" field.
-    else if (lockfile.dependencies) {
-      this.flattenDependencies(lockfile.dependencies, dependencies, options)
-    }
-
-    return { dependencies }
-  }
-
-  /**
-   * Extract package name from node_modules path.
-   */
-  private extractPackageNameFromPath(pkgPath: string): string {
-    // Remove "node_modules/" prefix.
-    const withoutPrefix = pkgPath.replace(/^node_modules\//, '')
-
-    // Handle scoped packages (@scope/name).
-    if (withoutPrefix.startsWith('@')) {
-      const parts = withoutPrefix.split('/')
-      return `${parts[0]}/${parts[1]}`
-    }
-
-    // Regular packages.
-    return withoutPrefix.split('/')[0] || withoutPrefix
-  }
-
-  /**
-   * Flatten nested dependencies (package-lock.json v1).
-   */
-  private flattenDependencies(
-    deps: Record<string, PackageLockDependency>,
-    result: Map<string, DependencyInfo>,
-    options: ParseOptions,
-    _parentKey?: string,
-  ): void {
-    for (const [name, data] of Object.entries(deps)) {
-      // Skip dev dependencies if not included.
-      if (!options.includeDevDependencies && data.dev) {
-        continue
-      }
-
-      const version = data.version || '0.0.0'
-      const key = `${name}@${version}`
-
-      // Only add if not already present (first occurrence wins).
-      if (!result.has(key)) {
-        result.set(key, {
-          name,
-          version,
-          isDev: !!data.dev,
-          isOptional: !!data.optional,
-          dependencies: data.requires ? Object.keys(data.requires) : [],
-          ...(data.resolved && { resolved: data.resolved }),
-          ...(data.integrity && { integrity: data.integrity }),
-        })
-      }
-
-      // Recursively flatten nested dependencies.
-      if (data.dependencies) {
-        this.flattenDependencies(data.dependencies, result, options, key)
-      }
-    }
-  }
-
-  /**
-   * Parse pnpm-lock.yaml.
-   *
-   * Reference: cdxgen's parsePnpmLock() in lib/parsers/js.js
-   * Implementation: Direct YAML parsing (same strategy as cdxgen).
-   */
-  private async parsePnpmLock(
-    lockfilePath: string,
-    options: ParseOptions,
-  ): Promise<LockfileData> {
-    const content = await fs.readFile(lockfilePath, 'utf8')
-    const lockfile = parseYaml(content) as PnpmLock
-
-    const dependencies = new Map<string, DependencyInfo>()
-
-    if (lockfile.packages) {
-      for (const [pkgId, pkgData] of Object.entries(lockfile.packages)) {
-        // Parse package ID (e.g., "/axios/0.21.0" or "/@babel/core/7.12.0").
-        const { name, version } = this.parsePnpmPackageId(pkgId)
-
-        // Skip dev dependencies if not included.
-        if (!options.includeDevDependencies && pkgData.dev) {
-          continue
-        }
-
-        const key = `${name}@${version}`
-
-        dependencies.set(key, {
-          name,
-          version,
-          isDev: !!pkgData.dev,
-          isOptional: !!pkgData.optional,
-          dependencies: pkgData.dependencies
-            ? Object.keys(pkgData.dependencies)
-            : [],
-          ...(pkgData.resolution?.tarball && {
-            resolved: pkgData.resolution.tarball,
-          }),
-          ...(pkgData.resolution?.integrity && {
-            integrity: pkgData.resolution.integrity,
-          }),
-        })
-      }
-    }
-
-    return { dependencies }
-  }
-
-  /**
-   * Parse pnpm package ID into name and version.
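-   *
-   * Expected splits, sketched for the slash-delimited ID layout assumed here:
-   *
-   * @example
-   * parsePnpmPackageId('/axios/0.21.0')       // { name: 'axios', version: '0.21.0' }
-   * parsePnpmPackageId('/@babel/core/7.12.0') // { name: '@babel/core', version: '7.12.0' }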
-   */
-  private parsePnpmPackageId(pkgId: string): {
-    name: string
-    version: string
-  } {
-    // Remove leading slash.
-    const withoutSlash = pkgId.startsWith('/') ? pkgId.slice(1) : pkgId
-
-    // Handle scoped packages (e.g., "@babel/core/7.12.0").
-    if (withoutSlash.startsWith('@')) {
-      const parts = withoutSlash.split('/')
-      const name = `${parts[0]}/${parts[1]}`
-      const version = parts[2] || '0.0.0'
-      return { name, version }
-    }
-
-    // Regular packages (e.g., "axios/0.21.0").
-    const parts = withoutSlash.split('/')
-    const name = parts[0] || withoutSlash
-    const version = parts[1] || '0.0.0'
-    return { name, version }
-  }
-
-  /**
-   * Parse yarn.lock.
-   *
-   * Reference: cdxgen's parseYarnLock() in lib/parsers/js.js
-   * Implementation: Uses @yarnpkg/parsers (same as cdxgen).
-   */
-  private async parseYarnLock(
-    lockfilePath: string,
-    options: ParseOptions,
-  ): Promise<LockfileData> {
-    const content = await fs.readFile(lockfilePath, 'utf8')
-    const lockfile = parseSyml(content) as YarnLock
-
-    const dependencies = new Map<string, DependencyInfo>()
-
-    for (const [pkgDescriptor, pkgData] of Object.entries(lockfile)) {
-      // Parse package descriptor (e.g., "axios@^0.21.0").
-      const { name } = this.parseYarnDescriptor(pkgDescriptor)
-      const version = pkgData.version || '0.0.0'
-      const key = `${name}@${version}`
-
-      // yarn.lock carries no dev flag, so every entry is treated as a
-      // production dependency; dev filtering is not possible from the
-      // lockfile alone.
-      dependencies.set(key, {
-        name,
-        version,
-        isDev: false,
-        isOptional: !!pkgData.optional,
-        dependencies: pkgData.dependencies
-          ? Object.keys(pkgData.dependencies)
-          : [],
-        ...(pkgData.resolved && { resolved: pkgData.resolved }),
-        ...(pkgData.integrity && { integrity: pkgData.integrity }),
-      })
-    }
-
-    return { dependencies }
-  }
-
-  /**
-   * Parse yarn package descriptor into name.
-   */
-  private parseYarnDescriptor(descriptor: string): { name: string } {
-    // yarn.lock keys may be comma-separated descriptor groups, e.g.
-    // "lodash@^4.17.20, lodash@^4.17.21" - use the first descriptor.
-    const first = descriptor.split(',')[0]?.trim() || descriptor
-    // Descriptor format: "package-name@version-range" or "@scope/name@version-range".
-    const atIndex = first.lastIndexOf('@')
-    const name = atIndex > 0 ? first.slice(0, atIndex) : first
-    return { name }
-  }
-
-  /**
-   * Build CycloneDX components from dependencies.
-   *
-   * Reference: cdxgen's createComponents() in lib/parsers/js.js
-   * Deviation: Enhanced PURL generation with qualifiers (integrity, resolved).
-   */
-  private buildComponents(
-    dependencies: Map<string, DependencyInfo>,
-    options: ParseOptions,
-  ): Component[] {
-    const components: Component[] = []
-
-    for (const [_key, dep] of dependencies.entries()) {
-      // Skip dev dependencies if not included.
-      if (!options.includeDevDependencies && dep.isDev) {
-        continue
-      }
-
-      // Treat optional dependencies like dev dependencies: ParseOptions has
-      // no separate flag for them, so they are skipped on the same condition.
-      if (dep.isOptional && !options.includeDevDependencies) {
-        continue
-      }
-
-      const component: Component = {
-        type: 'library',
-        'bom-ref': `pkg:npm/${dep.name}@${dep.version}`,
-        name: dep.name,
-        version: dep.version,
-        purl: `pkg:npm/${dep.name}@${dep.version}`,
-        scope: dep.isDev ? 'optional' : 'required',
-      }
-
-      // Add license if available.
-      if (dep.license) {
-        component.licenses = [{ license: { id: dep.license } }]
-      }
-
-      // Add external reference if resolved URL available.
-      if (dep.resolved) {
-        component.externalReferences = [
-          {
-            url: dep.resolved,
-            type: 'distribution',
-          },
-        ]
-      }
-
-      // Add integrity hash if available.
-      if (dep.integrity) {
-        component.hashes = [this.parseIntegrity(dep.integrity)]
-      }
-
-      components.push(component)
-    }
-
-    return components
-  }
-
-  /**
-   * Parse integrity string into hash object.
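-   *
-   * Intended mapping for SRI strings as npm emits them (a sketch):
-   *
-   * @example
-   * parseIntegrity('sha512-AbCd+ef==') // { alg: 'SHA-512', content: 'AbCd+ef==' }
-   * parseIntegrity('md5-deadbeef')     // { alg: 'SHA-512', content: 'deadbeef' } (fallback)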
-   */
-  private parseIntegrity(integrity: string): {
-    alg: 'SHA-256' | 'SHA-384' | 'SHA-512'
-    content: string
-  } {
-    // Integrity format: "sha512-base64hash" or "sha384-base64hash".
-    // Split on the first "-" only so a payload containing "-" survives intact.
-    const dashIndex = integrity.indexOf('-')
-    const alg = dashIndex === -1 ? integrity : integrity.slice(0, dashIndex)
-    const content = dashIndex === -1 ? '' : integrity.slice(dashIndex + 1)
-
-    const algMap = {
-      __proto__: null,
-      sha256: 'SHA-256',
-      sha384: 'SHA-384',
-      sha512: 'SHA-512',
-    } as const
-
-    return {
-      alg: algMap[alg as keyof typeof algMap] ?? 'SHA-512',
-      content,
-    }
-  }
-
-  /**
-   * Build dependency graph.
-   *
-   * Reference: cdxgen's createDependencyGraph() in lib/parsers/js.js
-   * Implementation: Similar graph construction strategy.
-   */
-  private buildDependencyGraph(
-    packageJson: PackageJson,
-    dependencies: Map<string, DependencyInfo>,
-  ): Dependency[] {
-    const graph: Dependency[] = []
-
-    // Root component depends on direct dependencies.
-    const rootRef = `pkg:npm/${packageJson.name}@${packageJson.version}`
-    const directDeps: string[] = []
-
-    if (packageJson.dependencies) {
-      for (const name of Object.keys(packageJson.dependencies)) {
-        // Find matching version in lockfile.
-        const dep = this.findDependency(name, dependencies)
-        if (dep) {
-          directDeps.push(`pkg:npm/${dep.name}@${dep.version}`)
-        }
-      }
-    }
-
-    graph.push({
-      ref: rootRef,
-      dependsOn: directDeps,
-    })
-
-    // Add transitive dependencies.
-    for (const [_key, dep] of dependencies.entries()) {
-      const ref = `pkg:npm/${dep.name}@${dep.version}`
-      const dependsOn: string[] = []
-
-      for (const depName of dep.dependencies) {
-        const transitiveDep = this.findDependency(depName, dependencies)
-        if (transitiveDep) {
-          dependsOn.push(
-            `pkg:npm/${transitiveDep.name}@${transitiveDep.version}`,
-          )
-        }
-      }
-
-      graph.push({
-        ref,
-        ...(dependsOn.length > 0 && { dependsOn }),
-      })
-    }
-
-    return graph
-  }
-
-  /**
-   * Find dependency in map by name (map keys are "name@version", so a
-   * linear scan is needed when only the name is known).
-   */
-  private findDependency(
-    name: string,
-    dependencies: Map<string, DependencyInfo>,
-  ): DependencyInfo | undefined {
-    for (const [_key, dep] of dependencies.entries()) {
-      if (dep.name === name) {
-        return dep
-      }
-    }
-    return undefined
-  }
-}
-
-/**
- * package.json interface.
- */
-interface PackageJson {
-  name?: string
-  version?: string
-  description?: string
-  homepage?: string
-  repository?: string | { type: string; url: string }
-  license?: string
-  author?: string | { name?: string; email?: string }
-  contributors?: Array<string | { name?: string; email?: string }>
-  keywords?: string[]
-  dependencies?: Record<string, string>
-  devDependencies?: Record<string, string>
-  optionalDependencies?: Record<string, string>
-}
-
-/**
- * package-lock.json interface.
- */
-interface PackageLock {
-  lockfileVersion?: number
-  packages?: Record<string, PackageLockPackage>
-  dependencies?: Record<string, PackageLockDependency>
-}
-
-interface PackageLockPackage {
-  version?: string
-  resolved?: string
-  integrity?: string
-  dev?: boolean
-  optional?: boolean
-  dependencies?: Record<string, string>
-  license?: string
-}
-
-interface PackageLockDependency {
-  version?: string
-  resolved?: string
-  integrity?: string
-  dev?: boolean
-  optional?: boolean
-  requires?: Record<string, string>
-  dependencies?: Record<string, PackageLockDependency>
-}
-
-/**
- * pnpm-lock.yaml interface.
- */
-interface PnpmLock {
-  lockfileVersion?: string
-  packages?: Record<string, PnpmPackage>
-}
-
-interface PnpmPackage {
-  resolution?: {
-    integrity?: string
-    tarball?: string
-  }
-  dependencies?: Record<string, string>
-  dev?: boolean
-  optional?: boolean
-}
-
-/**
- * yarn.lock interface.
- */
-interface YarnLock {
-  [descriptor: string]: YarnPackage
-}
-
-interface YarnPackage {
-  version?: string
-  resolved?: string
-  integrity?: string
-  dependencies?: Record<string, string>
-  optional?: boolean
-}
-
-/**
- * Internal dependency info.
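- *
- * Illustrative value (shape only):
- * { name: 'axios', version: '0.21.0', isDev: false, isOptional: false,
- *   dependencies: ['follow-redirects'], integrity: 'sha512-...' }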
- */
-interface DependencyInfo {
-  name: string
-  version: string
-  isDev: boolean
-  isOptional: boolean
-  dependencies: string[]
-  resolved?: string
-  integrity?: string
-  license?: string
-}
-
-/**
- * Parsed lockfile data.
- */
-interface LockfileData {
-  dependencies: Map<string, DependencyInfo>
-}
diff --git a/packages/sbom-generator/src/parsers/npm/index.test.mts b/packages/sbom-generator/src/parsers/npm/index.test.mts
deleted file mode 100644
index e52df7f46..000000000
--- a/packages/sbom-generator/src/parsers/npm/index.test.mts
+++ /dev/null
@@ -1,69 +0,0 @@
-/**
- * npm Parser Tests
- */
-
-import { describe, expect, it } from 'vitest'
-import { NpmParser } from './index.mts'
-
-describe('NpmParser', () => {
-  const parser = new NpmParser()
-
-  it('should have correct ecosystem', () => {
-    expect(parser.ecosystem).toBe('npm')
-  })
-
-  it('should detect npm projects with package.json', async () => {
-    // Test with socket-cli project (current directory has package.json).
-    const projectPath = process.cwd()
-    const canDetect = await parser.detect(projectPath)
-    expect(canDetect).toBe(true)
-  })
-
-  it('should not detect projects without package.json', async () => {
-    // Test with non-existent directory.
-    const canDetect = await parser.detect('/non-existent-path')
-    expect(canDetect).toBe(false)
-  })
-
-  it('should parse npm project and generate components', async () => {
-    // Test with socket-cli project.
-    const projectPath = process.cwd()
-    const result = await parser.parse(projectPath, {
-      includeDevDependencies: false,
-    })
-
-    expect(result.ecosystem).toBe('npm')
-    expect(result.metadata.name).toBeDefined()
-    expect(result.metadata.version).toBeDefined()
-    expect(result.components.length).toBeGreaterThan(0)
-    expect(result.dependencies.length).toBeGreaterThan(0)
-
-    // Validate component structure.
-    const firstComponent = result.components[0]
-    expect(firstComponent).toHaveProperty('type')
-    expect(firstComponent).toHaveProperty('name')
-    expect(firstComponent).toHaveProperty('version')
-    expect(firstComponent).toHaveProperty('purl')
-    expect(firstComponent['bom-ref']).toMatch(/^pkg:npm\//)
-
-    // Validate dependency structure.
-    const firstDep = result.dependencies[0]
-    expect(firstDep).toHaveProperty('ref')
-    expect(firstDep.ref).toMatch(/^pkg:npm\//)
-  })
-
-  it('should handle projects with dev dependencies', async () => {
-    const projectPath = process.cwd()
-    const withDev = await parser.parse(projectPath, {
-      includeDevDependencies: true,
-    })
-    const withoutDev = await parser.parse(projectPath, {
-      includeDevDependencies: false,
-    })
-
-    // With dev dependencies should have more components.
-    expect(withDev.components.length).toBeGreaterThanOrEqual(
-      withoutDev.components.length,
-    )
-  })
-})
diff --git a/packages/sbom-generator/src/parsers/nuget/index.mts b/packages/sbom-generator/src/parsers/nuget/index.mts
deleted file mode 100644
index dca128817..000000000
--- a/packages/sbom-generator/src/parsers/nuget/index.mts
+++ /dev/null
@@ -1,101 +0,0 @@
-/**
- * nuget Ecosystem Parser
- *
- * Parses .NET projects (.csproj PackageReference entries) into CycloneDX SBOM format.
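- *
- * A typical input it targets (PackageReference items in a .csproj):
- *
- *   <ItemGroup>
- *     <PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
- *   </ItemGroup>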
- * - * Lock-Step Reference: cdxgen's lib/parsers/dotnet.js - * @see https://github.com/CycloneDX/cdxgen/blob/master/lib/parsers/dotnet.js - */ - -import { promises as fs } from 'node:fs' -import path from 'node:path' -import { XMLParser } from 'fast-xml-parser' -import type { Component, Dependency } from '../../types/sbom.mts' -import type { - Ecosystem, - ParseOptions, - ParseResult, - Parser, - ProjectMetadata, -} from '../../types/parser.mts' - -export class NugetParser implements Parser { - readonly ecosystem: Ecosystem = 'nuget' - private xmlParser = new XMLParser({ ignoreAttributes: false }) - - async detect(projectPath: string): Promise { - try { - const csprojFiles = await fs.readdir(projectPath) - return csprojFiles.some(f => f.endsWith('.csproj')) - } catch { - return false - } - } - - async parse( - projectPath: string, - options: ParseOptions = {}, - ): Promise { - const metadata: ProjectMetadata = { - name: path.basename(projectPath), - version: '0.0.0', - } - - const dependencies = new Map() - - // Parse .csproj files for PackageReference. - const files = await fs.readdir(projectPath) - for (const file of files) { - if (!file.endsWith('.csproj')) continue - - const content = await fs.readFile(path.join(projectPath, file), 'utf8') - const parsed = this.xmlParser.parse(content) - - // Extract PackageReference elements. - const project = parsed.Project - if (project?.ItemGroup) { - const itemGroups = Array.isArray(project.ItemGroup) - ? project.ItemGroup - : [project.ItemGroup] - for (const group of itemGroups) { - const refs = group.PackageReference - if (refs) { - const refArray = Array.isArray(refs) ? refs : [refs] - for (const ref of refArray) { - const name = ref['@_Include'] - const version = ref['@_Version'] - if (name && version) { - dependencies.set(name, { name, version }) - } - } - } - } - } - } - - const components: Component[] = Array.from(dependencies.values()).map( - dep => ({ - type: 'library', - 'bom-ref': `pkg:nuget/${dep.name}@${dep.version}`, - name: dep.name, - version: dep.version, - purl: `pkg:nuget/${dep.name}@${dep.version}`, - scope: 'required', - }), - ) - - const graph: Dependency[] = [ - { - ref: `pkg:nuget/${metadata.name}@${metadata.version}`, - dependsOn: components.map(c => c.purl || ''), - }, - ] - - return { - ecosystem: this.ecosystem, - metadata, - components, - dependencies: graph, - } - } -} diff --git a/packages/sbom-generator/src/parsers/openvsx/index.mts b/packages/sbom-generator/src/parsers/openvsx/index.mts deleted file mode 100644 index 0614c0f86..000000000 --- a/packages/sbom-generator/src/parsers/openvsx/index.mts +++ /dev/null @@ -1,50 +0,0 @@ -/** - * openvsx Ecosystem Parser - * - * Parses VS Code extensions from Open VSX Registry into CycloneDX SBOM format. - * Socket-specific parser (no cdxgen equivalent). - * - * Note: This is a placeholder implementation. Full implementation requires - * Open VSX API integration to fetch extension metadata. - */ - -import type { Component, Dependency } from '../../types/sbom.mts' -import type { - Ecosystem, - ParseOptions, - ParseResult, - Parser, - ProjectMetadata, -} from '../../types/parser.mts' - -export class OpenvsxParser implements Parser { - readonly ecosystem: Ecosystem = 'openvsx' - - async detect(projectPath: string): Promise { - // Detection would check for .vscode/extensions.json or package.json with vscode extension metadata. - return false - } - - async parse( - projectPath: string, - options: ParseOptions = {}, - ): Promise { - // Placeholder implementation. 
- // Full implementation would: - // 1. Parse .vscode/extensions.json for extension IDs - // 2. Query Open VSX API for metadata - // 3. Generate components with pkg:vscode/publisher.extension@version PURLs - - const metadata: ProjectMetadata = { - name: 'vscode-extensions', - version: '0.0.0', - } - - return { - ecosystem: this.ecosystem, - metadata, - components: [], - dependencies: [], - } - } -} diff --git a/packages/sbom-generator/src/parsers/pypi/index.mts b/packages/sbom-generator/src/parsers/pypi/index.mts deleted file mode 100644 index ceb50900d..000000000 --- a/packages/sbom-generator/src/parsers/pypi/index.mts +++ /dev/null @@ -1,638 +0,0 @@ -/** - * pypi Ecosystem Parser - * - * Parses Python projects (pyproject.toml, setup.py + lockfiles) into CycloneDX SBOM format. - * Supports: requirements.txt, poetry.lock, Pipfile.lock - * - * Lock-Step Reference: cdxgen's lib/parsers/python.js - * - Baseline: cdxgen v11.11.0 - * - Lock-Step Score: Target 90-100 - * - Deviations: Pure TypeScript TOML/JSON parsing (no pip binary) - * - * @see https://github.com/CycloneDX/cdxgen/blob/master/lib/parsers/python.js - */ - -import { promises as fs } from 'node:fs' -import path from 'node:path' -import { parse as parseToml } from '@iarna/toml' -import type { Component, Dependency } from '../../types/sbom.mts' -import type { - Ecosystem, - ParseOptions, - ParseResult, - Parser, - ProjectMetadata, -} from '../../types/parser.mts' - -/** - * Python package dependency information. - */ -interface PypiDependencyInfo { - name: string - version: string - extras?: string[] - markers?: string - isDev?: boolean - dependencies: string[] -} - -/** - * Poetry lockfile format. - */ -interface PoetryLock { - package?: Array<{ - name: string - version: string - description?: string - category?: string - optional?: boolean - dependencies?: Record< - string, - string | { version: string; markers?: string } - > - }> - metadata?: { - 'python-versions'?: string - 'content-hash'?: string - } -} - -/** - * Pipfile.lock format. - */ -interface PipfileLock { - _meta?: { - hash?: { sha256?: string } - 'pipfile-spec'?: number - requires?: { python_version?: string } - } - default?: Record< - string, - { - version: string - hashes?: string[] - markers?: string - extras?: string[] - } - > - develop?: Record< - string, - { - version: string - hashes?: string[] - markers?: string - extras?: string[] - } - > -} - -/** - * pyproject.toml format. - */ -interface PyprojectToml { - project?: { - name?: string - version?: string - description?: string - readme?: string | { file?: string; text?: string } - requires?: string[] - license?: string | { text?: string; file?: string } - authors?: Array<{ name?: string; email?: string }> - maintainers?: Array<{ name?: string; email?: string }> - keywords?: string[] - classifiers?: string[] - urls?: Record - dependencies?: string[] - 'optional-dependencies'?: Record - } - tool?: { - poetry?: { - name?: string - version?: string - description?: string - authors?: string[] - license?: string - readme?: string - homepage?: string - repository?: string - documentation?: string - keywords?: string[] - dependencies?: Record< - string, - string | { version: string; extras?: string[] } - > - 'dev-dependencies'?: Record< - string, - string | { version: string; extras?: string[] } - > - group?: Record }> - } - } -} - -/** - * requirements.txt dependency line. 
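- *
- * Illustrative parse of one PEP 508 line (fields as declared below):
- *   'django[email]>=3.0; python_version >= "3.7"' maps to
- *   { name: 'django', extras: ['email'], specifier: '>=3.0',
- *     markers: 'python_version >= "3.7"' }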
- */ -interface RequirementLine { - name: string - version?: string - specifier?: string - extras?: string[] - markers?: string -} - -/** - * Lockfile data aggregated from various formats. - */ -interface LockfileData { - dependencies: Map - format: 'poetry' | 'pipfile' | 'requirements' | 'none' -} - -/** - * pypi parser implementation. - * - * Reference: cdxgen's python.js parser - * Target: 90-100 lock-step score - */ -export class PypiParser implements Parser { - readonly ecosystem: Ecosystem = 'pypi' - - /** - * Detect if this is a Python project. - * - * cdxgen reference: detectPythonProject() checks for setup.py, pyproject.toml, requirements.txt. - * Our implementation: Same detection strategy as cdxgen. - * - * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/python.js#L50-L70 - */ - async detect(projectPath: string): Promise { - try { - // Check for pyproject.toml (PEP 621 or Poetry format). - const pyprojectPath = path.join(projectPath, 'pyproject.toml') - await fs.access(pyprojectPath) - return true - } catch { - // Check for setup.py (legacy format). - try { - const setupPyPath = path.join(projectPath, 'setup.py') - await fs.access(setupPyPath) - return true - } catch { - // Check for requirements.txt (common in pip-based projects). - try { - const requirementsPath = path.join(projectPath, 'requirements.txt') - await fs.access(requirementsPath) - return true - } catch { - return false - } - } - } - } - - /** - * Parse Python project and generate SBOM components. - * - * Reference: cdxgen's parsePythonProject() in lib/parsers/python.js - */ - async parse( - projectPath: string, - options: ParseOptions = {}, - ): Promise { - // Read project metadata. - const metadata = await this.extractMetadata(projectPath) - - // Detect and parse lockfile. - const lockfileData = await this.detectAndParseLockfile(projectPath, options) - - // Convert to CycloneDX format. - const components = this.buildComponents(lockfileData.dependencies, options) - const dependencies = this.buildDependencyGraph( - metadata, - lockfileData.dependencies, - ) - - return { - ecosystem: this.ecosystem, - metadata, - components, - dependencies, - } - } - - /** - * Extract project metadata from pyproject.toml or setup.py. - * - * cdxgen reference: extractPyMetadata() reads pyproject.toml and setup.py. - * Our implementation: Parse TOML directly (no Python execution), supports PEP 621 and Poetry formats. - * - * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/python.js#L100-L150 - */ - private async extractMetadata(projectPath: string): Promise { - // Try pyproject.toml first. - try { - const pyprojectPath = path.join(projectPath, 'pyproject.toml') - const content = await fs.readFile(pyprojectPath, 'utf8') - const pyproject = parseToml(content) as PyprojectToml - - // Try PEP 621 format first (project table). - if (pyproject.project) { - return { - name: pyproject.project.name || 'unknown', - version: pyproject.project.version || '0.0.0', - description: pyproject.project.description, - homepage: pyproject.project.urls?.Homepage, - repository: - pyproject.project.urls?.Repository || - pyproject.project.urls?.Source, - license: - typeof pyproject.project.license === 'string' - ? pyproject.project.license - : pyproject.project.license?.text, - authors: pyproject.project.authors?.map( - a => `${a.name || ''} <${a.email || ''}>`, - ), - keywords: pyproject.project.keywords, - } - } - - // Fall back to Poetry format (tool.poetry table). 
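-      // (e.g. a minimal Poetry table:
-      //   [tool.poetry]
-      //   name = "my-app"
-      //   version = "1.0.0")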
- if (pyproject.tool?.poetry) { - const poetry = pyproject.tool.poetry - return { - name: poetry.name || 'unknown', - version: poetry.version || '0.0.0', - description: poetry.description, - homepage: poetry.homepage, - repository: poetry.repository, - license: poetry.license, - authors: poetry.authors, - keywords: poetry.keywords, - } - } - } catch { - // pyproject.toml doesn't exist or failed to parse. - } - - // Fall back to setup.py (best effort - we can't execute Python). - try { - const setupPyPath = path.join(projectPath, 'setup.py') - const content = await fs.readFile(setupPyPath, 'utf8') - - // Very basic regex parsing (won't handle all cases). - const nameMatch = content.match(/name\s*=\s*["']([^"']+)["']/) - const versionMatch = content.match(/version\s*=\s*["']([^"']+)["']/) - - return { - name: nameMatch ? nameMatch[1] : 'unknown', - version: versionMatch ? versionMatch[1] : '0.0.0', - } - } catch { - // setup.py doesn't exist or failed to parse. - } - - return { - name: 'unknown', - version: '0.0.0', - } - } - - /** - * Detect and parse lockfile. - * - * Reference: cdxgen's detectPythonLockfile() in lib/parsers/python.js - * Priority: poetry.lock > Pipfile.lock > requirements.txt - */ - private async detectAndParseLockfile( - projectPath: string, - options: ParseOptions, - ): Promise { - // Try poetry.lock first (most complete). - try { - const poetryLockPath = path.join(projectPath, 'poetry.lock') - await fs.access(poetryLockPath) - return await this.parsePoetryLock(poetryLockPath, options) - } catch { - // poetry.lock doesn't exist. - } - - // Try Pipfile.lock second. - try { - const pipfileLockPath = path.join(projectPath, 'Pipfile.lock') - await fs.access(pipfileLockPath) - return await this.parsePipfileLock(pipfileLockPath, options) - } catch { - // Pipfile.lock doesn't exist. - } - - // Fall back to requirements.txt (least complete). - try { - const requirementsPath = path.join(projectPath, 'requirements.txt') - await fs.access(requirementsPath) - return await this.parseRequirementsTxt(requirementsPath, options) - } catch { - // No lockfile found. - } - - return { - dependencies: new Map(), - format: 'none', - } - } - - /** - * Parse poetry.lock file. - * - * cdxgen reference: parsePoetryLock() parses TOML structure for dependencies and metadata. - * Our implementation: Direct TOML parsing using @iarna/toml (same strategy as cdxgen). - * Extracts package name, version, category (dev vs main), and dependencies. - * - * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/python.js#L200-L250 - */ - private async parsePoetryLock( - lockfilePath: string, - options: ParseOptions, - ): Promise { - const content = await fs.readFile(lockfilePath, 'utf8') - const lockfile = parseToml(content) as PoetryLock - - const dependencies = new Map() - - if (!lockfile.package) { - return { dependencies, format: 'poetry' } - } - - for (const pkg of lockfile.package) { - const isDev = pkg.category === 'dev' || pkg.optional === true - const deps: string[] = [] - - if (pkg.dependencies) { - for (const [depName, depSpec] of Object.entries(pkg.dependencies)) { - deps.push(depName) - } - } - - dependencies.set(pkg.name, { - name: pkg.name, - version: pkg.version, - isDev, - dependencies: deps, - }) - } - - return { dependencies, format: 'poetry' } - } - - /** - * Parse Pipfile.lock file. - * - * cdxgen reference: parsePipfileLock() parses JSON structure for default and develop dependencies. - * Our implementation: Direct JSON parsing (same strategy as cdxgen). 
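-   * (Pipfile.lock pins exact versions, e.g. "requests": { "version": "==2.31.0" },
-   * which is why the leading "==" is stripped below.)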
- * Handles version specifiers (==, >=, etc.) and dependency metadata (hashes, markers, extras). - * - * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/python.js#L280-L320 - */ - private async parsePipfileLock( - lockfilePath: string, - options: ParseOptions, - ): Promise { - const content = await fs.readFile(lockfilePath, 'utf8') - const lockfile = JSON.parse(content) as PipfileLock - - const dependencies = new Map() - - // Parse default dependencies. - if (lockfile.default) { - for (const [name, spec] of Object.entries(lockfile.default)) { - const version = spec.version.replace(/^==/, '') - dependencies.set(name, { - name, - version, - extras: spec.extras, - markers: spec.markers, - isDev: false, - dependencies: [], - }) - } - } - - // Parse dev dependencies. - if (lockfile.develop) { - for (const [name, spec] of Object.entries(lockfile.develop)) { - const version = spec.version.replace(/^==/, '') - dependencies.set(name, { - name, - version, - extras: spec.extras, - markers: spec.markers, - isDev: true, - dependencies: [], - }) - } - } - - return { dependencies, format: 'pipfile' } - } - - /** - * Parse requirements.txt file. - * - * cdxgen reference: parseRequirementsTxt() parses line-by-line, handling comments, extras, and markers. - * Our implementation: Line-by-line text parsing (same strategy as cdxgen). - * Skips comments (#), blank lines, and URL-based requirements (git://, http://). - * Extracts package name, version specifier, extras ([extra1,extra2]), and markers (;condition). - * - * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/python.js#L350-L400 - */ - private async parseRequirementsTxt( - lockfilePath: string, - options: ParseOptions, - ): Promise { - const content = await fs.readFile(lockfilePath, 'utf8') - const lines = content.split('\n') - - const dependencies = new Map() - - for (const line of lines) { - const trimmed = line.trim() - - // Skip comments and empty lines. - if (!trimmed || trimmed.startsWith('#')) { - continue - } - - // Skip URL-based requirements (git, http). - if (trimmed.includes('://')) { - continue - } - - // Parse requirement line. - const req = this.parseRequirementLine(trimmed) - - if (req.name) { - dependencies.set(req.name, { - name: req.name, - version: req.version || '0.0.0', - extras: req.extras, - markers: req.markers, - isDev: false, - dependencies: [], - }) - } - } - - return { dependencies, format: 'requirements' } - } - - /** - * Parse a single requirement line. - * - * cdxgen reference: parseRequirementLine() extracts name, version, extras, and markers using regex. - * Our implementation: Similar regex-based parsing with TypeScript types. - * - * PEP 508 format examples: - * - requests==2.28.0 (pinned version) - * - numpy>=1.20.0,<2.0.0 (version range) - * - django[email]>=3.0 (with extras) - * - pytest; python_version >= "3.7" (with markers) - * - * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/python.js#L420-L460 - * @see https://peps.python.org/pep-0508/ (PEP 508 specification) - */ - private parseRequirementLine(line: string): RequirementLine { - // Remove inline comments. - const commentIndex = line.indexOf('#') - if (commentIndex !== -1) { - line = line.slice(0, commentIndex).trim() - } - - // Extract markers (after semicolon). - let markers: string | undefined - const markerIndex = line.indexOf(';') - if (markerIndex !== -1) { - markers = line.slice(markerIndex + 1).trim() - line = line.slice(0, markerIndex).trim() - } - - // Extract extras (in square brackets). 
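-    // (e.g. 'django[email,ssl]>=3.0' yields extras ['email', 'ssl'].)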
- let extras: string[] | undefined - const extrasMatch = line.match(/\[([^\]]+)\]/) - if (extrasMatch) { - extras = extrasMatch[1].split(',').map(e => e.trim()) - line = line.replace(/\[([^\]]+)\]/, '') - } - - // Extract name and version specifier. - const match = line.match(/^([a-zA-Z0-9_-]+)(.*)?$/) - if (!match) { - return { name: '', version: undefined, extras, markers } - } - - const name = match[1] - const specifier = match[2]?.trim() - - // Extract version from specifier (e.g., ==2.28.0, >=1.20.0). - let version: string | undefined - if (specifier) { - const versionMatch = specifier.match(/==\s*([^\s,;]+)/) - if (versionMatch) { - version = versionMatch[1] - } - } - - return { name, version, specifier, extras, markers } - } - - /** - * Build CycloneDX components from dependencies. - * - * cdxgen reference: createPythonComponents() converts parsed dependencies to CycloneDX components. - * Our implementation: Same conversion logic with CycloneDX v1.5 types. - * Generates PURLs in format: pkg:pypi/@ - * Maps dev dependencies to 'optional' scope, production to 'required' scope. - * - * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/python.js#L480-L520 - */ - private buildComponents( - dependencies: Map, - options: ParseOptions, - ): Component[] { - const components: Component[] = [] - - for (const [key, dep] of dependencies.entries()) { - // Skip dev dependencies if excluded. - if (dep.isDev && options.excludeDev) { - continue - } - - const component: Component = { - type: 'library', - 'bom-ref': `pkg:pypi/${dep.name}@${dep.version}`, - name: dep.name, - version: dep.version, - purl: `pkg:pypi/${dep.name}@${dep.version}`, - scope: dep.isDev ? 'optional' : 'required', - } - - components.push(component) - } - - return components - } - - /** - * Build dependency graph. - * - * cdxgen reference: createPythonDependencyGraph() constructs CycloneDX dependency relationships. - * Our implementation: Same graph construction with root → direct → transitive relationships. - * Root component depends on all top-level packages. - * Each package's transitive dependencies are mapped from lockfile data. - * - * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/python.js#L540-L580 - */ - private buildDependencyGraph( - metadata: ProjectMetadata, - dependencies: Map, - ): Dependency[] { - const graph: Dependency[] = [] - - // Root component depends on all direct dependencies. - const rootRef = `pkg:pypi/${metadata.name}@${metadata.version}` - const directDeps: string[] = [] - - for (const [key, dep] of dependencies.entries()) { - directDeps.push(`pkg:pypi/${dep.name}@${dep.version}`) - } - - graph.push({ - ref: rootRef, - dependsOn: directDeps, - }) - - // Add transitive dependencies (if available from lockfile). - for (const [key, dep] of dependencies.entries()) { - const ref = `pkg:pypi/${dep.name}@${dep.version}` - const dependsOn: string[] = [] - - for (const depName of dep.dependencies) { - // Find matching dependency in map. 
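-      // (Poetry lockfiles key packages by name, so an exact-name get() is
-      // enough; e.g. 'urllib3' resolves to the locked urllib3 entry.)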
- const transitiveDep = dependencies.get(depName) - if (transitiveDep) { - dependsOn.push( - `pkg:pypi/${transitiveDep.name}@${transitiveDep.version}`, - ) - } - } - - if (dependsOn.length > 0) { - graph.push({ - ref, - dependsOn, - }) - } - } - - return graph - } -} diff --git a/packages/sbom-generator/src/parsers/pypi/index.test.mts b/packages/sbom-generator/src/parsers/pypi/index.test.mts deleted file mode 100644 index 881c0ac05..000000000 --- a/packages/sbom-generator/src/parsers/pypi/index.test.mts +++ /dev/null @@ -1,331 +0,0 @@ -/** - * Tests for pypi parser. - * - * Lock-Step Reference: cdxgen's tests for python.js parser - * Target: 90-100 lock-step quality - */ - -import { readFile, mkdir, writeFile } from 'node:fs/promises' -import path from 'node:path' -import { afterEach, beforeEach, describe, expect, it } from 'vitest' -import type { Component } from '../../types/sbom.mts' -import { PypiParser } from './index.mts' - -describe('PypiParser', () => { - const parser = new PypiParser() - const fixturesPath = path.join(__dirname, '../../../test/fixtures/python') - - describe('ecosystem', () => { - it('should have correct ecosystem', () => { - expect(parser.ecosystem).toBe('pypi') - }) - }) - - describe('detect', () => { - it('should detect Python projects with pyproject.toml', async () => { - const detected = await parser.detect(fixturesPath) - expect(detected).toBe(true) - }) - - it('should not detect non-Python projects', async () => { - const detected = await parser.detect('/tmp/non-existent-project') - expect(detected).toBe(false) - }) - }) - - describe('poetry.lock parsing', () => { - it('should parse poetry.lock format', async () => { - const result = await parser.parse(fixturesPath) - - expect(result.ecosystem).toBe('pypi') - expect(result.components.length).toBeGreaterThan(0) - - // Should find requests package. - const requests = result.components.find(c => c.name === 'requests') - expect(requests).toBeDefined() - expect(requests?.version).toBe('2.31.0') - expect(requests?.purl).toBe('pkg:pypi/requests@2.31.0') - expect(requests?.type).toBe('library') - expect(requests?.scope).toBe('required') - }) - - it('should parse Poetry metadata from pyproject.toml', async () => { - const result = await parser.parse(fixturesPath) - - expect(result.metadata.name).toBe('test-python-app') - expect(result.metadata.version).toBe('1.0.0') - expect(result.metadata.description).toBe('A test Python application') - expect(result.metadata.homepage).toBe('https://example.com') - expect(result.metadata.repository).toBe( - 'https://github.com/example/test-python-app', - ) - expect(result.metadata.license).toBe('MIT') - }) - - it('should mark dev dependencies correctly', async () => { - const result = await parser.parse(fixturesPath) - - const pytest = result.components.find(c => c.name === 'pytest') - expect(pytest).toBeDefined() - expect(pytest?.scope).toBe('optional') - }) - - it('should exclude dev dependencies when excludeDev is true', async () => { - const result = await parser.parse(fixturesPath, { excludeDev: true }) - - const pytest = result.components.find(c => c.name === 'pytest') - expect(pytest).toBeUndefined() - }) - - it('should build dependency graph from poetry.lock', async () => { - const result = await parser.parse(fixturesPath) - - expect(result.dependencies.length).toBeGreaterThan(0) - - // Root component should exist. 
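-      // (Its ref is expected to look like 'pkg:pypi/test-python-app@1.0.0'.)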
- const rootDep = result.dependencies.find(d => - d.ref.includes('test-python-app@1.0.0'), - ) - expect(rootDep).toBeDefined() - expect(rootDep?.dependsOn.length).toBeGreaterThan(0) - - // requests should have dependencies. - const requestsDep = result.dependencies.find(d => - d.ref.includes('requests@2.31.0'), - ) - expect(requestsDep).toBeDefined() - expect(requestsDep?.dependsOn).toContain('pkg:pypi/certifi@2023.7.22') - expect(requestsDep?.dependsOn).toContain('pkg:pypi/urllib3@2.0.4') - }) - }) - - describe('Pipfile.lock parsing', () => { - let tempDir: string - - beforeEach(async () => { - // Create temporary directory for Pipfile.lock test. - tempDir = path.join('/tmp', `pypi-test-${Date.now()}`) - await mkdir(tempDir, { recursive: true }) - - // Copy Pipfile.lock to temp directory. - const pipfileLock = await readFile( - path.join(fixturesPath, 'Pipfile.lock'), - 'utf8', - ) - await writeFile(path.join(tempDir, 'Pipfile.lock'), pipfileLock) - - // Create minimal pyproject.toml. - await writeFile( - path.join(tempDir, 'pyproject.toml'), - '[project]\nname = "pipfile-test"\nversion = "1.0.0"', - ) - }) - - afterEach(async () => { - // Clean up temporary directory. - await import('trash').then(({ trash }) => trash([tempDir])) - }) - - it('should parse Pipfile.lock format', async () => { - const result = await parser.parse(tempDir) - - expect(result.components.length).toBeGreaterThan(0) - - const requests = result.components.find(c => c.name === 'requests') - expect(requests).toBeDefined() - expect(requests?.version).toBe('2.31.0') - }) - - it('should parse dev dependencies from Pipfile.lock', async () => { - const result = await parser.parse(tempDir) - - const pytest = result.components.find(c => c.name === 'pytest') - expect(pytest).toBeDefined() - expect(pytest?.scope).toBe('optional') - }) - }) - - describe('requirements.txt parsing', () => { - let tempDir: string - - beforeEach(async () => { - // Create temporary directory for requirements.txt test. - tempDir = path.join('/tmp', `pypi-test-${Date.now()}`) - await mkdir(tempDir, { recursive: true }) - - // Copy requirements.txt to temp directory. - const requirementsTxt = await readFile( - path.join(fixturesPath, 'requirements.txt'), - 'utf8', - ) - await writeFile(path.join(tempDir, 'requirements.txt'), requirementsTxt) - - // Create minimal pyproject.toml. - await writeFile( - path.join(tempDir, 'pyproject.toml'), - '[project]\nname = "requirements-test"\nversion = "1.0.0"', - ) - }) - - afterEach(async () => { - // Clean up temporary directory. - await import('trash').then(({ trash }) => trash([tempDir])) - }) - - it('should parse requirements.txt format', async () => { - const result = await parser.parse(tempDir) - - expect(result.components.length).toBeGreaterThan(0) - - const requests = result.components.find(c => c.name === 'requests') - expect(requests).toBeDefined() - expect(requests?.version).toBe('2.31.0') - }) - - it('should handle version ranges in requirements.txt', async () => { - const result = await parser.parse(tempDir) - - // numpy uses >=1.20.0,<2.0.0 - should be parsed. - const numpy = result.components.find(c => c.name === 'numpy') - expect(numpy).toBeDefined() - // Version not pinned in requirements.txt, so defaults to 0.0.0. - expect(numpy?.version).toBe('0.0.0') - }) - - it('should handle extras in requirements.txt', async () => { - const result = await parser.parse(tempDir) - - // flask[async] should be parsed. 
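-      // (Extras do not change the PURL: still pkg:pypi/flask@2.3.0.)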
-      const flask = result.components.find(c => c.name === 'flask')
-      expect(flask).toBeDefined()
-      expect(flask?.version).toBe('2.3.0')
-    })
-
-    it('should skip comments and blank lines', async () => {
-      const result = await parser.parse(tempDir)
-
-      // The fixture lists 10 packages (certifi, charset-normalizer, idna,
-      // requests, urllib3, flask, numpy, pandas, pytest, click); at least 9
-      // should survive parsing.
-      expect(result.components.length).toBeGreaterThanOrEqual(9)
-    })
-
-    it('should handle markers in requirements.txt', async () => {
-      const result = await parser.parse(tempDir)
-
-      // pytest with marker should be parsed.
-      const pytest = result.components.find(c => c.name === 'pytest')
-      expect(pytest).toBeDefined()
-    })
-  })
-
-  describe('PURL generation', () => {
-    it('should generate valid PURLs for Python packages', async () => {
-      const result = await parser.parse(fixturesPath)
-
-      const requests = result.components.find(c => c.name === 'requests')
-      expect(requests?.purl).toBe('pkg:pypi/requests@2.31.0')
-      expect(requests?.['bom-ref']).toBe('pkg:pypi/requests@2.31.0')
-    })
-  })
-
-  describe('PEP 621 format', () => {
-    let tempDir: string
-
-    beforeEach(async () => {
-      // Create temporary directory for PEP 621 test.
-      tempDir = path.join('/tmp', `pypi-test-${Date.now()}`)
-      await mkdir(tempDir, { recursive: true })
-
-      // Copy PEP 621 pyproject.toml.
-      const pyprojectToml = await readFile(
-        path.join(fixturesPath, 'pyproject-pep621.toml'),
-        'utf8',
-      )
-      await writeFile(path.join(tempDir, 'pyproject.toml'), pyprojectToml)
-
-      // Copy poetry.lock for dependencies.
-      const poetryLock = await readFile(
-        path.join(fixturesPath, 'poetry.lock'),
-        'utf8',
-      )
-      await writeFile(path.join(tempDir, 'poetry.lock'), poetryLock)
-    })
-
-    afterEach(async () => {
-      // Clean up temporary directory.
-      await import('trash').then(({ trash }) => trash([tempDir]))
-    })
-
-    it('should parse PEP 621 pyproject.toml metadata', async () => {
-      const result = await parser.parse(tempDir)
-
-      expect(result.metadata.name).toBe('test-pep621-app')
-      expect(result.metadata.version).toBe('2.0.0')
-      expect(result.metadata.description).toBe('A test PEP 621 application')
-      expect(result.metadata.license).toBe('MIT')
-    })
-  })
-
-  describe('edge cases', () => {
-    it('should handle empty poetry.lock', async () => {
-      const tempDir = path.join('/tmp', `pypi-test-${Date.now()}`)
-      await mkdir(tempDir, { recursive: true })
-
-      await writeFile(
-        path.join(tempDir, 'pyproject.toml'),
-        '[project]\nname = "empty"\nversion = "0.0.0"',
-      )
-      await writeFile(
-        path.join(tempDir, 'poetry.lock'),
-        '[metadata]\npython-versions = "^3.8"',
-      )
-
-      const result = await parser.parse(tempDir)
-
-      expect(result.components.length).toBe(0)
-
-      // Clean up.
-      await import('trash').then(({ trash }) => trash([tempDir]))
-    })
-
-    it('should handle missing lockfile gracefully', async () => {
-      const tempDir = path.join('/tmp', `pypi-test-${Date.now()}`)
-      await mkdir(tempDir, { recursive: true })
-
-      await writeFile(
-        path.join(tempDir, 'pyproject.toml'),
-        '[project]\nname = "no-lock"\nversion = "0.0.0"',
-      )
-
-      const result = await parser.parse(tempDir)
-
-      expect(result.components.length).toBe(0)
-      expect(result.ecosystem).toBe('pypi')
-
-      // Clean up.
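-      // ('trash' moves the directory to the OS trash rather than deleting it.)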
- await import('trash').then(({ trash }) => trash([tempDir])) - }) - - it('should handle malformed requirement lines', async () => { - const tempDir = path.join('/tmp', `pypi-test-${Date.now()}`) - await mkdir(tempDir, { recursive: true }) - - await writeFile( - path.join(tempDir, 'pyproject.toml'), - '[project]\nname = "malformed"\nversion = "0.0.0"', - ) - await writeFile( - path.join(tempDir, 'requirements.txt'), - '# Valid\nrequests==2.31.0\n\n# Invalid lines\ngit+https://github.com/user/repo.git\nhttp://example.com/package.tar.gz\n', - ) - - const result = await parser.parse(tempDir) - - // Should only find requests, skip URL-based requirements. - expect(result.components.length).toBe(1) - expect(result.components[0].name).toBe('requests') - - // Clean up. - await import('trash').then(({ trash }) => trash([tempDir])) - }) - }) -}) diff --git a/packages/sbom-generator/src/parsers/rubygems/index.mts b/packages/sbom-generator/src/parsers/rubygems/index.mts deleted file mode 100644 index d581a96d2..000000000 --- a/packages/sbom-generator/src/parsers/rubygems/index.mts +++ /dev/null @@ -1,246 +0,0 @@ -/** - * rubygems Ecosystem Parser - * - * Parses Ruby projects (Gemfile + Gemfile.lock) into CycloneDX SBOM format. - * Supports: Gemfile.lock (lockfile) - * - * Lock-Step Reference: cdxgen's lib/parsers/ruby.js - * - Baseline: cdxgen v11.11.0 - * - Lock-Step Score: Target 90-100 - * - Deviations: Pure text parsing (no bundler binary) - * - * @see https://github.com/CycloneDX/cdxgen/blob/master/lib/parsers/ruby.js - */ - -import { promises as fs } from 'node:fs' -import path from 'node:path' -import type { Component, Dependency } from '../../types/sbom.mts' -import type { - Ecosystem, - ParseOptions, - ParseResult, - Parser, - ProjectMetadata, -} from '../../types/parser.mts' - -/** - * Ruby gem dependency information. - */ -interface GemDependencyInfo { - name: string - version: string - dependencies: string[] -} - -/** - * rubygems parser implementation. - * - * cdxgen reference: parseRubyProject() in lib/parsers/ruby.js - * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/ruby.js - */ -export class RubygemsParser implements Parser { - readonly ecosystem: Ecosystem = 'rubygems' - - /** - * Detect if this is a Ruby project. - * - * cdxgen reference: detectRubyProject() checks for Gemfile or Gemfile.lock. - * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/ruby.js#L30-L50 - */ - async detect(projectPath: string): Promise { - try { - const gemfilePath = path.join(projectPath, 'Gemfile') - await fs.access(gemfilePath) - return true - } catch { - try { - const gemfileLockPath = path.join(projectPath, 'Gemfile.lock') - await fs.access(gemfileLockPath) - return true - } catch { - return false - } - } - } - - /** - * Parse Ruby project and generate SBOM components. - * - * cdxgen reference: parseRubyProject() reads Gemfile.lock. - * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/ruby.js#L80-L130 - */ - async parse( - projectPath: string, - options: ParseOptions = {}, - ): Promise { - const metadata = await this.extractMetadata(projectPath) - const dependencies = await this.parseGemfileLock(projectPath) - - const components = this.buildComponents(dependencies, options) - const dependencyGraph = this.buildDependencyGraph(metadata, dependencies) - - return { - ecosystem: this.ecosystem, - metadata, - components, - dependencies: dependencyGraph, - } - } - - /** - * Extract project metadata. 
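-   * (Assumed fallback: the directory basename, e.g. /repo/my_app gives the
-   * name 'my_app', with a placeholder version of 0.0.0.)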
- * - * Ruby projects don't have explicit metadata files like package.json. - * Metadata would come from .gemspec files. - */ - private async extractMetadata(projectPath: string): Promise { - const projectName = path.basename(projectPath) - return { - name: projectName, - version: '0.0.0', - } - } - - /** - * Parse Gemfile.lock. - * - * cdxgen reference: parseGemfileLock() parses custom text format. - * Gemfile.lock format: - * GEM - * remote: https://rubygems.org/ - * specs: - * gem-name (1.0.0) - * dependency1 (~> 2.0) - * dependency2 - * - * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/ruby.js#L200-L250 - */ - private async parseGemfileLock( - projectPath: string, - ): Promise> { - const dependencies = new Map() - - try { - const lockfilePath = path.join(projectPath, 'Gemfile.lock') - const content = await fs.readFile(lockfilePath, 'utf8') - const lines = content.split('\n') - - let inSpecsSection = false - let currentGem: GemDependencyInfo | null = null - - for (const line of lines) { - // Check for specs section. - if (line.trim() === 'specs:') { - inSpecsSection = true - continue - } - - if (!inSpecsSection) { - continue - } - - // End of specs section. - if (line.match(/^[A-Z]/)) { - inSpecsSection = false - continue - } - - // Parse gem definition: " gem-name (1.0.0)" - const gemMatch = line.match(/^\s{4}(\S+)\s+\(([^)]+)\)/) - if (gemMatch) { - if (currentGem) { - dependencies.set(currentGem.name, currentGem) - } - currentGem = { - name: gemMatch[1], - version: gemMatch[2], - dependencies: [], - } - continue - } - - // Parse dependency: " dependency-name (~> 2.0)" - const depMatch = line.match(/^\s{6}(\S+)/) - if (depMatch && currentGem) { - currentGem.dependencies.push(depMatch[1]) - } - } - - // Add last gem. - if (currentGem) { - dependencies.set(currentGem.name, currentGem) - } - } catch { - // Gemfile.lock doesn't exist. - } - - return dependencies - } - - /** - * Build CycloneDX components. - * - * cdxgen reference: createRubyComponents() - * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/ruby.js#L280-L320 - */ - private buildComponents( - dependencies: Map, - options: ParseOptions, - ): Component[] { - const components: Component[] = [] - - for (const [name, dep] of dependencies.entries()) { - components.push({ - type: 'library', - 'bom-ref': `pkg:gem/${dep.name}@${dep.version}`, - name: dep.name, - version: dep.version, - purl: `pkg:gem/${dep.name}@${dep.version}`, - scope: 'required', - }) - } - - return components - } - - /** - * Build dependency graph. - * - * cdxgen reference: createRubyDependencyGraph() - * @see https://github.com/CycloneDX/cdxgen/blob/v11.11.0/lib/parsers/ruby.js#L340-L380 - */ - private buildDependencyGraph( - metadata: ProjectMetadata, - dependencies: Map, - ): Dependency[] { - const graph: Dependency[] = [] - - const rootRef = `pkg:gem/${metadata.name}@${metadata.version}` - const directDeps = Array.from(dependencies.values()).map( - d => `pkg:gem/${d.name}@${d.version}`, - ) - - graph.push({ - ref: rootRef, - dependsOn: directDeps, - }) - - for (const [name, dep] of dependencies.entries()) { - const ref = `pkg:gem/${dep.name}@${dep.version}` - const dependsOn = dep.dependencies - .map(depName => { - const transitive = dependencies.get(depName) - return transitive - ? 
`pkg:gem/${transitive.name}@${transitive.version}`
-            : null
-        })
-        .filter((purl): purl is string => purl !== null)
-
-      if (dependsOn.length > 0) {
-        graph.push({ ref, dependsOn })
-      }
-    }
-
-    return graph
-  }
-}
diff --git a/packages/sbom-generator/src/types/index.mts b/packages/sbom-generator/src/types/index.mts
deleted file mode 100644
index fa783aa52..000000000
--- a/packages/sbom-generator/src/types/index.mts
+++ /dev/null
@@ -1,75 +0,0 @@
-/**
- * Type Exports
- *
- * Re-export all SBOM and parser types.
- */
-
-export type {
-  Advisory,
-  AggregateType,
-  AnalysisJustification,
-  AnalysisResponse,
-  AnalysisState,
-  AttachedText,
-  Callstack,
-  Commit,
-  Component,
-  ComponentEvidence,
-  ComponentType,
-  Composition,
-  Copyright,
-  DataClassification,
-  DataFlow,
-  Dependency,
-  Diff,
-  Encoding,
-  ExternalReference,
-  ExternalReferenceType,
-  Frame,
-  Hash,
-  HashAlgorithm,
-  IdentifiableAction,
-  Identity,
-  IdentityField,
-  IdentityMethod,
-  Issue,
-  IssueType,
-  License,
-  LicenseChoice,
-  Metadata,
-  Occurrence,
-  OrganizationalContact,
-  OrganizationalEntity,
-  Patch,
-  PatchType,
-  Pedigree,
-  Property,
-  PublicKey,
-  Sbom,
-  Scope,
-  Service,
-  Signature,
-  SignatureAlgorithm,
-  Signer,
-  Swid,
-  Tool,
-  Vulnerability,
-  VulnerabilityAffect,
-  VulnerabilityAffectedStatus,
-  VulnerabilityAffectedVersionRange,
-  VulnerabilityAnalysis,
-  VulnerabilityCredit,
-  VulnerabilityRating,
-  VulnerabilityRatingMethod,
-  VulnerabilityReference,
-  VulnerabilitySeverity,
-  VulnerabilitySource,
-} from './sbom.mts'
-
-export type {
-  Ecosystem,
-  ParseOptions,
-  ParseResult,
-  Parser,
-  ProjectMetadata,
-} from './parser.mts'
diff --git a/packages/sbom-generator/src/types/parser.mts b/packages/sbom-generator/src/types/parser.mts
deleted file mode 100644
index dd47506e3..000000000
--- a/packages/sbom-generator/src/types/parser.mts
+++ /dev/null
@@ -1,131 +0,0 @@
-/**
- * Parser Interface Types
- *
- * Base types for ecosystem-specific parsers.
- */
-
-import type { Component, Dependency } from './sbom.mts'
-
-/**
- * Supported ecosystems.
- */
-export type Ecosystem =
-  | 'npm'
-  | 'pypi'
-  | 'maven'
-  | 'gradle'
-  | 'go'
-  | 'cargo'
-  | 'rubygems'
-  | 'packagist'
-  | 'nuget'
-
-/**
- * Base parser interface - all ecosystem parsers implement this.
- */
-export interface Parser {
-  /**
-   * Ecosystem identifier.
-   */
-  readonly ecosystem: Ecosystem
-
-  /**
-   * Detect if this parser can handle the given directory.
-   */
-  detect(projectPath: string): Promise<boolean>
-
-  /**
-   * Parse project and generate SBOM components.
-   */
-  parse(projectPath: string, options?: ParseOptions): Promise<ParseResult>
-}
-
-/**
- * Options for parsing.
- */
-export interface ParseOptions {
-  /**
-   * Include development dependencies.
-   */
-  includeDevDependencies?: boolean
-
-  /**
-   * Exclude development dependencies (used by the pypi parser).
-   */
-  excludeDev?: boolean
-
-  /**
-   * Include transitive dependencies (deep parsing).
-   */
-  deep?: boolean
-
-  /**
-   * Only parse lockfile, ignore manifest.
-   */
-  lockfileOnly?: boolean
-}
-
-/**
- * Result from parsing.
- */
-export interface ParseResult {
-  /**
-   * Ecosystem that was parsed.
-   */
-  ecosystem: Ecosystem
-
-  /**
-   * Project metadata from manifest.
-   */
-  metadata: ProjectMetadata
-
-  /**
-   * SBOM components (packages).
-   */
-  components: Component[]
-
-  /**
-   * Dependency relationships.
-   */
-  dependencies: Dependency[]
-}
-
-/**
- * Project metadata extracted from manifest.
- */
-export interface ProjectMetadata {
-  /**
-   * Project name.
-   */
-  name: string
-
-  /**
-   * Project version.
-   */
-  version: string
-
-  /**
-   * Project description.
- */ - description?: string - - /** - * Project homepage URL. - */ - homepage?: string - - /** - * Repository URL. - */ - repository?: string - - /** - * License identifier (SPDX). - */ - license?: string - - /** - * Authors/maintainers. - */ - authors?: string[] - - /** - * Keywords/tags. - */ - keywords?: string[] -} diff --git a/packages/sbom-generator/src/types/sbom.mts b/packages/sbom-generator/src/types/sbom.mts deleted file mode 100644 index 6bbb1e631..000000000 --- a/packages/sbom-generator/src/types/sbom.mts +++ /dev/null @@ -1,546 +0,0 @@ -/** - * CycloneDX SBOM v1.5 Type Definitions - * - * Complete TypeScript types for CycloneDX Software Bill of Materials (SBOM) format. - * Spec: https://cyclonedx.org/docs/1.5/json/ - */ - -/** - * Root SBOM object. - */ -export interface Sbom { - bomFormat: 'CycloneDX' - specVersion: '1.5' - serialNumber?: string // urn:uuid format - version: number - metadata?: Metadata - components?: Component[] - services?: Service[] - dependencies?: Dependency[] - compositions?: Composition[] - vulnerabilities?: Vulnerability[] - properties?: Property[] -} - -/** - * SBOM metadata. - */ -export interface Metadata { - timestamp?: string // ISO 8601 - tools?: Tool[] - authors?: OrganizationalContact[] - component?: Component // Main project component - manufacture?: OrganizationalEntity - supplier?: OrganizationalEntity - licenses?: LicenseChoice[] - properties?: Property[] -} - -/** - * Tool that generated the SBOM. - */ -export interface Tool { - vendor?: string - name?: string - version?: string - hashes?: Hash[] - externalReferences?: ExternalReference[] -} - -/** - * Software component (package, library, application). - */ -export interface Component { - type: ComponentType - 'bom-ref'?: string // Unique identifier - supplier?: OrganizationalEntity - author?: string - publisher?: string - group?: string // Namespace/organization - name: string - version: string - description?: string - scope?: Scope - hashes?: Hash[] - licenses?: LicenseChoice[] - copyright?: string - purl?: string // Package URL - cpe?: string // Common Platform Enumeration - swid?: Swid - modified?: boolean - pedigree?: Pedigree - externalReferences?: ExternalReference[] - components?: Component[] // Nested components - evidence?: ComponentEvidence - properties?: Property[] - signature?: Signature -} - -export type ComponentType = - | 'application' - | 'framework' - | 'library' - | 'container' - | 'operating-system' - | 'device' - | 'firmware' - | 'file' - | 'machine-learning-model' - | 'data' - -export type Scope = 'required' | 'optional' | 'excluded' - -/** - * Service component. - */ -export interface Service { - 'bom-ref'?: string - provider?: OrganizationalEntity - group?: string - name: string - version?: string - description?: string - endpoints?: string[] - authenticated?: boolean - 'x-trust-boundary'?: boolean - data?: DataClassification[] - licenses?: LicenseChoice[] - externalReferences?: ExternalReference[] - properties?: Property[] - services?: Service[] // Nested services -} - -/** - * Dependency relationship. - */ -export interface Dependency { - ref: string // bom-ref of the component - dependsOn?: string[] // Array of bom-refs -} - -/** - * Composition describes component assemblies. 
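- *
- * Illustrative value:
- * { aggregate: 'incomplete_first_party_only', assemblies: ['pkg:npm/app@1.0.0'] }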
- */ -export interface Composition { - aggregate: AggregateType - assemblies?: string[] // bom-refs - dependencies?: string[] // bom-refs - signature?: Signature -} - -export type AggregateType = - | 'complete' - | 'incomplete' - | 'incomplete_first_party_only' - | 'incomplete_third_party_only' - | 'unknown' - | 'not_specified' - -/** - * Known vulnerability. - */ -export interface Vulnerability { - 'bom-ref'?: string - id?: string // CVE, GHSA, etc. - source?: VulnerabilitySource - references?: VulnerabilityReference[] - ratings?: VulnerabilityRating[] - cwes?: number[] // CWE IDs - description?: string - detail?: string - recommendation?: string - advisories?: Advisory[] - created?: string - published?: string - updated?: string - credits?: VulnerabilityCredit - tools?: Tool[] - analysis?: VulnerabilityAnalysis - affects?: VulnerabilityAffect[] - properties?: Property[] -} - -export interface VulnerabilitySource { - url?: string - name?: string -} - -export interface VulnerabilityReference { - id: string - source?: VulnerabilitySource -} - -export interface VulnerabilityRating { - source?: VulnerabilitySource - score?: number - severity?: VulnerabilitySeverity - method?: VulnerabilityRatingMethod - vector?: string - justification?: string -} - -export type VulnerabilitySeverity = - | 'critical' - | 'high' - | 'medium' - | 'low' - | 'info' - | 'none' - | 'unknown' - -export type VulnerabilityRatingMethod = - | 'CVSSv2' - | 'CVSSv3' - | 'CVSSv31' - | 'OWASP' - | 'other' - -export interface Advisory { - title?: string - url: string -} - -export interface VulnerabilityCredit { - organizations?: OrganizationalEntity[] - individuals?: OrganizationalContact[] -} - -export interface VulnerabilityAnalysis { - state?: AnalysisState - justification?: AnalysisJustification - response?: AnalysisResponse[] - detail?: string -} - -export type AnalysisState = - | 'resolved' - | 'resolved_with_pedigree' - | 'exploitable' - | 'in_triage' - | 'false_positive' - | 'not_affected' - -export type AnalysisJustification = - | 'code_not_present' - | 'code_not_reachable' - | 'requires_configuration' - | 'requires_dependency' - | 'requires_environment' - | 'protected_by_compiler' - | 'protected_at_runtime' - | 'protected_at_perimeter' - | 'protected_by_mitigating_control' - -export type AnalysisResponse = - | 'can_not_fix' - | 'will_not_fix' - | 'update' - | 'rollback' - | 'workaround_available' - -export interface VulnerabilityAffect { - ref: string // bom-ref - versions?: VulnerabilityAffectedVersionRange[] -} - -export interface VulnerabilityAffectedVersionRange { - version?: string - range?: string - status?: VulnerabilityAffectedStatus -} - -export type VulnerabilityAffectedStatus = 'affected' | 'unaffected' | 'unknown' - -/** - * Cryptographic hash. - */ -export interface Hash { - alg: HashAlgorithm - content: string -} - -export type HashAlgorithm = - | 'MD5' - | 'SHA-1' - | 'SHA-256' - | 'SHA-384' - | 'SHA-512' - | 'SHA3-256' - | 'SHA3-384' - | 'SHA3-512' - | 'BLAKE2b-256' - | 'BLAKE2b-384' - | 'BLAKE2b-512' - | 'BLAKE3' - -/** - * License information. - */ -export interface LicenseChoice { - license?: License - expression?: string // SPDX expression -} - -export interface License { - id?: string // SPDX ID - name?: string - text?: AttachedText - url?: string -} - -export interface AttachedText { - contentType?: string - encoding?: Encoding - content: string -} - -export type Encoding = 'base64' - -/** - * External reference (URL, repository, etc.). 
- */ -export interface ExternalReference { - url: string - type: ExternalReferenceType - comment?: string - hashes?: Hash[] -} - -export type ExternalReferenceType = - | 'vcs' - | 'issue-tracker' - | 'website' - | 'advisories' - | 'bom' - | 'mailing-list' - | 'social' - | 'chat' - | 'documentation' - | 'support' - | 'source-distribution' - | 'distribution' - | 'distribution-intake' - | 'license' - | 'build-meta' - | 'build-system' - | 'release-notes' - | 'security-contact' - | 'model-card' - | 'log' - | 'configuration' - | 'evidence' - | 'formulation' - | 'attestation' - | 'threat-model' - | 'adversary-model' - | 'risk-assessment' - | 'vulnerability-assertion' - | 'exploitability-statement' - | 'pentest-report' - | 'static-analysis-report' - | 'dynamic-analysis-report' - | 'runtime-analysis-report' - | 'component-analysis-report' - | 'maturity-report' - | 'certification-report' - | 'quality-metrics' - | 'codified-infrastructure' - | 'other' - -/** - * Organizational entity. - */ -export interface OrganizationalEntity { - name?: string - url?: string[] - contact?: OrganizationalContact[] -} - -export interface OrganizationalContact { - name?: string - email?: string - phone?: string -} - -/** - * Software identification (SWID) tag. - */ -export interface Swid { - tagId: string - name: string - version?: string - tagVersion?: number - patch?: boolean - text?: AttachedText - url?: string -} - -/** - * Component pedigree (ancestry and evolution). - */ -export interface Pedigree { - ancestors?: Component[] - descendants?: Component[] - variants?: Component[] - commits?: Commit[] - patches?: Patch[] - notes?: string -} - -export interface Commit { - uid?: string - url?: string - author?: IdentifiableAction - committer?: IdentifiableAction - message?: string -} - -export interface IdentifiableAction { - timestamp?: string - name?: string - email?: string -} - -export interface Patch { - type: PatchType - diff?: Diff - resolves?: Issue[] -} - -export type PatchType = 'unofficial' | 'monkey' | 'backport' | 'cherry-pick' - -export interface Diff { - text?: AttachedText - url?: string -} - -export interface Issue { - type: IssueType - id?: string - name?: string - description?: string - source?: VulnerabilitySource - references?: string[] -} - -export type IssueType = 'defect' | 'enhancement' | 'security' - -/** - * Component evidence. - */ -export interface ComponentEvidence { - licenses?: LicenseChoice[] - copyright?: Copyright[] - identity?: Identity - occurrences?: Occurrence[] - callstack?: Callstack -} - -export interface Copyright { - text: string -} - -export interface Identity { - field?: IdentityField - confidence?: number // 0.0 to 1.0 - methods?: IdentityMethod[] - tools?: Tool[] -} - -export type IdentityField = - | 'group' - | 'name' - | 'version' - | 'purl' - | 'cpe' - | 'swid' - | 'hash' - -export interface IdentityMethod { - technique?: string - confidence?: number - value?: string -} - -export interface Occurrence { - 'bom-ref'?: string - location?: string -} - -export interface Callstack { - frames?: Frame[] -} - -export interface Frame { - package?: string - module?: string - function?: string - parameters?: string[] - line?: number - column?: number - fullFilename?: string -} - -/** - * Data classification. - */ -export interface DataClassification { - flow: DataFlow - classification: string -} - -export type DataFlow = 'inbound' | 'outbound' | 'bi-directional' | 'unknown' - -/** - * Generic property (name-value pair). 
- */
-export interface Property {
-  name: string
-  value?: string
-}
-
-/**
- * Digital signature.
- */
-export interface Signature {
-  algorithm: SignatureAlgorithm
-  keyId?: string
-  publicKey?: PublicKey
-  certificatePath?: string[]
-  excludes?: string[]
-  signers?: Signer[]
-  value: string
-}
-
-export type SignatureAlgorithm =
-  | 'RS256'
-  | 'RS384'
-  | 'RS512'
-  | 'PS256'
-  | 'PS384'
-  | 'PS512'
-  | 'ES256'
-  | 'ES384'
-  | 'ES512'
-  | 'Ed25519'
-  | 'Ed448'
-  | 'HS256'
-  | 'HS384'
-  | 'HS512'
-
-export interface PublicKey {
-  kty?: string
-  crv?: string
-  x?: string
-  y?: string
-  n?: string
-  e?: string
-}
-
-export interface Signer {
-  algorithm?: SignatureAlgorithm
-  keyId?: string
-  publicKey?: PublicKey
-  certificatePath?: string[]
-  value?: string
-}
diff --git a/packages/sbom-generator/src/types/socket-facts.mts b/packages/sbom-generator/src/types/socket-facts.mts
deleted file mode 100644
index 1e1830366..000000000
--- a/packages/sbom-generator/src/types/socket-facts.mts
+++ /dev/null
@@ -1,132 +0,0 @@
-/**
- * Socket Facts Type Definitions
- *
- * Based on @coana-tech/cli's reachability analysis output.
- * These types define the structure of Socket Facts files which contain
- * vulnerability reachability data.
- */
-
-/**
- * PURL type from packageurl-js.
- */
-export type PURLType =
-  | 'npm'
-  | 'pypi'
-  | 'cargo'
-  | 'gem'
-  | 'golang'
-  | 'maven'
-  | 'nuget'
-  | 'generic'
-
-/**
- * Reachability state for a vulnerability.
- */
-export type ReachabilityState =
-  | 'reachable'
-  | 'unreachable'
-  | 'pending'
-  | 'error'
-  | 'missing_support'
-  | 'undeterminable_reachability'
-
-/**
- * Source location in a file.
- */
-export interface SourceLocation {
-  file: string
-  start: {
-    line: number
-    column: number
-  }
-  end?: {
-    line: number
-    column: number
-  }
-}
-
-/**
- * Call stack entry for function-level reachability.
- */
-export interface CallStackEntry {
-  purl?: string
-  package: string
-  sourceLocation: SourceLocation
-  confidence: number
-}
-
-/**
- * Class stack entry for class-level reachability.
- */
-export interface ClassStackEntry {
-  purl?: string
-  package: string
-  class: string
-  confidence?: number
-}
-
-/**
- * Reachability analysis result for a specific vulnerability.
- */
-export interface Reachability {
-  vulnerability: string
-  state: ReachabilityState
-  confidence?: number
-  reason?: string
-  callStack?: CallStackEntry[]
-  classStack?: ClassStackEntry[]
-}
-
-/**
- * Manifest file reference with line number.
- */
-export interface ManifestReference {
-  file: string
-  lineNumber?: number
-}
-
-/**
- * Vulnerability with reachability data.
- */
-export interface SocketVulnerability {
-  ghsaId: string
-  range: string
-  reachabilityData: {
-    publicComment: string
-    pattern: string[]
-    undeterminableReachability: boolean
-  } | null
-}
-
-/**
- * Socket Fact Artifact (extends PURL).
- */
-export interface SocketFactArtifact {
-  // PURL fields.
-  type: PURLType
-  namespace?: string
-  name: string
-  version?: string
-  qualifiers?: Record<string, string>
-
-  // Socket Facts fields.
-  id: string
-  reachability?: Reachability[]
-  direct: boolean
-  dev: boolean
-  dead: boolean
-  dependencies?: string[]
-  manifestFiles?: ManifestReference[]
-  vulnerabilities?: SocketVulnerability[]
-  files?: string
-  toplevelAncestors?: string[]
-}
-
-/**
- * Socket Facts top-level structure.
- */ -export interface SocketFacts { - skipEcosystems?: PURLType[] - components: SocketFactArtifact[] - tier1ReachabilityScanId?: string -} diff --git a/packages/sbom-generator/test/fixtures/go/go.mod b/packages/sbom-generator/test/fixtures/go/go.mod deleted file mode 100644 index 54f7033af..000000000 --- a/packages/sbom-generator/test/fixtures/go/go.mod +++ /dev/null @@ -1,17 +0,0 @@ -module github.com/example/test-go-app - -go 1.21 - -require ( - github.com/spf13/cobra v1.7.0 - github.com/spf13/viper v1.16.0 - gopkg.in/yaml.v3 v3.0.1 - github.com/pkg/errors v0.9.1 // indirect -) - -require ( - github.com/fsnotify/fsnotify v1.6.0 // indirect - github.com/hashicorp/hcl v1.0.0 // indirect -) - -replace github.com/old/module => github.com/new/module v1.2.3 diff --git a/packages/sbom-generator/test/fixtures/go/go.sum b/packages/sbom-generator/test/fixtures/go/go.sum deleted file mode 100644 index a9d4fc79b..000000000 --- a/packages/sbom-generator/test/fixtures/go/go.sum +++ /dev/null @@ -1,10 +0,0 @@ -github.com/spf13/cobra v1.7.0 h1:example123 -github.com/spf13/cobra v1.7.0/go.mod h1:example456 -github.com/spf13/viper v1.16.0 h1:example789 -github.com/spf13/viper v1.16.0/go.mod h1:exampleabc -gopkg.in/yaml.v3 v3.0.1 h1:exampledef -gopkg.in/yaml.v3 v3.0.1/go.mod h1:exampleghi -github.com/pkg/errors v0.9.1 h1:examplejkl -github.com/pkg/errors v0.9.1/go.mod h1:examplemno -github.com/fsnotify/fsnotify v1.6.0 h1:examplepqr -github.com/hashicorp/hcl v1.0.0 h1:examplestu diff --git a/packages/sbom-generator/test/fixtures/python/Pipfile.lock b/packages/sbom-generator/test/fixtures/python/Pipfile.lock deleted file mode 100644 index 87f66290b..000000000 --- a/packages/sbom-generator/test/fixtures/python/Pipfile.lock +++ /dev/null @@ -1,39 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "abc123def456" - }, - "pipfile-spec": 6, - "requires": { - "python_version": "3.8" - } - }, - "default": { - "certifi": { - "version": "==2023.7.22", - "hashes": [ - "sha256:example123" - ] - }, - "requests": { - "version": "==2.31.0", - "hashes": [ - "sha256:example456" - ] - }, - "urllib3": { - "version": "==2.0.4", - "hashes": [ - "sha256:example789" - ] - } - }, - "develop": { - "pytest": { - "version": "==7.4.0", - "hashes": [ - "sha256:exampleabc" - ] - } - } -} diff --git a/packages/sbom-generator/test/fixtures/python/poetry.lock b/packages/sbom-generator/test/fixtures/python/poetry.lock deleted file mode 100644 index 9bd2160c6..000000000 --- a/packages/sbom-generator/test/fixtures/python/poetry.lock +++ /dev/null @@ -1,51 +0,0 @@ -[[package]] -name = "certifi" -version = "2023.7.22" -description = "Python package for providing Mozilla's CA Bundle." -category = "main" -optional = false - -[[package]] -name = "charset-normalizer" -version = "3.2.0" -description = "The Real First Universal Charset Detector" -category = "main" -optional = false - -[[package]] -name = "idna" -version = "3.4" -description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" -optional = false - -[[package]] -name = "requests" -version = "2.31.0" -description = "Python HTTP for Humans." -category = "main" -optional = false - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[[package]] -name = "urllib3" -version = "2.0.4" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "main" -optional = false - -[[package]] -name = "pytest" -version = "7.4.0" -description = "pytest: simple powerful testing with Python" -category = "dev" -optional = false - -[metadata] -python-versions = "^3.8" -content-hash = "abc123" diff --git a/packages/sbom-generator/test/fixtures/python/pyproject-pep621.toml b/packages/sbom-generator/test/fixtures/python/pyproject-pep621.toml deleted file mode 100644 index 6d85bdb48..000000000 --- a/packages/sbom-generator/test/fixtures/python/pyproject-pep621.toml +++ /dev/null @@ -1,33 +0,0 @@ -[project] -name = "test-pep621-app" -version = "2.0.0" -description = "A test PEP 621 application" -readme = "README.md" -requires-python = ">=3.8" -license = {text = "MIT"} -keywords = ["test", "pep621"] -authors = [ - {name = "John Doe", email = "john@example.com"} -] -maintainers = [ - {name = "Jane Smith", email = "jane@example.com"} -] -classifiers = [ - "Development Status :: 4 - Beta", - "Programming Language :: Python" -] - -[project.urls] -Homepage = "https://example.com" -Repository = "https://github.com/example/test-pep621-app" -Documentation = "https://docs.example.com" - -[project.dependencies] -requests = ">=2.31.0" -numpy = ">=1.20.0" - -[project.optional-dependencies] -dev = [ - "pytest>=7.4.0", - "black>=23.0.0" -] diff --git a/packages/sbom-generator/test/fixtures/python/pyproject.toml b/packages/sbom-generator/test/fixtures/python/pyproject.toml deleted file mode 100644 index 16db09219..000000000 --- a/packages/sbom-generator/test/fixtures/python/pyproject.toml +++ /dev/null @@ -1,17 +0,0 @@ -[tool.poetry] -name = "test-python-app" -version = "1.0.0" -description = "A test Python application" -authors = ["Test Author "] -license = "MIT" -readme = "README.md" -homepage = "https://example.com" -repository = "https://github.com/example/test-python-app" -keywords = ["test", "python"] - -[tool.poetry.dependencies] -python = "^3.8" -requests = "^2.31.0" - -[tool.poetry.dev-dependencies] -pytest = "^7.4.0" diff --git a/packages/sbom-generator/test/fixtures/python/requirements.txt b/packages/sbom-generator/test/fixtures/python/requirements.txt deleted file mode 100644 index 1bf27bc54..000000000 --- a/packages/sbom-generator/test/fixtures/python/requirements.txt +++ /dev/null @@ -1,21 +0,0 @@ -# Core dependencies -certifi==2023.7.22 -charset-normalizer==3.2.0 -idna==3.4 -requests==2.31.0 -urllib3==2.0.4 - -# Optional dependencies with extras -flask[async]==2.3.0 - -# Version ranges -numpy>=1.20.0,<2.0.0 -pandas>=2.0.0 - -# With markers -pytest>=7.4.0; python_version >= "3.8" - -# Comments and blank lines - -# More deps -click==8.1.0 diff --git a/packages/sbom-generator/test/fixtures/rust/Cargo.lock b/packages/sbom-generator/test/fixtures/rust/Cargo.lock deleted file mode 100644 index dcd5c38f0..000000000 --- a/packages/sbom-generator/test/fixtures/rust/Cargo.lock +++ /dev/null @@ -1,81 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-version = 3 - -[[package]] -name = "test-rust-app" -version = "0.1.0" -dependencies = [ - "clap", - "serde", - "serde_json", - "tokio", -] - -[[package]] -name = "clap" -version = "4.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abc123def456" -dependencies = [ - "clap_derive", -] - -[[package]] -name = "clap_derive" -version = "4.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "def456abc789" - -[[package]] -name = "serde" -version = "1.0.188" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ghi789jkl012" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_derive" -version = "1.0.188" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "jkl012mno345" - -[[package]] -name = "serde_json" -version = "1.0.107" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "mno345pqr678" -dependencies = [ - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "tokio" -version = "1.32.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "pqr678stu901" -dependencies = [ - "pin-project-lite", -] - -[[package]] -name = "itoa" -version = "1.0.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "stu901vwx234" - -[[package]] -name = "ryu" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "vwx234yza567" - -[[package]] -name = "pin-project-lite" -version = "0.2.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "yza567bcd890" diff --git a/packages/sbom-generator/test/fixtures/rust/Cargo.toml b/packages/sbom-generator/test/fixtures/rust/Cargo.toml deleted file mode 100644 index 61a75d96f..000000000 --- a/packages/sbom-generator/test/fixtures/rust/Cargo.toml +++ /dev/null @@ -1,24 +0,0 @@ -[package] -name = "test-rust-app" -version = "0.1.0" -edition = "2021" -description = "A test Rust application" -homepage = "https://example.com" -repository = "https://github.com/example/test-rust-app" -license = "MIT" -authors = ["Test Author "] -keywords = ["test", "rust"] -categories = ["command-line-utilities"] - -[dependencies] -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0" -tokio = { version = "1.32", features = ["full"] } -clap = { version = "4.4", features = ["derive"] } - -[dev-dependencies] -criterion = "0.5" - -[features] -default = ["serde/derive"] -extra = ["tokio/rt-multi-thread"] diff --git a/packages/sbom-generator/test/fixtures/socket-facts-sample.json b/packages/sbom-generator/test/fixtures/socket-facts-sample.json deleted file mode 100644 index 96f948e9f..000000000 --- a/packages/sbom-generator/test/fixtures/socket-facts-sample.json +++ /dev/null @@ -1,252 +0,0 @@ -{ - "tier1ReachabilityScanId": "scan_abc123", - "components": [ - { - "type": "npm", - "name": "lodash", - "version": "4.17.15", - "id": "pkg:npm/lodash@4.17.15", - "direct": true, - "dev": false, - "dead": false, - "dependencies": [], - "manifestFiles": [ - { - "file": "package.json", - "lineNumber": 15 - } - ], - "vulnerabilities": [ - { - "ghsaId": "GHSA-29mw-wpgm-hmr9", - "range": "<4.17.21", - "reachabilityData": { - "publicComment": "Prototype pollution in lodash.merge", - "pattern": [ - "your-app/src/index.js:42:lodash.merge()", - "node_modules/lodash/merge.js:120:baseAssign()", - "node_modules/lodash/_baseAssign.js:15" - ], - "undeterminableReachability": false - } - }, - { - "ghsaId": "CVE-2020-28500", - "range": 
"<4.17.21", - "reachabilityData": { - "publicComment": "ReDoS in lodash", - "pattern": [ - "your-app/src/utils.js:88:lodash.template()", - "node_modules/lodash/template.js:45" - ], - "undeterminableReachability": false - } - } - ], - "reachability": [ - { - "vulnerability": "GHSA-29mw-wpgm-hmr9", - "state": "reachable", - "confidence": 0.95, - "callStack": [ - { - "package": "lodash", - "sourceLocation": { - "file": "your-app/src/index.js", - "start": { "line": 42, "column": 10 }, - "end": { "line": 42, "column": 25 } - }, - "confidence": 0.95 - }, - { - "package": "lodash", - "sourceLocation": { - "file": "node_modules/lodash/merge.js", - "start": { "line": 120, "column": 5 } - }, - "confidence": 0.95 - } - ] - }, - { - "vulnerability": "CVE-2020-28500", - "state": "reachable", - "confidence": 0.87, - "callStack": [ - { - "package": "lodash", - "sourceLocation": { - "file": "your-app/src/utils.js", - "start": { "line": 88, "column": 15 } - }, - "confidence": 0.87 - } - ] - } - ] - }, - { - "type": "npm", - "name": "axios", - "version": "0.21.0", - "id": "pkg:npm/axios@0.21.0", - "direct": true, - "dev": false, - "dead": false, - "dependencies": [], - "manifestFiles": [ - { - "file": "package.json", - "lineNumber": 18 - } - ], - "vulnerabilities": [ - { - "ghsaId": "CVE-2021-3749", - "range": "<0.21.4", - "reachabilityData": { - "publicComment": "Server-side request forgery (SSRF)", - "pattern": [ - "your-app/src/api.js:15:axios.get()", - "node_modules/axios/lib/core/Axios.js:50" - ], - "undeterminableReachability": false - } - } - ], - "reachability": [ - { - "vulnerability": "CVE-2021-3749", - "state": "reachable", - "confidence": 0.87, - "callStack": [ - { - "package": "axios", - "sourceLocation": { - "file": "your-app/src/api.js", - "start": { "line": 15, "column": 20 } - }, - "confidence": 0.87 - }, - { - "package": "axios", - "sourceLocation": { - "file": "node_modules/axios/lib/core/Axios.js", - "start": { "line": 50, "column": 10 } - }, - "confidence": 0.87 - } - ] - } - ] - }, - { - "type": "npm", - "name": "xmldom", - "version": "0.5.0", - "id": "pkg:npm/xmldom@0.5.0", - "direct": false, - "dev": true, - "dead": true, - "dependencies": [], - "manifestFiles": [ - { - "file": "package.json", - "lineNumber": 45 - } - ], - "toplevelAncestors": ["pkg:npm/jest@27.0.0"], - "vulnerabilities": [ - { - "ghsaId": "CVE-2021-32796", - "range": "<0.6.0", - "reachabilityData": { - "publicComment": "Prototype pollution via parseFromString", - "pattern": [], - "undeterminableReachability": false - } - } - ], - "reachability": [ - { - "vulnerability": "CVE-2021-32796", - "state": "unreachable", - "confidence": 0.92, - "reason": "Dead code - transitive dependency of testing library, never imported" - } - ] - }, - { - "type": "npm", - "name": "yargs-parser", - "version": "15.0.0", - "id": "pkg:npm/yargs-parser@15.0.0", - "direct": false, - "dev": true, - "dead": true, - "dependencies": [], - "manifestFiles": [ - { - "file": "package.json", - "lineNumber": 52 - } - ], - "toplevelAncestors": ["pkg:npm/jest@27.0.0"], - "vulnerabilities": [ - { - "ghsaId": "CVE-2020-7608", - "range": "<18.1.2", - "reachabilityData": { - "publicComment": "Prototype pollution vulnerability", - "pattern": [], - "undeterminableReachability": false - } - } - ], - "reachability": [ - { - "vulnerability": "CVE-2020-7608", - "state": "unreachable", - "confidence": 0.88, - "reason": "Dev dependency, not bundled in production" - } - ] - }, - { - "type": "npm", - "name": "debug", - "version": "4.1.0", - "id": 
"pkg:npm/debug@4.1.0", - "direct": false, - "dev": false, - "dead": true, - "dependencies": [], - "manifestFiles": [ - { - "file": "package.json", - "lineNumber": 22 - } - ], - "toplevelAncestors": ["pkg:npm/express@4.17.1"], - "vulnerabilities": [ - { - "ghsaId": "CVE-2020-7698", - "range": "<4.3.1", - "reachabilityData": { - "publicComment": "ReDoS vulnerability in debug", - "pattern": [], - "undeterminableReachability": false - } - } - ], - "reachability": [ - { - "vulnerability": "CVE-2020-7698", - "state": "unreachable", - "confidence": 0.92, - "reason": "Transitive dependency, never imported in user code" - } - ] - } - ] -} diff --git a/packages/sbom-generator/tsconfig.json b/packages/sbom-generator/tsconfig.json deleted file mode 100644 index 36313d76d..000000000 --- a/packages/sbom-generator/tsconfig.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "extends": "../../.config/tsconfig.base.json", - "include": ["src/**/*.mts", "src/**/*.d.ts"], - "exclude": [ - "src/**/*.test.mts", - "src/parsers/actions/**", - "src/parsers/cargo/**", - "src/parsers/chrome/**", - "src/parsers/go/**", - "src/parsers/huggingface/**", - "src/parsers/maven/**", - "src/parsers/nuget/**", - "src/parsers/openvsx/**", - "src/parsers/pypi/**", - "src/parsers/rubygems/**", - "node_modules/**" - ] -} diff --git a/packages/sdk/SECURITY.md b/packages/sdk/docs/security.md similarity index 100% rename from packages/sdk/SECURITY.md rename to packages/sdk/docs/security.md diff --git a/packages/sdk/package.json b/packages/sdk/package.json index 739f17ff2..32d49eb12 100644 --- a/packages/sdk/package.json +++ b/packages/sdk/package.json @@ -60,7 +60,7 @@ "@eslint/compat": "catalog:", "@eslint/js": "catalog:", "@socketregistry/packageurl-js": "catalog:", - "@socketsecurity/lib": "workspace:*", + "@socketsecurity/lib-internal": "workspace:*", "@types/babel__traverse": "catalog:", "@types/node": "catalog:", "@typescript/native-preview": "catalog:", diff --git a/packages/socket/package.json b/packages/socket/package.json index 9bcdf96ae..3babf0927 100644 --- a/packages/socket/package.json +++ b/packages/socket/package.json @@ -50,7 +50,7 @@ "socket-yarn": "dist/bootstrap.js" }, "devDependencies": { - "@socketsecurity/lib": "workspace:*", + "@socketsecurity/lib-internal": "workspace:*", "esbuild": "catalog:" } } diff --git a/packages/yoga-layout/package.json b/packages/yoga-layout/package.json index 1bd5162dc..a8063a09e 100644 --- a/packages/yoga-layout/package.json +++ b/packages/yoga-layout/package.json @@ -15,6 +15,6 @@ }, "dependencies": { "@socketsecurity/build-infra": "workspace:*", - "@socketsecurity/lib": "workspace:*" + "@socketsecurity/lib-internal": "workspace:*" } } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 0df86e0a0..317fe2104 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -60,9 +60,6 @@ catalogs: '@gitbeaker/rest': specifier: 43.7.0 version: 43.7.0 - '@iarna/toml': - specifier: ^2.2.5 - version: 2.2.5 '@inquirer/confirm': specifier: 5.1.16 version: 5.1.16 @@ -201,9 +198,6 @@ catalogs: '@yarnpkg/extensions': specifier: 2.0.6 version: 2.0.6 - '@yarnpkg/parsers': - specifier: ^3.0.3 - version: 3.0.3 ajv-dist: specifier: 8.17.1 version: 8.17.1 @@ -264,9 +258,6 @@ catalogs: fast-sort: specifier: 3.4.1 version: 3.4.1 - fast-xml-parser: - specifier: ^5.3.1 - version: 5.3.1 get-east-asian-width: specifier: 1.3.0 version: 1.3.0 @@ -583,6 +574,12 @@ importers: '@socketsecurity/config': specifier: 'catalog:' version: 3.0.1 + '@socketsecurity/lib': + specifier: 3.2.8 + version: 3.2.8(typescript@5.9.3) + 
'@socketsecurity/lib-external': + specifier: npm:@socketsecurity/lib@3.2.8 + version: '@socketsecurity/lib@3.2.8(typescript@5.9.3)' '@socketsecurity/registry': specifier: 'catalog:' version: 2.0.2(typescript@5.9.3) @@ -811,9 +808,9 @@ importers: '@socketsecurity/cli': specifier: workspace:* version: link:../cli - '@socketsecurity/lib': + '@socketsecurity/lib-internal': specifier: workspace:* - version: link:../lib + version: link:../lib-internal del-cli: specifier: 'catalog:' version: 6.0.0 @@ -835,9 +832,9 @@ importers: '@babel/traverse': specifier: 'catalog:' version: 7.28.4 - '@socketsecurity/lib': + '@socketsecurity/lib-internal': specifier: workspace:* - version: link:../lib + version: link:../lib-internal magic-string: specifier: 'catalog:' version: 0.30.19 @@ -865,9 +862,9 @@ importers: '@socketsecurity/build-infra': specifier: workspace:* version: link:../build-infra - '@socketsecurity/lib': + '@socketsecurity/lib-internal': specifier: workspace:* - version: link:../lib + version: link:../lib-internal '@socketsecurity/sdk': specifier: workspace:* version: link:../sdk @@ -905,16 +902,7 @@ importers: specifier: 'catalog:' version: 8.0.0 - packages/codet5-models-builder: - dependencies: - '@socketsecurity/build-infra': - specifier: workspace:* - version: link:../build-infra - '@socketsecurity/lib': - specifier: workspace:* - version: link:../lib - - packages/lib: + packages/lib-internal: dependencies: '@babel/core': specifier: 'catalog:' @@ -1109,24 +1097,6 @@ importers: specifier: 'catalog:' version: 4.1.8 - packages/minilm-builder: - dependencies: - '@socketsecurity/build-infra': - specifier: workspace:* - version: link:../build-infra - '@socketsecurity/lib': - specifier: workspace:* - version: link:../lib - - packages/models: - dependencies: - '@socketsecurity/build-infra': - specifier: workspace:* - version: link:../build-infra - '@socketsecurity/lib': - specifier: workspace:* - version: link:../lib - packages/node-sea-builder: dependencies: '@socketsecurity/bootstrap': @@ -1143,53 +1113,6 @@ importers: specifier: 'catalog:' version: 4.0.3(@types/debug@4.1.12)(@types/node@24.9.2)(@vitest/ui@4.0.3)(jiti@2.6.1)(yaml@2.8.1) - packages/node-smol-builder: - dependencies: - '@socketsecurity/bootstrap': - specifier: workspace:* - version: link:../bootstrap - '@socketsecurity/build-infra': - specifier: workspace:* - version: link:../build-infra - '@socketsecurity/lib': - specifier: workspace:* - version: link:../lib - devDependencies: - vitest: - specifier: 'catalog:' - version: 4.0.3(@types/debug@4.1.12)(@types/node@24.9.2)(@vitest/ui@4.0.3)(jiti@2.6.1)(yaml@2.8.1) - - packages/onnxruntime: - dependencies: - '@socketsecurity/build-infra': - specifier: workspace:* - version: link:../build-infra - '@socketsecurity/lib': - specifier: workspace:* - version: link:../lib - - packages/sbom-generator: - dependencies: - '@iarna/toml': - specifier: 'catalog:' - version: 2.2.5 - '@socketsecurity/lib': - specifier: workspace:* - version: link:../lib - '@yarnpkg/parsers': - specifier: 'catalog:' - version: 3.0.3 - fast-xml-parser: - specifier: 'catalog:' - version: 5.3.1 - yaml: - specifier: 2.8.1 - version: 2.8.1 - devDependencies: - vitest: - specifier: 'catalog:' - version: 4.0.3(@types/debug@4.1.12)(@types/node@24.9.2)(@vitest/ui@4.0.3)(jiti@2.6.1)(yaml@2.8.1) - packages/sdk: dependencies: '@babel/generator': @@ -1216,9 +1139,9 @@ importers: '@socketregistry/packageurl-js': specifier: 'catalog:' version: 1.3.5 - '@socketsecurity/lib': + '@socketsecurity/lib-internal': specifier: workspace:* - 
version: link:../lib + version: link:../lib-internal '@types/babel__traverse': specifier: 'catalog:' version: 7.28.0 @@ -1303,9 +1226,9 @@ importers: packages/socket: devDependencies: - '@socketsecurity/lib': + '@socketsecurity/lib-internal': specifier: workspace:* - version: link:../lib + version: link:../lib-internal esbuild: specifier: 'catalog:' version: 0.25.11 @@ -1356,9 +1279,9 @@ importers: '@socketsecurity/build-infra': specifier: workspace:* version: link:../build-infra - '@socketsecurity/lib': + '@socketsecurity/lib-internal': specifier: workspace:* - version: link:../lib + version: link:../lib-internal packages: @@ -2878,6 +2801,15 @@ packages: typescript: optional: true + '@socketsecurity/lib@3.2.8': + resolution: {integrity: sha512-fGUwZUYmgnzZ2ayyQCh5PspTydjRAqP3Dpzk5dm5KOicjOeMAcT/x8XtaVkFqilCGsbzMoBTag8qWfODtfrxAQ==} + engines: {node: '>=22'} + peerDependencies: + typescript: '>=5.0.0' + peerDependenciesMeta: + typescript: + optional: true + '@socketsecurity/registry@2.0.2': resolution: {integrity: sha512-hGfteZxSnPN2gmOc9A5cJmyTZBumgMWmg2MVOMRmQjFwxVssk/Bs5dgETGGSOfWBmo/g1K5rBfPs1vE0n/SXMQ==} engines: {node: '>=18'} @@ -4300,10 +4232,6 @@ packages: fast-uri@3.1.0: resolution: {integrity: sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==} - fast-xml-parser@5.3.1: - resolution: {integrity: sha512-jbNkWiv2Ec1A7wuuxk0br0d0aTMUtQ4IkL+l/i1r9PRf6pLXjDgsBsWwO+UyczmQlnehi4Tbc8/KIvxGQe+I/A==} - hasBin: true - fastq@1.19.1: resolution: {integrity: sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==} @@ -6104,9 +6032,6 @@ packages: resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} engines: {node: '>=8'} - strnum@2.1.1: - resolution: {integrity: sha512-7ZvoFTiCnGxBtDqJ//Cu6fWtZtc7Y3x+QOirG15wztbdngGSkht27o2pyGWrVy0b4WAy3jbKmnoK6g5VlVNUUw==} - suffix-thumb@5.0.2: resolution: {integrity: sha512-I5PWXAFKx3FYnI9a+dQMWNqTxoRt6vdBdb0O+BJ1sxXCWtSoQCusc13E58f+9p4MYx/qCnEMkD5jac6K2j3dgA==} @@ -8436,6 +8361,10 @@ snapshots: optionalDependencies: typescript: 5.9.2 + '@socketsecurity/lib@3.2.8(typescript@5.9.3)': + optionalDependencies: + typescript: 5.9.3 + '@socketsecurity/registry@2.0.2(typescript@5.9.3)': optionalDependencies: typescript: 5.9.3 @@ -8705,10 +8634,10 @@ snapshots: '@types/yargs-parser@21.0.3': {} - '@typescript-eslint/eslint-plugin@8.44.1(@typescript-eslint/parser@8.44.1(eslint@9.35.0(jiti@2.6.1))(typescript@5.9.2))(eslint@9.35.0(jiti@2.6.1))(typescript@5.9.2)': + '@typescript-eslint/eslint-plugin@8.44.1(@typescript-eslint/parser@8.44.1(eslint@9.35.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.35.0(jiti@2.6.1))(typescript@5.9.2)': dependencies: '@eslint-community/regexpp': 4.12.2 - '@typescript-eslint/parser': 8.44.1(eslint@9.35.0(jiti@2.6.1))(typescript@5.9.2) + '@typescript-eslint/parser': 8.44.1(eslint@9.35.0(jiti@2.6.1))(typescript@5.9.3) '@typescript-eslint/scope-manager': 8.44.1 '@typescript-eslint/type-utils': 8.44.1(eslint@9.35.0(jiti@2.6.1))(typescript@5.9.2) '@typescript-eslint/utils': 8.44.1(eslint@9.35.0(jiti@2.6.1))(typescript@5.9.2) @@ -10194,10 +10123,6 @@ snapshots: fast-uri@3.1.0: {} - fast-xml-parser@5.3.1: - dependencies: - strnum: 2.1.1 - fastq@1.19.1: dependencies: reusify: 1.1.0 @@ -12085,8 +12010,6 @@ snapshots: strip-json-comments@3.1.1: {} - strnum@2.1.1: {} - suffix-thumb@5.0.2: {} supports-color@5.5.0: @@ -12363,7 +12286,7 @@ snapshots: 
typescript-eslint@8.44.1(eslint@9.35.0(jiti@2.6.1))(typescript@5.9.2): dependencies: - '@typescript-eslint/eslint-plugin': 8.44.1(@typescript-eslint/parser@8.44.1(eslint@9.35.0(jiti@2.6.1))(typescript@5.9.2))(eslint@9.35.0(jiti@2.6.1))(typescript@5.9.2) + '@typescript-eslint/eslint-plugin': 8.44.1(@typescript-eslint/parser@8.44.1(eslint@9.35.0(jiti@2.6.1))(typescript@5.9.3))(eslint@9.35.0(jiti@2.6.1))(typescript@5.9.2) '@typescript-eslint/parser': 8.44.1(eslint@9.35.0(jiti@2.6.1))(typescript@5.9.2) '@typescript-eslint/typescript-estree': 8.44.1(typescript@5.9.2) '@typescript-eslint/utils': 8.44.1(eslint@9.35.0(jiti@2.6.1))(typescript@5.9.2) diff --git a/scripts/build.mjs b/scripts/build.mjs index fe9ace0ef..c48a59850 100755 --- a/scripts/build.mjs +++ b/scripts/build.mjs @@ -23,9 +23,9 @@ import path from 'node:path' import process from 'node:process' import { fileURLToPath } from 'node:url' -import { WIN32 } from '@socketsecurity/lib/constants/platform' -import { getDefaultLogger } from '@socketsecurity/lib/logger' -import { spawn } from '@socketsecurity/lib/spawn' +import { WIN32 } from '@socketsecurity/lib-external/constants/platform' +import { getDefaultLogger } from '@socketsecurity/lib-external/logger' +import { spawn } from '@socketsecurity/lib-external/spawn' import colors from 'yoctocolors-cjs' @@ -75,6 +75,16 @@ const BUILD_PACKAGES = [ // filter: '@socketsecurity/onnxruntime', // outputCheck: 'packages/onnxruntime/dist/ort-wasm-simd.wasm', // }, + { + name: 'Lib Internal', + filter: '@socketsecurity/lib-internal', + outputCheck: 'packages/lib-internal/dist/logger.js', + }, + { + name: 'SDK', + filter: '@socketsecurity/sdk', + outputCheck: 'packages/sdk/dist/index.js', + }, { name: 'Yoga WASM', filter: '@socketsecurity/yoga', @@ -160,9 +170,11 @@ function showHelp() { logger.log(' pnpm run build --help # Show this help') logger.log('') logger.log('Default Build Order:') - logger.log(' 1. Yoga WASM (terminal layouts)') - logger.log(' 2. CLI Package (TypeScript compilation + bundling)') - logger.log(' 3. SEA Binary (Node.js Single Executable)') + logger.log(' 1. Lib Internal (shared utilities)') + logger.log(' 2. SDK (Socket SDK)') + logger.log(' 3. Yoga WASM (terminal layouts)') + logger.log(' 4. CLI Package (TypeScript compilation + bundling)') + logger.log(' 5. SEA Binary (Node.js Single Executable)') logger.log('') logger.log('Note: ONNX Runtime WASM temporarily disabled (build issues)') logger.log('')
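For reference, a minimal sketch of how BUILD_PACKAGES entries such as the new Lib Internal and SDK records above can be consumed. This is illustrative only: buildPackage is a hypothetical helper name, and the sketch uses stock node:child_process and node:fs rather than the repository's @socketsecurity/lib-external spawn/logger wrappers, whose exact signatures are not shown in this diff.

// sketch.mjs - hypothetical consumer of BUILD_PACKAGES-style entries.
import { spawnSync } from 'node:child_process'
import { existsSync } from 'node:fs'
import path from 'node:path'
import process from 'node:process'

// Build one entry via pnpm, then verify its expected artifact exists.
function buildPackage({ filter, name, outputCheck }) {
  console.log(`Building ${name} (${filter})...`)
  const result = spawnSync('pnpm', ['--filter', filter, 'run', 'build'], {
    stdio: 'inherit',
    // pnpm resolves to pnpm.cmd on Windows, which requires a shell.
    shell: process.platform === 'win32',
  })
  if (result.status !== 0) {
    throw new Error(`${name} build failed with exit code ${result.status}`)
  }
  // outputCheck is repo-relative, e.g. packages/lib-internal/dist/logger.js.
  if (!existsSync(path.resolve(outputCheck))) {
    throw new Error(`${name} build produced no output at ${outputCheck}`)
  }
}

// Order matters: lib-internal and the SDK must build before the CLI bundles them.
const order = [
  {
    name: 'Lib Internal',
    filter: '@socketsecurity/lib-internal',
    outputCheck: 'packages/lib-internal/dist/logger.js',
  },
  {
    name: 'SDK',
    filter: '@socketsecurity/sdk',
    outputCheck: 'packages/sdk/dist/index.js',
  },
]
for (const pkg of order) {
  buildPackage(pkg)
}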