From 38963e26f61a0e99756a2742805e9b79c6ac77b5 Mon Sep 17 00:00:00 2001 From: Sunny Kolattukudy Date: Sun, 5 Apr 2026 14:10:29 -0400 Subject: [PATCH 1/8] feat: add deps command group with CVE scanning and dependency auditing MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Implements pncli deps with frisk as the primary command and scan, diff, outdated, license-check, and connectivity as auxiliary commands. Replaces the artifactory stub. - deps frisk: scans all deps for CVEs via OSV.dev querybatch, returns structured remediation paths in JSON for agent consumption (Tier 3) - deps scan: local-only dependency inventory across npm, NuGet, Maven - deps diff: dep changes between two git refs using git show - deps outdated: latest versions via Artifactory REST (Tier 2) - deps license-check: license data per package via Artifactory (Tier 2) - deps connectivity: diagnoses which tier is available Parsers handle package-lock.json (v2/v3), yarn.lock, pnpm-lock.yaml, .csproj/packages.lock.json/Directory.Packages.props/packages.config, pom.xml, build.gradle, and gradle.lockfile. Artifactory config uses flat npmRepo/nugetRepo/mavenRepo fields. Each ecosystem repo is independently optional — missing repos are skipped silently. config init updated with opt-in Artifactory wizard section. 
Co-Authored-By: Claude Sonnet 4.6 --- src/cli.ts | 6 +- src/lib/config.ts | 16 ++ src/services/artifactory/commands.ts | 16 -- src/services/config/commands.ts | 49 ++++ src/services/deps/clients/artifactory.ts | 305 +++++++++++++++++++++++ src/services/deps/clients/osv.ts | 195 +++++++++++++++ src/services/deps/commands.ts | 155 ++++++++++++ src/services/deps/connectivity.ts | 82 ++++++ src/services/deps/diff.ts | 88 +++++++ src/services/deps/frisk.ts | 45 ++++ src/services/deps/license-check.ts | 52 ++++ src/services/deps/outdated.ts | 45 ++++ src/services/deps/parsers/index.ts | 145 +++++++++++ src/services/deps/parsers/maven.ts | 214 ++++++++++++++++ src/services/deps/parsers/npm.ts | 231 +++++++++++++++++ src/services/deps/parsers/nuget.ts | 205 +++++++++++++++ src/services/deps/scan.ts | 14 ++ src/services/deps/types.ts | 150 +++++++++++ src/types/config.ts | 10 + 19 files changed, 2004 insertions(+), 19 deletions(-) delete mode 100644 src/services/artifactory/commands.ts create mode 100644 src/services/deps/clients/artifactory.ts create mode 100644 src/services/deps/clients/osv.ts create mode 100644 src/services/deps/commands.ts create mode 100644 src/services/deps/connectivity.ts create mode 100644 src/services/deps/diff.ts create mode 100644 src/services/deps/frisk.ts create mode 100644 src/services/deps/license-check.ts create mode 100644 src/services/deps/outdated.ts create mode 100644 src/services/deps/parsers/index.ts create mode 100644 src/services/deps/parsers/maven.ts create mode 100644 src/services/deps/parsers/npm.ts create mode 100644 src/services/deps/parsers/nuget.ts create mode 100644 src/services/deps/scan.ts create mode 100644 src/services/deps/types.ts diff --git a/src/cli.ts b/src/cli.ts index 342bf88..ab40ad3 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -7,7 +7,7 @@ import { registerJiraCommands } from './services/jira/commands.js'; import { registerBitbucketCommands } from './services/bitbucket/commands.js'; import { 
registerConfluenceCommands } from './services/confluence/commands.js'; import { registerSonarCommands } from './services/sonar/commands.js'; -import { registerArtifactoryCommands } from './services/artifactory/commands.js'; +import { registerDepsCommands } from './services/deps/commands.js'; import { registerConfigCommands } from './services/config/commands.js'; const require = createRequire(import.meta.url); @@ -47,17 +47,17 @@ registerJiraCommands(program); registerBitbucketCommands(program); registerConfluenceCommands(program); registerSonarCommands(program); -registerArtifactoryCommands(program); +registerDepsCommands(program); registerConfigCommands(program); program.addHelpText('after', ` Services: git Local git operations (status, diff, log, branch) + deps Dependency scanning, CVE detection, license auditing jira Jira Data Cloud (coming soon) bitbucket Bitbucket Server (coming soon) confluence Confluence (coming soon) sonar SonarQube (coming soon) - artifactory Artifactory (coming soon) config Manage pncli configuration `); diff --git a/src/lib/config.ts b/src/lib/config.ts index c78e925..b51499d 100644 --- a/src/lib/config.ts +++ b/src/lib/config.ts @@ -12,6 +12,11 @@ const ENV_KEYS = { JIRA_API_TOKEN: 'PNCLI_JIRA_API_TOKEN', BITBUCKET_BASE_URL: 'PNCLI_BITBUCKET_BASE_URL', BITBUCKET_PAT: 'PNCLI_BITBUCKET_PAT', + ARTIFACTORY_BASE_URL: 'PNCLI_ARTIFACTORY_BASE_URL', + ARTIFACTORY_TOKEN: 'PNCLI_ARTIFACTORY_TOKEN', + ARTIFACTORY_REPO_NPM: 'PNCLI_ARTIFACTORY_REPO_NPM', + ARTIFACTORY_REPO_NUGET: 'PNCLI_ARTIFACTORY_REPO_NUGET', + ARTIFACTORY_REPO_MAVEN: 'PNCLI_ARTIFACTORY_REPO_MAVEN', CONFIG_PATH: 'PNCLI_CONFIG_PATH' } as const; @@ -89,6 +94,13 @@ export function loadConfig(opts: LoadConfigOptions = {}): ResolvedConfig { baseUrl: process.env[ENV_KEYS.BITBUCKET_BASE_URL] ?? globalConfig.bitbucket?.baseUrl, pat: process.env[ENV_KEYS.BITBUCKET_PAT] ?? globalConfig.bitbucket?.pat }, + artifactory: { + baseUrl: process.env[ENV_KEYS.ARTIFACTORY_BASE_URL] ?? 
globalConfig.artifactory?.baseUrl, + token: process.env[ENV_KEYS.ARTIFACTORY_TOKEN] ?? globalConfig.artifactory?.token, + npmRepo: process.env[ENV_KEYS.ARTIFACTORY_REPO_NPM] ?? globalConfig.artifactory?.npmRepo, + nugetRepo: process.env[ENV_KEYS.ARTIFACTORY_REPO_NUGET] ?? globalConfig.artifactory?.nugetRepo, + mavenRepo: process.env[ENV_KEYS.ARTIFACTORY_REPO_MAVEN] ?? globalConfig.artifactory?.mavenRepo + }, defaults: mergedDefaults }; } @@ -138,6 +150,10 @@ export function maskConfig(config: ResolvedConfig): unknown { bitbucket: { ...config.bitbucket, pat: config.bitbucket.pat ? '***' : undefined + }, + artifactory: { + ...config.artifactory, + token: config.artifactory.token ? '***' : undefined } }; } diff --git a/src/services/artifactory/commands.ts b/src/services/artifactory/commands.ts deleted file mode 100644 index 5a07625..0000000 --- a/src/services/artifactory/commands.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Command } from 'commander'; -import { success } from '../../lib/output.js'; - -export function registerArtifactoryCommands(program: Command): void { - program - .command('artifactory') - .description('Artifactory operations') - .action(() => { - success( - { message: 'Coming soon — the nightmare never ends.' 
}, - 'artifactory', - 'stub', - Date.now() - ); - }); -} diff --git a/src/services/config/commands.ts b/src/services/config/commands.ts index beecf71..7775e83 100644 --- a/src/services/config/commands.ts +++ b/src/services/config/commands.ts @@ -113,6 +113,46 @@ async function initGlobalConfig(start: number): Promise { message: 'Bitbucket personal access token:' }); + process.stderr.write('\n── Artifactory ───────────────────────────────────\n'); + const useArtifactory = await confirm({ + message: 'Configure Artifactory for dependency scanning (deps frisk, outdated, license-check)?', + default: false + }); + + let artifactoryBaseUrl = ''; + let artifactoryToken = ''; + let npmRepo = ''; + let nugetRepo = ''; + let mavenRepo = ''; + + if (useArtifactory) { + artifactoryBaseUrl = await input({ + message: 'Artifactory base URL (e.g. https://artifactory.company.com):', + default: '' + }); + + artifactoryToken = await password({ + message: 'Artifactory API token:' + }); + + process.stderr.write('\nConfigure which ecosystems you use (skip any that don\'t apply):\n'); + + const useNpm = await confirm({ message: ' Use npm packages from Artifactory?', default: true }); + if (useNpm) { + npmRepo = await input({ message: ' npm repository name:', default: 'npm-remote' }); + } + + const useNuget = await confirm({ message: ' Use NuGet packages from Artifactory?', default: false }); + if (useNuget) { + nugetRepo = await input({ message: ' NuGet repository name:', default: 'nuget-remote' }); + } + + const useMaven = await confirm({ message: ' Use Maven packages from Artifactory?', default: false }); + if (useMaven) { + mavenRepo = await input({ message: ' Maven repository name:', default: 'libs-release' }); + } + } + process.stderr.write('\n── Defaults ──────────────────────────────────────\n'); const jiraProject = await input({ message: 'Default Jira project key (optional):', @@ -143,6 +183,15 @@ async function initGlobalConfig(start: number): Promise { baseUrl: bitbucketBaseUrl 
|| undefined, pat: bitbucketPat || undefined }, + ...(useArtifactory ? { + artifactory: { + baseUrl: artifactoryBaseUrl || undefined, + token: artifactoryToken || undefined, + npmRepo: npmRepo || undefined, + nugetRepo: nugetRepo || undefined, + mavenRepo: mavenRepo || undefined + } + } : {}), defaults: { jira: { project: jiraProject || undefined diff --git a/src/services/deps/clients/artifactory.ts b/src/services/deps/clients/artifactory.ts new file mode 100644 index 0000000..f25f9b8 --- /dev/null +++ b/src/services/deps/clients/artifactory.ts @@ -0,0 +1,305 @@ +import type { Ecosystem, LicensedPackage, OutdatedPackage } from '../types.js'; +import type { ArtifactoryConfig } from '../../../types/config.js'; +import { PncliError } from '../../../lib/errors.js'; + +const TIMEOUT_MS = 15_000; + +async function fetchWithTimeout(url: string, token: string): Promise { + const controller = new AbortController(); + const timer = setTimeout(() => controller.abort(), TIMEOUT_MS); + try { + return await fetch(url, { + headers: { Authorization: `Bearer ${token}` }, + signal: controller.signal + }); + } finally { + clearTimeout(timer); + } +} + +export async function checkArtifactoryConnectivity(config: ArtifactoryConfig): Promise<{ + reachable: boolean; + authenticated: boolean; + configured: boolean; + error?: string; +}> { + if (!config.baseUrl && !config.token) { + return { + reachable: false, + authenticated: false, + configured: false, + error: 'Artifactory is not configured' + }; + } + if (!config.baseUrl) { + return { + reachable: false, + authenticated: false, + configured: false, + error: 'Artifactory baseUrl is not set' + }; + } + if (!config.token) { + return { + reachable: false, + authenticated: false, + configured: false, + error: 'Artifactory token is not set' + }; + } + + try { + const res = await fetchWithTimeout(`${config.baseUrl}/api/system/ping`, config.token); + if (res.status === 401 || res.status === 403) { + return { + reachable: true, + authenticated: 
false, + configured: true, + error: 'Artifactory token is invalid or lacks permissions' + }; + } + return { reachable: res.ok, authenticated: res.ok, configured: true }; + } catch (err) { + return { + reachable: false, + authenticated: false, + configured: true, + error: err instanceof Error ? err.message : String(err) + }; + } +} + +export function requireArtifactory(config: ArtifactoryConfig, command: string): void { + if (!config.baseUrl && !config.token) { + throw new PncliError( + `${command} requires Artifactory. Add the following to ~/.pncli/config.json:\n` + + ` "artifactory": { "baseUrl": "https://artifactory.company.com", "token": "your-token" }\n` + + `Or set PNCLI_ARTIFACTORY_BASE_URL and PNCLI_ARTIFACTORY_TOKEN environment variables.`, + 503 + ); + } + if (!config.baseUrl) { + throw new PncliError( + `${command} requires an Artifactory URL. Set artifactory.baseUrl in ~/.pncli/config.json or PNCLI_ARTIFACTORY_BASE_URL.`, + 503 + ); + } + if (!config.token) { + throw new PncliError( + `${command} requires an Artifactory token. Set artifactory.token in ~/.pncli/config.json or PNCLI_ARTIFACTORY_TOKEN.`, + 503 + ); + } +} + +function repoForEcosystem(config: ArtifactoryConfig, eco: Ecosystem): string | undefined { + if (eco === 'npm') return config.npmRepo; + if (eco === 'nuget') return config.nugetRepo; + if (eco === 'maven') return config.mavenRepo; + return undefined; +} + +async function getLatestNpm( + baseUrl: string, + token: string, + repoName: string, + packageName: string +): Promise { + try { + const encoded = packageName.startsWith('@') + ? packageName.replace('/', '%2F') + : packageName; + const res = await fetchWithTimeout(`${baseUrl}/api/npm/${repoName}/${encoded}`, token); + if (!res.ok) return null; + const data = (await res.json()) as { 'dist-tags'?: { latest?: string } }; + return data['dist-tags']?.latest ?? 
null; + } catch { + return null; + } +} + +async function getLatestNuget( + baseUrl: string, + token: string, + repoName: string, + packageName: string +): Promise { + try { + const lower = packageName.toLowerCase(); + const res = await fetchWithTimeout( + `${baseUrl}/api/nuget/v3/${repoName}/flatcontainer/${lower}/index.json`, + token + ); + if (!res.ok) return null; + const data = (await res.json()) as { versions?: string[] }; + const versions = data.versions ?? []; + return versions[versions.length - 1] ?? null; + } catch { + return null; + } +} + +async function getLatestMaven( + baseUrl: string, + token: string, + repoName: string, + packageName: string +): Promise { + const [groupId, artifactId] = packageName.split(':'); + if (!groupId || !artifactId) return null; + + try { + const res = await fetchWithTimeout( + `${baseUrl}/api/search/latestVersion?g=${encodeURIComponent(groupId)}&a=${encodeURIComponent(artifactId)}&repos=${repoName}`, + token + ); + if (!res.ok) return null; + const text = await res.text(); + return text.trim() || null; + } catch { + return null; + } +} + +function parseSemver(v: string): [number, number, number] { + const clean = v.replace(/[^0-9.]/g, ''); + const parts = clean.split('.').map(Number); + return [parts[0] ?? 0, parts[1] ?? 0, parts[2] ?? 
0]; +} + +function getUpdateType(current: string, latest: string): 'major' | 'minor' | 'patch' { + const [cMaj, cMin] = parseSemver(current); + const [lMaj, lMin] = parseSemver(latest); + if (lMaj > cMaj) return 'major'; + if (lMin > cMin) return 'minor'; + return 'patch'; +} + +function isNewer(current: string, latest: string): boolean { + const [cMaj, cMin, cPat] = parseSemver(current); + const [lMaj, lMin, lPat] = parseSemver(latest); + if (lMaj !== cMaj) return lMaj > cMaj; + if (lMin !== cMin) return lMin > cMin; + return lPat > cPat; +} + +export async function getOutdatedPackages( + packages: Array<{ name: string; version: string; ecosystem: Ecosystem; source: string }>, + config: ArtifactoryConfig, + filterType?: 'major' | 'minor' | 'patch' +): Promise { + const { baseUrl, token } = config; + if (!baseUrl || !token) return []; + + const outdated: OutdatedPackage[] = []; + + for (const pkg of packages) { + const repoName = repoForEcosystem(config, pkg.ecosystem); + if (!repoName) continue; // ecosystem repo not configured — skip silently + + let latest: string | null = null; + if (pkg.ecosystem === 'npm') { + latest = await getLatestNpm(baseUrl, token, repoName, pkg.name); + } else if (pkg.ecosystem === 'nuget') { + latest = await getLatestNuget(baseUrl, token, repoName, pkg.name); + } else if (pkg.ecosystem === 'maven') { + latest = await getLatestMaven(baseUrl, token, repoName, pkg.name); + } + + if (!latest || !isNewer(pkg.version, latest)) continue; + + const updateType = getUpdateType(pkg.version, latest); + if (filterType) { + const order = { major: 3, minor: 2, patch: 1 }; + if (order[updateType] < order[filterType]) continue; + } + + outdated.push({ + name: pkg.name, + ecosystem: pkg.ecosystem, + current: pkg.version, + latest, + updateType, + source: pkg.source, + availableInArtifactory: true + }); + } + + return outdated; +} + +async function getLicenseNpm( + baseUrl: string, + token: string, + repoName: string, + packageName: string +): Promise { 
+ try { + const encoded = packageName.startsWith('@') + ? packageName.replace('/', '%2F') + : packageName; + const res = await fetchWithTimeout(`${baseUrl}/api/npm/${repoName}/${encoded}`, token); + if (!res.ok) return null; + const data = (await res.json()) as { license?: string | { type?: string } }; + if (typeof data.license === 'string') return data.license; + if (typeof data.license === 'object' && data.license !== null) return data.license.type ?? null; + return null; + } catch { + return null; + } +} + +async function getLicenseNuget( + baseUrl: string, + token: string, + repoName: string, + packageName: string, + version: string +): Promise { + try { + const lower = packageName.toLowerCase(); + const res = await fetchWithTimeout( + `${baseUrl}/api/nuget/v3/${repoName}/registration/${lower}/${version}.json`, + token + ); + if (!res.ok) return null; + const data = (await res.json()) as { licenseExpression?: string; licenseUrl?: string }; + return data.licenseExpression ?? (data.licenseUrl ? 
'See licenseUrl' : null); + } catch { + return null; + } +} + +export async function getLicensedPackages( + packages: Array<{ name: string; version: string; ecosystem: Ecosystem; source: string }>, + config: ArtifactoryConfig +): Promise { + const { baseUrl, token } = config; + if (!baseUrl || !token) return []; + + const result: LicensedPackage[] = []; + + for (const pkg of packages) { + const repoName = repoForEcosystem(config, pkg.ecosystem); + let license: string | null = null; + + if (repoName) { + if (pkg.ecosystem === 'npm') { + license = await getLicenseNpm(baseUrl, token, repoName, pkg.name); + } else if (pkg.ecosystem === 'nuget') { + license = await getLicenseNuget(baseUrl, token, repoName, pkg.name, pkg.version); + } + // Maven: fetching license from POM via Artifactory is complex — emit null + } + + result.push({ + name: pkg.name, + version: pkg.version, + ecosystem: pkg.ecosystem, + source: pkg.source, + license + }); + } + + return result; +} diff --git a/src/services/deps/clients/osv.ts b/src/services/deps/clients/osv.ts new file mode 100644 index 0000000..fa64099 --- /dev/null +++ b/src/services/deps/clients/osv.ts @@ -0,0 +1,195 @@ +import type { Package, OsvVulnerability, VulnerablePackage } from '../types.js'; + +const OSV_URL = 'https://api.osv.dev'; +const BATCH_SIZE = 500; +const TIMEOUT_MS = 30_000; + +const OSV_ECOSYSTEM: Record = { + npm: 'npm', + nuget: 'NuGet', + maven: 'Maven' +}; + +interface OsvQuery { + package: { name: string; ecosystem: string }; + version: string; +} + +interface OsvRange { + type: string; + events?: Array<{ introduced?: string; fixed?: string; last_affected?: string }>; +} + +interface OsvAffected { + package?: { name: string; ecosystem: string }; + ranges?: OsvRange[]; +} + +interface OsvSeverityEntry { + type: string; + score: string; +} + +interface OsvRawVuln { + id: string; + summary?: string; + aliases?: string[]; + references?: Array<{ url?: string }>; + affected?: OsvAffected[]; + severity?: 
OsvSeverityEntry[]; + database_specific?: Record; +} + +interface OsvBatchResponse { + results: Array<{ vulns?: OsvRawVuln[] }>; +} + +async function fetchWithTimeout(url: string, init: RequestInit): Promise { + const controller = new AbortController(); + const timer = setTimeout(() => controller.abort(), TIMEOUT_MS); + try { + return await fetch(url, { ...init, signal: controller.signal }); + } finally { + clearTimeout(timer); + } +} + +export async function checkOsvConnectivity(): Promise<{ reachable: boolean; error?: string }> { + try { + const res = await fetchWithTimeout(`${OSV_URL}/v1/querybatch`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ queries: [] }) + }); + return { reachable: res.ok || res.status === 400 }; // 400 is fine, means it's up + } catch (err) { + return { + reachable: false, + error: err instanceof Error ? err.message : String(err) + }; + } +} + +function extractRemediation(vuln: OsvRawVuln, pkg: Package): OsvVulnerability['remediation'] { + const fixedVersions: string[] = []; + + for (const affected of vuln.affected ?? []) { + for (const range of affected.ranges ?? []) { + if (range.type === 'SEMVER' || range.type === 'ECOSYSTEM') { + for (const event of range.events ?? []) { + if (event.fixed) fixedVersions.push(event.fixed); + } + } + } + } + + const unique = [...new Set(fixedVersions)]; + const fix_available = unique.length > 0; + const advice = fix_available + ? `Upgrade ${pkg.name} to ${unique.join(' or ')}` + : `No fix available yet for ${pkg.name}. Monitor ${vuln.id} for updates.`; + + return { fix_available, fixed_versions: unique, advice }; +} + +function extractSeverity(vuln: OsvRawVuln): { severity: string; cvss: number | null } { + const dbSpecific = vuln.database_specific ?? {}; + const dbSeverity = typeof dbSpecific['severity'] === 'string' ? 
dbSpecific['severity'] : undefined; + + // Some databases put numeric CVSS in database_specific.cvss + let cvss: number | null = null; + const rawCvss = dbSpecific['cvss']; + if (typeof rawCvss === 'number') { + cvss = rawCvss; + } else if (rawCvss && typeof rawCvss === 'object' && 'score' in rawCvss) { + const score = (rawCvss as Record)['score']; + if (typeof score === 'number') cvss = score; + } + + // Try severity array for numeric scores + for (const s of vuln.severity ?? []) { + const score = parseFloat(s.score); + if (!isNaN(score) && score >= 0 && score <= 10) { + cvss = score; + break; + } + } + + return { + severity: dbSeverity ?? inferSeverityFromCvss(cvss), + cvss + }; +} + +function inferSeverityFromCvss(cvss: number | null): string { + if (cvss === null) return 'UNKNOWN'; + if (cvss >= 9.0) return 'CRITICAL'; + if (cvss >= 7.0) return 'HIGH'; + if (cvss >= 4.0) return 'MEDIUM'; + return 'LOW'; +} + +function mapVuln(vuln: OsvRawVuln, pkg: Package): OsvVulnerability { + const { severity, cvss } = extractSeverity(vuln); + return { + id: vuln.id, + summary: vuln.summary ?? '', + severity, + cvss, + aliases: vuln.aliases ?? [], + remediation: extractRemediation(vuln, pkg), + references: (vuln.references ?? []).map(r => r.url ?? '').filter(Boolean) + }; +} + +async function queryBatch(queries: OsvQuery[], packages: Package[]): Promise { + const res = await fetchWithTimeout(`${OSV_URL}/v1/querybatch`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ queries }) + }); + + if (!res.ok) { + throw new Error(`OSV.dev returned HTTP ${res.status}`); + } + + const data = (await res.json()) as OsvBatchResponse; + const vulnerable: VulnerablePackage[] = []; + + for (let i = 0; i < data.results.length; i++) { + const result = data.results[i]; + const pkg = packages[i]; + if (!result || !pkg) continue; + + const vulns = result.vulns ?? 
[]; + if (vulns.length === 0) continue; + + vulnerable.push({ + ...pkg, + vulnerabilities: vulns.map(v => mapVuln(v, pkg)) + }); + } + + return vulnerable; +} + +export async function checkPackagesForVulns(packages: Package[]): Promise { + const allVulnerable: VulnerablePackage[] = []; + + for (let i = 0; i < packages.length; i += BATCH_SIZE) { + const chunk = packages.slice(i, i + BATCH_SIZE); + + const queries: OsvQuery[] = chunk.map(pkg => ({ + package: { + name: pkg.name, + ecosystem: OSV_ECOSYSTEM[pkg.ecosystem] ?? pkg.ecosystem + }, + version: pkg.version + })); + + const results = await queryBatch(queries, chunk); + allVulnerable.push(...results); + } + + return allVulnerable; +} diff --git a/src/services/deps/commands.ts b/src/services/deps/commands.ts new file mode 100644 index 0000000..1d5799c --- /dev/null +++ b/src/services/deps/commands.ts @@ -0,0 +1,155 @@ +import { Command } from 'commander'; +import { success, fail } from '../../lib/output.js'; +import { loadConfig } from '../../lib/config.js'; +import type { ScanOptions, Ecosystem } from './types.js'; +import { runScan } from './scan.js'; +import { runDiff } from './diff.js'; +import { runFrisk } from './frisk.js'; +import { runOutdated } from './outdated.js'; +import { runLicenseCheck } from './license-check.js'; +import { buildConnectivityData } from './connectivity.js'; + +export function registerDepsCommands(program: Command): void { + const deps = program + .command('deps') + .description('Dependency scanning, CVE detection, and license auditing'); + + // ─── frisk (primary) ──────────────────────────────────────────────────────── + + deps + .command('frisk') + .description('Scan all dependencies for CVEs and output remediation paths (requires OSV.dev)') + .option('--ecosystem ', 'Filter to one ecosystem: npm, nuget, maven, all', 'all') + .option('--direct-only', 'Only scan direct dependencies (default: include transitive)', false) + .option('--include-dev', 'Include dev/test dependencies', 
false) + .action(async (opts: { ecosystem: string; directOnly: boolean; includeDev: boolean }, cmd: Command) => { + const startTime = Date.now(); + try { + const globalOpts = cmd.optsWithGlobals(); + const config = loadConfig({ configPath: globalOpts.config as string | undefined }); + const scanOpts: ScanOptions = { + ecosystem: opts.ecosystem as Ecosystem | 'all', + includeTransitive: !opts.directOnly, + includeDev: opts.includeDev + }; + const data = await runFrisk(config, scanOpts); + success(data, 'deps', 'frisk', startTime); + } catch (err) { + fail(err, 'deps', 'frisk', startTime); + } + }); + + // ─── scan ─────────────────────────────────────────────────────────────────── + + deps + .command('scan') + .description('Inventory all dependencies from manifest files (local only, no network)') + .option('--ecosystem ', 'Filter to one ecosystem: npm, nuget, maven, all', 'all') + .option('--include-transitive', 'Include transitive dependencies', false) + .option('--include-dev', 'Include dev/test dependencies', false) + .action((opts: { ecosystem: string; includeTransitive: boolean; includeDev: boolean }, cmd: Command) => { + const startTime = Date.now(); + try { + const globalOpts = cmd.optsWithGlobals(); + const config = loadConfig({ configPath: globalOpts.config as string | undefined }); + const scanOpts: ScanOptions = { + ecosystem: opts.ecosystem as Ecosystem | 'all', + includeTransitive: opts.includeTransitive, + includeDev: opts.includeDev + }; + const data = runScan(config, scanOpts); + success(data, 'deps', 'scan', startTime); + } catch (err) { + fail(err, 'deps', 'scan', startTime); + } + }); + + // ─── diff ─────────────────────────────────────────────────────────────────── + + deps + .command('diff') + .description('Show dependency changes between two git refs (local only, no network)') + .requiredOption('--from ', 'Base git ref (commit, tag, or branch)') + .option('--to ', 'Target git ref (default: working tree)') + .option('--ecosystem ', 'Filter to 
one ecosystem: npm, nuget, maven, all', 'all') + .option('--include-dev', 'Include dev/test dependencies', false) + .action((opts: { from: string; to?: string; ecosystem: string; includeDev: boolean }, cmd: Command) => { + const startTime = Date.now(); + try { + const globalOpts = cmd.optsWithGlobals(); + const config = loadConfig({ configPath: globalOpts.config as string | undefined }); + const scanOpts: ScanOptions = { + ecosystem: opts.ecosystem as Ecosystem | 'all', + includeTransitive: true, + includeDev: opts.includeDev + }; + const data = runDiff(config, opts.from, opts.to ?? null, scanOpts); + success(data, 'deps', 'diff', startTime); + } catch (err) { + fail(err, 'deps', 'diff', startTime); + } + }); + + // ─── outdated ─────────────────────────────────────────────────────────────── + + deps + .command('outdated') + .description('Check for newer versions available in Artifactory (requires Artifactory)') + .option('--ecosystem ', 'Filter to one ecosystem: npm, nuget, maven, all', 'all') + .option('--major', 'Only show major version bumps') + .option('--minor', 'Only show minor version bumps or higher') + .option('--patch', 'Only show patch version bumps or higher') + .action(async (opts: { ecosystem: string; major?: boolean; minor?: boolean; patch?: boolean }, cmd: Command) => { + const startTime = Date.now(); + try { + const globalOpts = cmd.optsWithGlobals(); + const config = loadConfig({ configPath: globalOpts.config as string | undefined }); + const scanOpts: ScanOptions = { ecosystem: opts.ecosystem as Ecosystem | 'all' }; + const filterType = opts.major ? 'major' : opts.minor ? 'minor' : opts.patch ? 
'patch' : undefined; + const data = await runOutdated(config, scanOpts, filterType); + success(data, 'deps', 'outdated', startTime); + } catch (err) { + fail(err, 'deps', 'outdated', startTime); + } + }); + + // ─── license-check ────────────────────────────────────────────────────────── + + deps + .command('license-check') + .description('Report licenses for all direct dependencies via Artifactory (requires Artifactory)') + .option('--ecosystem ', 'Filter to one ecosystem: npm, nuget, maven, all', 'all') + .option('--include-dev', 'Include dev/test dependencies', false) + .action(async (opts: { ecosystem: string; includeDev: boolean }, cmd: Command) => { + const startTime = Date.now(); + try { + const globalOpts = cmd.optsWithGlobals(); + const config = loadConfig({ configPath: globalOpts.config as string | undefined }); + const scanOpts: ScanOptions = { + ecosystem: opts.ecosystem as Ecosystem | 'all', + includeDev: opts.includeDev + }; + const data = await runLicenseCheck(config, scanOpts); + success(data, 'deps', 'license-check', startTime); + } catch (err) { + fail(err, 'deps', 'license-check', startTime); + } + }); + + // ─── connectivity ─────────────────────────────────────────────────────────── + + deps + .command('connectivity') + .description('Test network access to Artifactory and OSV.dev, report available tier') + .action(async (_opts: unknown, cmd: Command) => { + const startTime = Date.now(); + try { + const globalOpts = cmd.optsWithGlobals(); + const config = loadConfig({ configPath: globalOpts.config as string | undefined }); + const data = await buildConnectivityData(config); + success(data, 'deps', 'connectivity', startTime); + } catch (err) { + fail(err, 'deps', 'connectivity', startTime); + } + }); +} diff --git a/src/services/deps/connectivity.ts b/src/services/deps/connectivity.ts new file mode 100644 index 0000000..dbf5d81 --- /dev/null +++ b/src/services/deps/connectivity.ts @@ -0,0 +1,82 @@ +import type { ConnectivityData, Tier } from 
'./types.js'; +import type { ResolvedConfig } from '../../types/config.js'; +import { checkOsvConnectivity } from './clients/osv.js'; +import { checkArtifactoryConnectivity } from './clients/artifactory.js'; + +interface TierResult { + tier: Tier; + osvReachable: boolean; + artifactoryReachable: boolean; +} + +// Session-level cache — not persisted to disk +let cachedTier: TierResult | null = null; + +export async function detectTier(config: ResolvedConfig): Promise { + if (cachedTier) return cachedTier; + + const [osvResult, artResult] = await Promise.all([ + checkOsvConnectivity(), + checkArtifactoryConnectivity(config.artifactory) + ]); + + const artifactoryReachable = artResult.reachable && artResult.authenticated; + const osvReachable = osvResult.reachable; + + let tier: Tier = 'local'; + if (artifactoryReachable) tier = 'artifactory'; + if (osvReachable) tier = 'full'; + + cachedTier = { tier, osvReachable, artifactoryReachable }; + return cachedTier; +} + +export function clearTierCache(): void { + cachedTier = null; +} + +export async function buildConnectivityData(config: ResolvedConfig): Promise { + const [osvResult, artResult] = await Promise.all([ + checkOsvConnectivity(), + checkArtifactoryConnectivity(config.artifactory) + ]); + + const artCfg = config.artifactory; + + let tier: Tier = 'local'; + if (artResult.reachable && artResult.authenticated) tier = 'artifactory'; + if (osvResult.reachable) tier = 'full'; + + cachedTier = { + tier, + osvReachable: osvResult.reachable, + artifactoryReachable: artResult.reachable && artResult.authenticated + }; + + return { + artifactory: { + reachable: artResult.reachable, + url: artCfg.baseUrl ?? '(not configured)', + authenticated: artResult.authenticated, + repositories: { + npm: artCfg.npmRepo ?? '(not configured)', + nuget: artCfg.nugetRepo ?? '(not configured)', + maven: artCfg.mavenRepo ?? '(not configured)' + }, + ...(artResult.error ? 
{ error: artResult.error } : {}) + }, + osv: { + reachable: osvResult.reachable, + url: 'https://api.osv.dev', + ...(osvResult.error ? { error: osvResult.error } : {}) + }, + tier, + capabilities: { + scan: true, + diff: true, + outdated: artResult.reachable && artResult.authenticated, + licenseCheck: artResult.reachable && artResult.authenticated, + cveCheck: osvResult.reachable + } + }; +} diff --git a/src/services/deps/diff.ts b/src/services/deps/diff.ts new file mode 100644 index 0000000..5a639b9 --- /dev/null +++ b/src/services/deps/diff.ts @@ -0,0 +1,88 @@ +import type { ResolvedConfig } from '../../types/config.js'; +import type { ScanOptions, DiffData, PackageChange, ChangeType, Ecosystem } from './types.js'; +import { scanRepo, scanRepoAtRef } from './parsers/index.js'; +import { getRepoRoot } from '../../lib/git-context.js'; +import { PncliError } from '../../lib/errors.js'; + +export function runDiff( + config: ResolvedConfig, + from: string, + to: string | null, + opts: ScanOptions +): DiffData { + void config; + const repoRoot = getRepoRoot(); + if (!repoRoot) { + throw new PncliError('Not inside a git repository.', 1); + } + + const fromScan = scanRepoAtRef(repoRoot, from, opts); + const toScan = to ? 
scanRepoAtRef(repoRoot, to, opts) : scanRepo(repoRoot, opts); + + // Build maps keyed by ecosystem:name (collapse source differences) + type PkgKey = string; + const fromMap = new Map<PkgKey, string>(); // key → version + const toMap = new Map<PkgKey, string>(); + const sourceMap = new Map<PkgKey, string>(); + + for (const pkg of fromScan.packages) { + const key = `${pkg.ecosystem}:${pkg.name}`; + fromMap.set(key, pkg.version); + sourceMap.set(key, pkg.source); + } + for (const pkg of toScan.packages) { + const key = `${pkg.ecosystem}:${pkg.name}`; + toMap.set(key, pkg.version); + sourceMap.set(key, pkg.source); + } + + const changes: PackageChange[] = []; + const allKeys = new Set([...fromMap.keys(), ...toMap.keys()]); + + for (const key of allKeys) { + const [eco, ...nameParts] = key.split(':'); + const name = nameParts.join(':'); + const fromVer = fromMap.get(key) ?? null; + const toVer = toMap.get(key) ?? null; + + let change: ChangeType; + if (!fromVer) { + change = 'added'; + } else if (!toVer) { + change = 'removed'; + } else if (fromVer === toVer) { + continue; // unchanged — omit from output + } else { + change = isDowngrade(fromVer, toVer) ? 'downgraded' : 'upgraded'; + } + + changes.push({ + name, + ecosystem: eco as Ecosystem, + change, + from: fromVer, + to: toVer, + source: sourceMap.get(key) ?? '' + }); + } + + const summary = { added: 0, removed: 0, upgraded: 0, downgraded: 0, unchanged: 0 }; + for (const c of changes) summary[c.change]++; + summary.unchanged = fromMap.size - changes.filter(c => c.change !== 'added').length; + + return { from, to: to ?? 'working tree', changes, summary }; +} + +function parseSemver(v: string): [number, number, number] { + const clean = v.replace(/[^0-9.]/g, ''); + const parts = clean.split('.').map(Number); + return [parts[0] ?? 0, parts[1] ?? 0, parts[2] ??
0]; +} + +function isDowngrade(from: string, to: string): boolean { + const [fMaj, fMin, fPat] = parseSemver(from); + const [tMaj, tMin, tPat] = parseSemver(to); + if (tMaj !== fMaj) return tMaj < fMaj; + if (tMin !== fMin) return tMin < fMin; + return tPat < fPat; +} diff --git a/src/services/deps/frisk.ts b/src/services/deps/frisk.ts new file mode 100644 index 0000000..3a375f8 --- /dev/null +++ b/src/services/deps/frisk.ts @@ -0,0 +1,45 @@ +import type { ResolvedConfig } from '../../types/config.js'; +import type { ScanOptions, FriskData } from './types.js'; +import { scanRepo } from './parsers/index.js'; +import { checkPackagesForVulns } from './clients/osv.js'; +import { detectTier } from './connectivity.js'; +import { getRepoRoot } from '../../lib/git-context.js'; +import { PncliError } from '../../lib/errors.js'; + +export async function runFrisk(config: ResolvedConfig, opts: ScanOptions): Promise { + const repoRoot = getRepoRoot(); + if (!repoRoot) { + throw new PncliError('Not inside a git repository.', 1); + } + + const { tier, osvReachable } = await detectTier(config); + + if (!osvReachable) { + throw new PncliError( + 'deps frisk requires OSV.dev access but api.osv.dev is not reachable from this machine. ' + + 'Run \'pncli deps connectivity\' to diagnose.', + 503 + ); + } + + // Default frisk: include transitive deps (CVEs hide in transitive deps) + const scanOpts: ScanOptions = { + ...opts, + includeTransitive: opts.includeTransitive ?? 
true + }; + + const scan = scanRepo(repoRoot, scanOpts); + + if (scan.packages.length === 0) { + return { tier, scanned: 0, vulnerable: 0, packages: [] }; + } + + const vulnerable = await checkPackagesForVulns(scan.packages); + + return { + tier, + scanned: scan.packages.length, + vulnerable: vulnerable.length, + packages: vulnerable + }; +} diff --git a/src/services/deps/license-check.ts b/src/services/deps/license-check.ts new file mode 100644 index 0000000..04eb64e --- /dev/null +++ b/src/services/deps/license-check.ts @@ -0,0 +1,52 @@ +import type { ResolvedConfig } from '../../types/config.js'; +import type { ScanOptions, LicenseCheckData } from './types.js'; +import { scanRepo } from './parsers/index.js'; +import { getLicensedPackages, requireArtifactory } from './clients/artifactory.js'; +import { detectTier } from './connectivity.js'; +import { getRepoRoot } from '../../lib/git-context.js'; +import { PncliError } from '../../lib/errors.js'; + +export async function runLicenseCheck(config: ResolvedConfig, opts: ScanOptions): Promise { + const repoRoot = getRepoRoot(); + if (!repoRoot) { + throw new PncliError('Not inside a git repository.', 1); + } + + requireArtifactory(config.artifactory, 'deps license-check'); + + const { artifactoryReachable } = await detectTier(config); + if (!artifactoryReachable) { + throw new PncliError( + `Artifactory at ${config.artifactory.baseUrl} is not reachable or authentication failed. ` + + `Run 'pncli deps connectivity' to diagnose.`, + 503 + ); + } + + const scan = scanRepo(repoRoot, { ...opts, includeTransitive: false }); + const directPackages = scan.packages.filter(p => p.type === 'direct'); + + const licensed = await getLicensedPackages(directPackages, config.artifactory); + + const byLicense: Record = {}; + let unknown = 0; + + for (const pkg of licensed) { + if (!pkg.license) { + unknown++; + } else { + byLicense[pkg.license] = (byLicense[pkg.license] ?? 
0) + 1; + } + } + + return { + source: 'artifactory', + artifactoryUrl: config.artifactory.baseUrl ?? '', + packages: licensed, + summary: { + total: licensed.length, + byLicense, + unknown + } + }; +} diff --git a/src/services/deps/outdated.ts b/src/services/deps/outdated.ts new file mode 100644 index 0000000..2526a16 --- /dev/null +++ b/src/services/deps/outdated.ts @@ -0,0 +1,45 @@ +import type { ResolvedConfig } from '../../types/config.js'; +import type { ScanOptions, OutdatedData } from './types.js'; +import { scanRepo } from './parsers/index.js'; +import { getOutdatedPackages, requireArtifactory } from './clients/artifactory.js'; +import { detectTier } from './connectivity.js'; +import { getRepoRoot } from '../../lib/git-context.js'; +import { PncliError } from '../../lib/errors.js'; + +export async function runOutdated( + config: ResolvedConfig, + opts: ScanOptions, + filterType?: 'major' | 'minor' | 'patch' +): Promise { + const repoRoot = getRepoRoot(); + if (!repoRoot) { + throw new PncliError('Not inside a git repository.', 1); + } + + // Fail fast with a clear message if Artifactory is not configured at all + requireArtifactory(config.artifactory, 'deps outdated'); + + const { artifactoryReachable } = await detectTier(config); + if (!artifactoryReachable) { + throw new PncliError( + `Artifactory at ${config.artifactory.baseUrl} is not reachable or authentication failed. ` + + `Run 'pncli deps connectivity' to diagnose.`, + 503 + ); + } + + const scan = scanRepo(repoRoot, { ...opts, includeTransitive: false }); + const directPackages = scan.packages.filter(p => p.type === 'direct'); + + const outdated = await getOutdatedPackages(directPackages, config.artifactory, filterType); + + const summary = { total: outdated.length, major: 0, minor: 0, patch: 0 }; + for (const pkg of outdated) summary[pkg.updateType]++; + + return { + source: 'artifactory', + artifactoryUrl: config.artifactory.baseUrl ?? 
'', + outdated, + summary + }; +} diff --git a/src/services/deps/parsers/index.ts b/src/services/deps/parsers/index.ts new file mode 100644 index 0000000..32e0098 --- /dev/null +++ b/src/services/deps/parsers/index.ts @@ -0,0 +1,145 @@ +import fs from 'fs'; +import path from 'path'; +import { execSync } from 'child_process'; +import type { Package, ManifestInfo, ScanOptions, ScanData, Ecosystem } from '../types.js'; +import { findNpmManifests, parseNpmPackages } from './npm.js'; +import { findNugetManifests, parseNugetPackages } from './nuget.js'; +import { findMavenManifests, parseMavenPackages } from './maven.js'; + +function getRepoFiles(repoRoot: string): string[] { + try { + const out = execSync('git ls-files --cached --others --exclude-standard', { + encoding: 'utf8', + cwd: repoRoot + }); + return out.trim().split('\n').filter(Boolean); + } catch { + return []; + } +} + +function readFile(repoRoot: string, relPath: string): string | null { + try { + return fs.readFileSync(path.join(repoRoot, relPath), 'utf8'); + } catch { + return null; + } +} + +function readFileAtRef(repoRoot: string, ref: string, relPath: string): string | null { + try { + return execSync(`git show "${ref}":"${relPath}"`, { + encoding: 'utf8', + cwd: repoRoot, + stdio: ['pipe', 'pipe', 'pipe'] + }); + } catch { + return null; + } +} + +function parseManifests( + repoRoot: string, + manifests: ManifestInfo[], + opts: ScanOptions, + readFn: (relPath: string) => string | null +): Package[] { + const allPackages: Package[] = []; + const seen = new Set(); + + for (const manifest of manifests) { + const content = readFn(manifest.file); + if (!content) continue; + + const lockContent = manifest.lockFile ? readFn(manifest.lockFile) ?? 
undefined : undefined; + + let pkgs: Package[] = []; + + if (manifest.ecosystem === 'npm') { + pkgs = parseNpmPackages(content, manifest, opts, lockContent); + } else if (manifest.ecosystem === 'nuget') { + // For NuGet, also look for Directory.Packages.props in same or parent dir + const dir = path.dirname(manifest.file); + const propsPath = dir === '.' ? 'Directory.Packages.props' : `${dir}/Directory.Packages.props`; + const propsContent = readFn(propsPath) ?? undefined; + pkgs = parseNugetPackages(content, manifest, opts, lockContent, propsContent); + } else if (manifest.ecosystem === 'maven') { + pkgs = parseMavenPackages(content, manifest, opts, lockContent); + } + + for (const pkg of pkgs) { + const key = `${pkg.ecosystem}:${pkg.name}@${pkg.version}:${pkg.source}`; + if (!seen.has(key)) { + seen.add(key); + allPackages.push(pkg); + } + } + } + + return allPackages; +} + +export function scanRepo(repoRoot: string, opts: ScanOptions = {}): ScanData { + const files = getRepoFiles(repoRoot); + const eco = opts.ecosystem ?? 'all'; + + const npmManifests = eco === 'all' || eco === 'npm' ? findNpmManifests(files) : []; + const nugetManifests = eco === 'all' || eco === 'nuget' ? findNugetManifests(files) : []; + const mavenManifests = eco === 'all' || eco === 'maven' ? findMavenManifests(files) : []; + + const manifests = [...npmManifests, ...nugetManifests, ...mavenManifests]; + const packages = parseManifests(repoRoot, manifests, opts, rel => readFile(repoRoot, rel)); + + return buildScanData(manifests, packages); +} + +export function scanRepoAtRef(repoRoot: string, ref: string, opts: ScanOptions = {}): ScanData { + const eco = opts.ecosystem ?? 
'all'; + + // Get file list at that ref + let files: string[] = []; + try { + const out = execSync(`git ls-tree -r --name-only "${ref}"`, { + encoding: 'utf8', + cwd: repoRoot, + stdio: ['pipe', 'pipe', 'pipe'] + }); + files = out.trim().split('\n').filter(Boolean); + } catch { + return buildScanData([], []); + } + + const npmManifests = eco === 'all' || eco === 'npm' ? findNpmManifests(files) : []; + const nugetManifests = eco === 'all' || eco === 'nuget' ? findNugetManifests(files) : []; + const mavenManifests = eco === 'all' || eco === 'maven' ? findMavenManifests(files) : []; + + const manifests = [...npmManifests, ...nugetManifests, ...mavenManifests]; + const packages = parseManifests(repoRoot, manifests, opts, rel => readFileAtRef(repoRoot, ref, rel)); + + return buildScanData(manifests, packages); +} + +function buildScanData(manifests: ManifestInfo[], packages: Package[]): ScanData { + const ecosystemSet = new Set(manifests.map(m => m.ecosystem)); + const byEcosystem: Record = {}; + const byType: Record = { direct: 0, transitive: 0 }; + const byScope: Record = { production: 0, dev: 0 }; + + for (const pkg of packages) { + byEcosystem[pkg.ecosystem] = (byEcosystem[pkg.ecosystem] ?? 0) + 1; + byType[pkg.type] = (byType[pkg.type] ?? 0) + 1; + byScope[pkg.scope] = (byScope[pkg.scope] ?? 
0) + 1; + } + + return { + ecosystems: Array.from(ecosystemSet), + manifests, + packages, + summary: { + totalPackages: packages.length, + byEcosystem, + byType, + byScope + } + }; +} diff --git a/src/services/deps/parsers/maven.ts b/src/services/deps/parsers/maven.ts new file mode 100644 index 0000000..a7ce8a7 --- /dev/null +++ b/src/services/deps/parsers/maven.ts @@ -0,0 +1,214 @@ +import path from 'path'; +import type { Package, ManifestInfo, ScanOptions } from '../types.js'; + +export function findMavenManifests(files: string[]): ManifestInfo[] { + const lockFiles = new Set(files.filter(f => path.basename(f) === 'gradle.lockfile')); + const manifests: ManifestInfo[] = []; + + for (const file of files) { + const base = path.basename(file); + if (base === 'pom.xml' || base === 'build.gradle' || base === 'build.gradle.kts') { + const dir = path.dirname(file); + const lockCandidate = dir === '.' ? 'gradle.lockfile' : `${dir}/gradle.lockfile`; + const lockFile = lockFiles.has(lockCandidate) ? 
lockCandidate : undefined; + manifests.push({ file, ecosystem: 'maven', lockFile }); + } + } + + return manifests; +} + +export function parseMavenPackages( + content: string, + manifest: ManifestInfo, + opts: ScanOptions, + lockContent?: string +): Package[] { + const base = path.basename(manifest.file); + + if (lockContent && manifest.lockFile) { + return parseGradleLock(lockContent, manifest.lockFile, opts); + } + + if (base === 'pom.xml') { + return parsePomXml(content, manifest.file, opts); + } + + if (base === 'build.gradle' || base === 'build.gradle.kts') { + return parseBuildGradle(content, manifest.file, opts); + } + + return []; +} + +function parsePomXml(content: string, filePath: string, opts: ScanOptions): Package[] { + // Extract properties for variable substitution + const props = extractPomProperties(content); + + // Extract dependency management versions + const managedVersions = extractDependencyManagement(content, props); + + // Extract actual dependencies (skip dependencyManagement block) + const withoutMgmt = content.replace(/[\s\S]*?<\/dependencyManagement>/gi, ''); + + const packages: Package[] = []; + const depRegex = /([\s\S]*?)<\/dependency>/gi; + + for (const match of withoutMgmt.matchAll(depRegex)) { + const inner = match[1]; + const groupId = /\s*([^<\s]+)\s*<\/groupId>/i.exec(inner)?.[1]; + const artifactId = /\s*([^<\s]+)\s*<\/artifactId>/i.exec(inner)?.[1]; + if (!groupId || !artifactId) continue; + + const rawVersion = /\s*([^<\s]+)\s*<\/version>/i.exec(inner)?.[1]; + const resolvedVersion = resolveProperty(rawVersion ?? null, props) + ?? managedVersions.get(`${groupId}:${artifactId}`); + if (!resolvedVersion) continue; + + const scope = /\s*([^<\s]+)\s*<\/scope>/i.exec(inner)?.[1] ?? 
'compile'; + const isDev = scope === 'test' || scope === 'provided'; + if (isDev && !opts.includeDev) continue; + + packages.push({ + name: `${groupId}:${artifactId}`, + version: resolvedVersion, + ecosystem: 'maven', + source: filePath, + type: 'direct', + scope: isDev ? 'dev' : 'production' + }); + } + + return packages; +} + +function extractPomProperties(content: string): Map { + const props = new Map(); + const propsMatch = /([\s\S]*?)<\/properties>/i.exec(content); + if (!propsMatch) return props; + + const propRegex = /<([a-zA-Z0-9._-]+)>\s*([^<]+)\s*<\/\1>/g; + for (const m of propsMatch[1].matchAll(propRegex)) { + props.set(m[1], m[2].trim()); + } + return props; +} + +function extractDependencyManagement(content: string, props: Map): Map { + const versions = new Map(); + const mgmtMatch = /([\s\S]*?)<\/dependencyManagement>/i.exec(content); + if (!mgmtMatch) return versions; + + const depRegex = /([\s\S]*?)<\/dependency>/gi; + for (const m of mgmtMatch[1].matchAll(depRegex)) { + const inner = m[1]; + const groupId = /\s*([^<\s]+)\s*<\/groupId>/i.exec(inner)?.[1]; + const artifactId = /\s*([^<\s]+)\s*<\/artifactId>/i.exec(inner)?.[1]; + const rawVersion = /\s*([^<\s]+)\s*<\/version>/i.exec(inner)?.[1]; + if (!groupId || !artifactId || !rawVersion) continue; + const resolved = resolveProperty(rawVersion, props); + if (resolved) versions.set(`${groupId}:${artifactId}`, resolved); + } + + return versions; +} + +function resolveProperty(value: string | null, props: Map): string | null { + if (!value) return null; + return value.replace(/\$\{([^}]+)\}/g, (_, key: string) => props.get(key) ?? 
`\${${key}}`); +} + +const GRADLE_DEV_CONFIGS = new Set(['testImplementation', 'testCompileOnly', 'testRuntimeOnly', 'testApi']); + +function parseBuildGradle(content: string, filePath: string, opts: ScanOptions): Package[] { + const packages: Package[] = []; + const seen = new Set(); + + // String notation: implementation 'group:artifact:version' or "group:artifact:version" + const stringNotation = /\b(\w+)\s+['"]([^:'"]+):([^:'"]+):([^'"]+)['"]/g; + for (const m of content.matchAll(stringNotation)) { + const config = m[1]; + const groupId = m[2]; + const artifactId = m[3]; + const version = m[4].split('@')[0]; // strip classifier + + const isDev = GRADLE_DEV_CONFIGS.has(config); + if (isDev && !opts.includeDev) continue; + + const name = `${groupId}:${artifactId}`; + const key = `${name}@${version}`; + if (seen.has(key)) continue; + seen.add(key); + + packages.push({ + name, + version, + ecosystem: 'maven', + source: filePath, + type: 'direct', + scope: isDev ? 'dev' : 'production' + }); + } + + // Map notation: implementation group: 'x', name: 'y', version: 'z' + const mapNotation = /\b(\w+)\s+group:\s*['"]([^'"]+)['"]\s*,\s*name:\s*['"]([^'"]+)['"]\s*,\s*version:\s*['"]([^'"]+)['"]/g; + for (const m of content.matchAll(mapNotation)) { + const config = m[1]; + const groupId = m[2]; + const artifactId = m[3]; + const version = m[4]; + + const isDev = GRADLE_DEV_CONFIGS.has(config); + if (isDev && !opts.includeDev) continue; + + const name = `${groupId}:${artifactId}`; + const key = `${name}@${version}`; + if (seen.has(key)) continue; + seen.add(key); + + packages.push({ + name, + version, + ecosystem: 'maven', + source: filePath, + type: 'direct', + scope: isDev ? 
'dev' : 'production' + }); + } + + return packages; +} + +function parseGradleLock(content: string, filePath: string, opts: ScanOptions): Package[] { + const packages: Package[] = []; + const seen = new Set(); + + // Format: group:artifact:version=config1,config2 + const lineRegex = /^([^:#\s]+):([^:#\s]+):([^=\s]+)=(.+)$/gm; + + for (const m of content.matchAll(lineRegex)) { + const groupId = m[1]; + const artifactId = m[2]; + const version = m[3]; + const configs = m[4].split(',').map(s => s.trim()); + + const isDev = configs.every(c => GRADLE_DEV_CONFIGS.has(c)); + if (isDev && !opts.includeDev) continue; + + const name = `${groupId}:${artifactId}`; + const key = `${name}@${version}`; + if (seen.has(key)) continue; + seen.add(key); + + packages.push({ + name, + version, + ecosystem: 'maven', + source: filePath, + type: 'direct', + scope: isDev ? 'dev' : 'production' + }); + } + + return packages; +} diff --git a/src/services/deps/parsers/npm.ts b/src/services/deps/parsers/npm.ts new file mode 100644 index 0000000..98384ac --- /dev/null +++ b/src/services/deps/parsers/npm.ts @@ -0,0 +1,231 @@ +import path from 'path'; +import type { Package, ManifestInfo, ScanOptions } from '../types.js'; + +interface PackageJson { + dependencies?: Record; + devDependencies?: Record; +} + +interface PackageLockV2Meta { + version?: string; + dev?: boolean; + devOptional?: boolean; +} + +interface PackageLockV2 { + lockfileVersion?: number; + packages?: Record; +} + +export function findNpmManifests(files: string[]): ManifestInfo[] { + const lockFiles = new Set( + files.filter(f => { + const base = path.basename(f); + return base === 'package-lock.json' || base === 'yarn.lock' || base === 'pnpm-lock.yaml'; + }) + ); + + const manifests: ManifestInfo[] = []; + + for (const file of files) { + if (path.basename(file) !== 'package.json') continue; + if (file.split('/').includes('node_modules')) continue; + + const dir = path.dirname(file); + let lockFile: string | undefined; + + for 
(const lf of ['package-lock.json', 'yarn.lock', 'pnpm-lock.yaml']) { + const candidate = dir === '.' ? lf : `${dir}/${lf}`; + if (lockFiles.has(candidate)) { + lockFile = candidate; + break; + } + } + + manifests.push({ file, ecosystem: 'npm', lockFile }); + } + + return manifests; +} + +export function parseNpmPackages( + content: string, + manifest: ManifestInfo, + opts: ScanOptions, + lockContent?: string +): Package[] { + if (lockContent && manifest.lockFile) { + const base = path.basename(manifest.lockFile); + if (base === 'package-lock.json') { + const pkgs = parsePackageLock(lockContent, manifest.lockFile, opts); + if (pkgs.length > 0) return pkgs; + } else if (base === 'yarn.lock') { + return parseYarnLock(lockContent, manifest.lockFile, opts, content); + } else if (base === 'pnpm-lock.yaml') { + return parsePnpmLock(lockContent, manifest.lockFile, opts); + } + } + return parsePackageJson(content, manifest.file, opts); +} + +function stripVersionPrefix(v: string): string { + return v.replace(/^[\^~>=<* ]+/, '').split(/\s/)[0] ?? v; +} + +function parsePackageJson(content: string, filePath: string, opts: ScanOptions): Package[] { + let pkg: PackageJson; + try { + pkg = JSON.parse(content) as PackageJson; + } catch { + return []; + } + + const packages: Package[] = []; + + for (const [name, version] of Object.entries(pkg.dependencies ?? {})) { + packages.push({ + name, + version: stripVersionPrefix(version), + ecosystem: 'npm', + source: filePath, + type: 'direct', + scope: 'production' + }); + } + + if (opts.includeDev) { + for (const [name, version] of Object.entries(pkg.devDependencies ?? 
{})) { + packages.push({ + name, + version: stripVersionPrefix(version), + ecosystem: 'npm', + source: filePath, + type: 'direct', + scope: 'dev' + }); + } + } + + return packages; +} + +function parsePackageLock(content: string, lockFilePath: string, opts: ScanOptions): Package[] { + let lock: PackageLockV2; + try { + lock = JSON.parse(content) as PackageLockV2; + } catch { + return []; + } + + if ((lock.lockfileVersion ?? 1) < 2 || !lock.packages) return []; + + const packages: Package[] = []; + + for (const [pkgPath, meta] of Object.entries(lock.packages)) { + if (pkgPath === '') continue; + if (!meta.version) continue; + + const isDev = meta.dev === true || meta.devOptional === true; + if (isDev && !opts.includeDev) continue; + + // node_modules/express → direct + // node_modules/express/node_modules/qs → transitive + const withoutRoot = pkgPath.startsWith('node_modules/') + ? pkgPath.slice('node_modules/'.length) + : pkgPath; + const segments = withoutRoot.split('/node_modules/'); + const isTransitive = segments.length > 1; + if (isTransitive && !opts.includeTransitive) continue; + + const name = segments[segments.length - 1] ?? pkgPath; + + packages.push({ + name, + version: meta.version, + ecosystem: 'npm', + source: lockFilePath, + type: isTransitive ? 'transitive' : 'direct', + scope: isDev ? 'dev' : 'production' + }); + } + + return packages; +} + +function parseYarnLock( + content: string, + lockFilePath: string, + opts: ScanOptions, + packageJsonContent: string +): Package[] { + let pkgJson: PackageJson; + try { + pkgJson = JSON.parse(packageJsonContent) as PackageJson; + } catch { + pkgJson = {}; + } + + const devDeps = new Set(Object.keys(pkgJson.devDependencies ?? 
{})); + const packages: Package[] = []; + const seen = new Set<string>(); + + // Matches: "express@^4.21.0", "express@^4.0.0, express@^4.1.0": + // version "4.21.0" + const blockRegex = /^"?(@?[^@"\n][^@"\n]*)@[^:]+:?\n\s+version "([^"]+)"/gm; + + for (const match of content.matchAll(blockRegex)) { + const name = match[1].trim().replace(/^"/, ''); + const version = match[2]; + const key = `${name}@${version}`; + if (seen.has(key)) continue; + seen.add(key); + + const isDev = devDeps.has(name); + if (isDev && !opts.includeDev) continue; + + packages.push({ + name, + version, + ecosystem: 'npm', + source: lockFilePath, + type: 'direct', + scope: isDev ? 'dev' : 'production' + }); + } + + return packages; +} + +function parsePnpmLock(content: string, lockFilePath: string, opts: ScanOptions): Package[] { + const packages: Package[] = []; + const seen = new Set<string>(); + + // pnpm-lock.yaml v6+ format: + // packages: + // express@4.21.0: + // dev: false + const blockRegex = /^\s{2}(\/?@?[^@\s/][^@\s]*)@(\d[^:\s]*):\s*\n((?:\s{4}[^\n]+\n)*)/gm; + + for (const match of content.matchAll(blockRegex)) { + const name = match[1].replace(/^\//, ''); + const version = match[2]; + const attrs = match[3] ?? ''; + const key = `${name}@${version}`; + if (seen.has(key)) continue; + seen.add(key); + + const isDev = /^\s+dev:\s*true/m.test(attrs); + if (isDev && !opts.includeDev) continue; + + packages.push({ + name, + version, + ecosystem: 'npm', + source: lockFilePath, + type: 'direct', + scope: isDev ?
'dev' : 'production' + }); + } + + return packages; +} diff --git a/src/services/deps/parsers/nuget.ts b/src/services/deps/parsers/nuget.ts new file mode 100644 index 0000000..e1bc134 --- /dev/null +++ b/src/services/deps/parsers/nuget.ts @@ -0,0 +1,205 @@ +import path from 'path'; +import type { Package, ManifestInfo, ScanOptions } from '../types.js'; + +const PROJ_EXTENSIONS = new Set(['.csproj', '.fsproj', '.vbproj']); + +export function findNugetManifests(files: string[]): ManifestInfo[] { + const lockFiles = new Map(); // dir → lock path + const propsFiles = new Map(); // dir → Directory.Packages.props path + + for (const f of files) { + if (path.basename(f) === 'packages.lock.json') { + lockFiles.set(path.dirname(f), f); + } + if (path.basename(f) === 'Directory.Packages.props') { + propsFiles.set(path.dirname(f), f); + } + } + + const manifests: ManifestInfo[] = []; + + for (const file of files) { + const base = path.basename(file); + const ext = path.extname(file); + + if (PROJ_EXTENSIONS.has(ext) || base === 'packages.config') { + const dir = path.dirname(file); + const lockFile = lockFiles.get(dir); + manifests.push({ file, ecosystem: 'nuget', lockFile }); + } + + if (base === 'Directory.Packages.props') { + const dir = path.dirname(file); + manifests.push({ file, ecosystem: 'nuget', lockFile: lockFiles.get(dir) }); + } + } + + // Remove duplicate lock-backed entries: if a packages.lock.json covers a dir, + // prefer it over the individual project files + return manifests; +} + +export function parseNugetPackages( + content: string, + manifest: ManifestInfo, + opts: ScanOptions, + lockContent?: string, + propsContent?: string +): Package[] { + const base = path.basename(manifest.file); + + if (lockContent) { + return parsePackagesLock(lockContent, manifest.lockFile ?? 
manifest.file, opts); + } + + if (base === 'packages.config') { + return parsePackagesConfig(content, manifest.file); + } + + if (base === 'Directory.Packages.props') { + return parseDirectoryPackagesProps(content, manifest.file); + } + + // .csproj / .fsproj / .vbproj + return parseCsproj(content, manifest.file, propsContent); +} + +function parseCsproj(content: string, filePath: string, propsContent?: string): Package[] { + // Build version map from Directory.Packages.props if provided + const centralVersions = propsContent ? extractCentralVersions(propsContent) : new Map(); + + const packages: Package[] = []; + + // Match or multiline form + const singleLine = /]+?)\/>/gi; + const multiLine = /]+?)>([\s\S]*?)<\/PackageReference>/gi; + + const extractFromAttrsAndInner = (attrs: string, inner: string): void => { + const nameMatch = /\bInclude="([^"]+)"/i.exec(attrs); + if (!nameMatch) return; + const name = nameMatch[1]; + + let version: string | undefined; + const attrVersion = /\bVersion="([^"]+)"/i.exec(attrs); + if (attrVersion) { + version = attrVersion[1]; + } else if (inner) { + const innerVersion = /([^<]+)<\/Version>/i.exec(inner); + if (innerVersion) version = innerVersion[1]; + } + + // Fall back to central package management + if (!version || version.startsWith('$(')) { + version = centralVersions.get(name) ?? centralVersions.get(name.toLowerCase()); + } + + if (!version) return; + + packages.push({ + name, + version, + ecosystem: 'nuget', + source: filePath, + type: 'direct', + scope: 'production' + }); + }; + + for (const m of content.matchAll(singleLine)) { + extractFromAttrsAndInner(m[1], ''); + } + for (const m of content.matchAll(multiLine)) { + extractFromAttrsAndInner(m[1], m[2] ?? 
''); + } + + return packages; +} + +function extractCentralVersions(propsContent: string): Map { + const map = new Map(); + const regex = /; +} + +interface PackagesLockJson { + dependencies?: Record>; +} + +function parsePackagesLock(content: string, filePath: string, opts: ScanOptions): Package[] { + let lock: PackagesLockJson; + try { + lock = JSON.parse(content) as PackagesLockJson; + } catch { + return []; + } + + const packages: Package[] = []; + const seen = new Set(); + + for (const frameworkDeps of Object.values(lock.dependencies ?? {})) { + for (const [name, meta] of Object.entries(frameworkDeps)) { + const version = meta.resolved ?? ''; + if (!version) continue; + + const isTransitive = meta.type === 'Transitive'; + if (isTransitive && !opts.includeTransitive) continue; + + const key = `${name}@${version}`; + if (seen.has(key)) continue; + seen.add(key); + + packages.push({ + name, + version, + ecosystem: 'nuget', + source: filePath, + type: isTransitive ? 'transitive' : 'direct', + scope: 'production' + }); + } + } + + return packages; +} diff --git a/src/services/deps/scan.ts b/src/services/deps/scan.ts new file mode 100644 index 0000000..ff5b1a5 --- /dev/null +++ b/src/services/deps/scan.ts @@ -0,0 +1,14 @@ +import type { ResolvedConfig } from '../../types/config.js'; +import type { ScanOptions, ScanData } from './types.js'; +import { scanRepo } from './parsers/index.js'; +import { getRepoRoot } from '../../lib/git-context.js'; +import { PncliError } from '../../lib/errors.js'; + +export function runScan(config: ResolvedConfig, opts: ScanOptions): ScanData { + void config; // scan is local-only, no network + const repoRoot = getRepoRoot(); + if (!repoRoot) { + throw new PncliError('Not inside a git repository. 
deps commands require a git repo root.', 1); + } + return scanRepo(repoRoot, opts); +} diff --git a/src/services/deps/types.ts b/src/services/deps/types.ts new file mode 100644 index 0000000..a2ea834 --- /dev/null +++ b/src/services/deps/types.ts @@ -0,0 +1,150 @@ +export type Ecosystem = 'npm' | 'nuget' | 'maven'; +export type DependencyType = 'direct' | 'transitive'; +export type DependencyScope = 'production' | 'dev'; +export type ChangeType = 'added' | 'removed' | 'upgraded' | 'downgraded'; +export type Tier = 'local' | 'artifactory' | 'full'; + +export interface Package { + name: string; + version: string; + ecosystem: Ecosystem; + source: string; + type: DependencyType; + scope: DependencyScope; +} + +export interface ManifestInfo { + file: string; + ecosystem: Ecosystem; + framework?: string; + lockFile?: string; +} + +export interface ScanData { + ecosystems: Ecosystem[]; + manifests: ManifestInfo[]; + packages: Package[]; + summary: { + totalPackages: number; + byEcosystem: Record; + byType: Record; + byScope: Record; + }; +} + +export interface PackageChange { + name: string; + ecosystem: Ecosystem; + change: ChangeType; + from: string | null; + to: string | null; + source: string; +} + +export interface DiffData { + from: string; + to: string; + changes: PackageChange[]; + summary: { + added: number; + removed: number; + upgraded: number; + downgraded: number; + unchanged: number; + }; +} + +export interface OsvVulnerability { + id: string; + summary: string; + severity: string; + cvss: number | null; + aliases: string[]; + remediation: { + fix_available: boolean; + fixed_versions: string[]; + advice: string; + }; + references: string[]; +} + +export interface VulnerablePackage extends Package { + vulnerabilities: OsvVulnerability[]; +} + +export interface FriskData { + tier: Tier; + scanned: number; + vulnerable: number; + packages: VulnerablePackage[]; +} + +export interface OutdatedPackage { + name: string; + ecosystem: Ecosystem; + current: string; + 
latest: string; + updateType: 'major' | 'minor' | 'patch'; + source: string; + availableInArtifactory: boolean; +} + +export interface OutdatedData { + source: 'artifactory'; + artifactoryUrl: string; + outdated: OutdatedPackage[]; + summary: { + total: number; + major: number; + minor: number; + patch: number; + }; +} + +export interface LicensedPackage { + name: string; + version: string; + ecosystem: Ecosystem; + source: string; + license: string | null; +} + +export interface LicenseCheckData { + source: 'artifactory'; + artifactoryUrl: string; + packages: LicensedPackage[]; + summary: { + total: number; + byLicense: Record; + unknown: number; + }; +} + +export interface ConnectivityData { + artifactory: { + reachable: boolean; + url: string; + authenticated: boolean; + repositories: Record; + error?: string; + }; + osv: { + reachable: boolean; + url: string; + error?: string; + }; + tier: Tier; + capabilities: { + scan: boolean; + diff: boolean; + outdated: boolean; + licenseCheck: boolean; + cveCheck: boolean; + }; +} + +export interface ScanOptions { + ecosystem?: Ecosystem | 'all'; + includeTransitive?: boolean; + includeDev?: boolean; +} diff --git a/src/types/config.ts b/src/types/config.ts index 0d72256..df98473 100644 --- a/src/types/config.ts +++ b/src/types/config.ts @@ -1,3 +1,11 @@ +export interface ArtifactoryConfig { + baseUrl?: string; + token?: string; + npmRepo?: string; + nugetRepo?: string; + mavenRepo?: string; +} + export interface JiraConfig { baseUrl?: string; apiToken?: string; @@ -35,6 +43,7 @@ export interface GlobalConfig { user?: UserConfig; jira?: JiraConfig; bitbucket?: BitbucketConfig; + artifactory?: ArtifactoryConfig; defaults?: Defaults; } @@ -57,6 +66,7 @@ export interface ResolvedConfig { baseUrl: string | undefined; pat: string | undefined; }; + artifactory: ArtifactoryConfig; defaults: { jira: JiraDefaults; bitbucket: BitbucketDefaults; From 8e20ff069cd1a0e7518dd7df44fa6f247d5c3212 Mon Sep 17 00:00:00 2001 From: Sunny 
Kolattukudy Date: Sun, 5 Apr 2026 14:11:24 -0400 Subject: [PATCH 2/8] chore: update copilot-instructions for deps command group Co-Authored-By: Claude Sonnet 4.6 --- copilot-instructions.md | 36 ++++++++++++++++++++++++++++++++++-- 1 file changed, 34 insertions(+), 2 deletions(-) diff --git a/copilot-instructions.md b/copilot-instructions.md index b829f29..68a4ff9 100644 --- a/copilot-instructions.md +++ b/copilot-instructions.md @@ -245,10 +245,42 @@ pncli bitbucket get-build-status # sonar — no subcommands implemented yet ``` -### Artifactory +### Deps ``` -# artifactory — no subcommands implemented yet +pncli deps frisk + --ecosystem Filter to one ecosystem: npm, nuget, maven, all + (default: "all") + --direct-only Only scan direct dependencies (default: include + transitive) (default: false) + --include-dev Include dev/test dependencies (default: false) + +pncli deps scan + --ecosystem Filter to one ecosystem: npm, nuget, maven, all + (default: "all") + --include-transitive Include transitive dependencies (default: false) + --include-dev Include dev/test dependencies (default: false) + +pncli deps diff + --from Base git ref (commit, tag, or branch) + --to Target git ref (default: working tree) + --ecosystem Filter to one ecosystem: npm, nuget, maven, all + (default: "all") + --include-dev Include dev/test dependencies (default: false) + +pncli deps outdated + --ecosystem Filter to one ecosystem: npm, nuget, maven, all + (default: "all") + --major Only show major version bumps + --minor Only show minor version bumps or higher + --patch Only show patch version bumps or higher + +pncli deps license-check + --ecosystem Filter to one ecosystem: npm, nuget, maven, all + (default: "all") + --include-dev Include dev/test dependencies (default: false) + +pncli deps connectivity ``` ### Config From 2a0c65986c4b13d95ba0feda1a617b58a2f55c88 Mon Sep 17 00:00:00 2001 From: Sunny Kolattukudy Date: Sun, 5 Apr 2026 14:12:03 -0400 Subject: [PATCH 3/8] chore: auto-stage 
copilot-instructions.md in pre-commit hook MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The build step regenerates copilot-instructions.md — stage it automatically so it's never left out of a commit. Co-Authored-By: Claude Sonnet 4.6 --- .husky/pre-commit | 1 + 1 file changed, 1 insertion(+) diff --git a/.husky/pre-commit b/.husky/pre-commit index 74053bf..b1e286f 100644 --- a/.husky/pre-commit +++ b/.husky/pre-commit @@ -1,3 +1,4 @@ npm run typecheck npm run lint npm run build +git add copilot-instructions.md From 4084ff68b2e4652133e1745573f9a221edd9bc92 Mon Sep 17 00:00:00 2001 From: Sunny Kolattukudy Date: Sun, 5 Apr 2026 15:59:46 -0400 Subject: [PATCH 4/8] fix: address PR #7 review comments Security: - Replace execSync string interpolation with execFileSync + arg arrays in readFileAtRef and scanRepoAtRef to prevent shell injection from user-controlled --from/--to refs Parser fixes: - yarn.lock: fix blockRegex to match scoped packages (@scope/name); classify direct vs transitive using package.json dep lists and respect opts.includeTransitive - pnpm-lock.yaml: same direct/transitive classification fix; pass packageJsonContent through for cross-referencing - nuget: remove dead propsFiles map that was populated but never read - maven: resolveProperty now returns null when a ${placeholder} remains unresolved, so callers correctly fall through to dependencyManagement versions instead of emitting invalid version strings Diff fixes: - Key on ecosystem:name:source instead of ecosystem:name to preserve multiple versions of the same transitive dep (common in npm) - summary.unchanged now computed directly from the key set rather than mixing collapsed/uncollapsed counts Directory.Packages.props: walk up parent directories to repo root instead of only checking the manifest's own directory (props files are typically at repo root in central package management) Co-Authored-By: Claude Sonnet 4.6 --- src/services/deps/diff.ts | 32 
++++++++++---------- src/services/deps/parsers/index.ts | 28 +++++++++++++----- src/services/deps/parsers/maven.ts | 4 ++- src/services/deps/parsers/npm.ts | 47 +++++++++++++++++++++++------- src/services/deps/parsers/nuget.ts | 4 --- 5 files changed, 76 insertions(+), 39 deletions(-) diff --git a/src/services/deps/diff.ts b/src/services/deps/diff.ts index 5a639b9..1f62419 100644 --- a/src/services/deps/diff.ts +++ b/src/services/deps/diff.ts @@ -19,29 +19,29 @@ export function runDiff( const fromScan = scanRepoAtRef(repoRoot, from, opts); const toScan = to ? scanRepoAtRef(repoRoot, to, opts) : scanRepo(repoRoot, opts); - // Build maps keyed by ecosystem:name (collapse source differences) + // Key on ecosystem:name:source to preserve multiple versions of the same package + // (common with transitive npm deps where the same package appears at different versions) type PkgKey = string; const fromMap = new Map(); // key → version const toMap = new Map(); - const sourceMap = new Map(); for (const pkg of fromScan.packages) { - const key = `${pkg.ecosystem}:${pkg.name}`; + const key = `${pkg.ecosystem}:${pkg.name}:${pkg.source}`; fromMap.set(key, pkg.version); - sourceMap.set(key, pkg.source); } for (const pkg of toScan.packages) { - const key = `${pkg.ecosystem}:${pkg.name}`; + const key = `${pkg.ecosystem}:${pkg.name}:${pkg.source}`; toMap.set(key, pkg.version); - sourceMap.set(key, pkg.source); } const changes: PackageChange[] = []; const allKeys = new Set([...fromMap.keys(), ...toMap.keys()]); for (const key of allKeys) { - const [eco, ...nameParts] = key.split(':'); - const name = nameParts.join(':'); + const parts = key.split(':'); + const eco = parts[0] as Ecosystem; + const source = parts[parts.length - 1] ?? ''; + const name = parts.slice(1, -1).join(':'); const fromVer = fromMap.get(key) ?? null; const toVer = toMap.get(key) ?? null; @@ -56,19 +56,17 @@ export function runDiff( change = isDowngrade(fromVer, toVer) ? 
'downgraded' : 'upgraded'; } - changes.push({ - name, - ecosystem: eco as Ecosystem, - change, - from: fromVer, - to: toVer, - source: sourceMap.get(key) ?? '' - }); + changes.push({ name, ecosystem: eco, change, from: fromVer, to: toVer, source }); } const summary = { added: 0, removed: 0, upgraded: 0, downgraded: 0, unchanged: 0 }; for (const c of changes) summary[c.change]++; - summary.unchanged = fromScan.packages.length - changes.filter(c => c.change !== 'added').length; + // Unchanged = keys present in both maps with identical versions + summary.unchanged = [...allKeys].filter(k => { + const fv = fromMap.get(k); + const tv = toMap.get(k); + return fv !== undefined && tv !== undefined && fv === tv; + }).length; return { from, to: to ?? 'working tree', changes, summary }; } diff --git a/src/services/deps/parsers/index.ts b/src/services/deps/parsers/index.ts index 32e0098..6fb20ed 100644 --- a/src/services/deps/parsers/index.ts +++ b/src/services/deps/parsers/index.ts @@ -1,6 +1,6 @@ import fs from 'fs'; import path from 'path'; -import { execSync } from 'child_process'; +import { execSync, execFileSync } from 'child_process'; import type { Package, ManifestInfo, ScanOptions, ScanData, Ecosystem } from '../types.js'; import { findNpmManifests, parseNpmPackages } from './npm.js'; import { findNugetManifests, parseNugetPackages } from './nuget.js'; @@ -28,7 +28,7 @@ function readFile(repoRoot: string, relPath: string): string | null { function readFileAtRef(repoRoot: string, ref: string, relPath: string): string | null { try { - return execSync(`git show "${ref}":"${relPath}"`, { + return execFileSync('git', ['show', `${ref}:${relPath}`], { encoding: 'utf8', cwd: repoRoot, stdio: ['pipe', 'pipe', 'pipe'] @@ -38,6 +38,22 @@ function readFileAtRef(repoRoot: string, ref: string, relPath: string): string | } } +function findPropsContent( + manifestFile: string, + readFn: (relPath: string) => string | null +): string | null { + // Walk up from the manifest directory to 
the repo root looking for Directory.Packages.props + let dir = path.dirname(manifestFile); + for (let depth = 0; depth < 10; depth++) { + const candidate = dir === '.' ? 'Directory.Packages.props' : `${dir}/Directory.Packages.props`; + const content = readFn(candidate); + if (content) return content; + if (dir === '.') break; + dir = path.dirname(dir); + } + return null; +} + function parseManifests( repoRoot: string, manifests: ManifestInfo[], @@ -58,10 +74,8 @@ function parseManifests( if (manifest.ecosystem === 'npm') { pkgs = parseNpmPackages(content, manifest, opts, lockContent); } else if (manifest.ecosystem === 'nuget') { - // For NuGet, also look for Directory.Packages.props in same or parent dir - const dir = path.dirname(manifest.file); - const propsPath = dir === '.' ? 'Directory.Packages.props' : `${dir}/Directory.Packages.props`; - const propsContent = readFn(propsPath) ?? undefined; + // Walk up from the manifest's directory to find the nearest Directory.Packages.props + const propsContent = findPropsContent(manifest.file, readFn) ?? 
undefined; pkgs = parseNugetPackages(content, manifest, opts, lockContent, propsContent); } else if (manifest.ecosystem === 'maven') { pkgs = parseMavenPackages(content, manifest, opts, lockContent); @@ -99,7 +113,7 @@ export function scanRepoAtRef(repoRoot: string, ref: string, opts: ScanOptions = // Get file list at that ref let files: string[] = []; try { - const out = execSync(`git ls-tree -r --name-only "${ref}"`, { + const out = execFileSync('git', ['ls-tree', '-r', '--name-only', ref], { encoding: 'utf8', cwd: repoRoot, stdio: ['pipe', 'pipe', 'pipe'] diff --git a/src/services/deps/parsers/maven.ts b/src/services/deps/parsers/maven.ts index a7ce8a7..7ec0761 100644 --- a/src/services/deps/parsers/maven.ts +++ b/src/services/deps/parsers/maven.ts @@ -115,7 +115,9 @@ function extractDependencyManagement(content: string, props: Map function resolveProperty(value: string | null, props: Map): string | null { if (!value) return null; - return value.replace(/\$\{([^}]+)\}/g, (_, key: string) => props.get(key) ?? `\${${key}}`); + const resolved = value.replace(/\$\{([^}]+)\}/g, (_, key: string) => props.get(key) ?? `\${${key}}`); + // If any placeholder remains unresolved, return null so callers fall through to managed versions + return /\$\{[^}]+\}/.test(resolved) ? 
null : resolved; } const GRADLE_DEV_CONFIGS = new Set(['testImplementation', 'testCompileOnly', 'testRuntimeOnly', 'testApi']); diff --git a/src/services/deps/parsers/npm.ts b/src/services/deps/parsers/npm.ts index 98384ac..883a9d7 100644 --- a/src/services/deps/parsers/npm.ts +++ b/src/services/deps/parsers/npm.ts @@ -62,7 +62,7 @@ export function parseNpmPackages( } else if (base === 'yarn.lock') { return parseYarnLock(lockContent, manifest.lockFile, opts, content); } else if (base === 'pnpm-lock.yaml') { - return parsePnpmLock(lockContent, manifest.lockFile, opts); + return parsePnpmLock(lockContent, manifest.lockFile, opts, content); } } return parsePackageJson(content, manifest.file, opts); @@ -165,30 +165,37 @@ function parseYarnLock( pkgJson = {}; } + const directDeps = new Set(Object.keys(pkgJson.dependencies ?? {})); const devDeps = new Set(Object.keys(pkgJson.devDependencies ?? {})); + const allDirectDeps = new Set([...directDeps, ...devDeps]); const packages: Package[] = []; const seen = new Set(); - // Matches: "express@^4.21.0", "express@^4.0.0, express@^4.1.0": + // Matches both plain and scoped packages: + // express@^4.21.0: → express + // "@types/node@^18.0.0": → @types/node // version "4.21.0" - const blockRegex = /^"?([^@"\n][^@"\n]*)@[^:]+:?\n\s+version "([^"]+)"/gm; + const blockRegex = /^"?(@?[^@"\n][^"\n]*)@[^:]+:?\n\s+version "([^"]+)"/gm; for (const match of content.matchAll(blockRegex)) { - const name = match[1].trim().replace(/^"/, ''); + const name = match[1].trim().replace(/^"|"$/g, ''); const version = match[2]; const key = `${name}@${version}`; if (seen.has(key)) continue; seen.add(key); - const isDev = devDeps.has(name); + const isDev = devDeps.has(name) && !directDeps.has(name); if (isDev && !opts.includeDev) continue; + const isTransitive = !allDirectDeps.has(name); + if (isTransitive && !opts.includeTransitive) continue; + packages.push({ name, version, ecosystem: 'npm', source: lockFilePath, - type: 'direct', + type: isTransitive 
? 'transitive' : 'direct', scope: isDev ? 'dev' : 'production' }); } @@ -196,13 +203,29 @@ function parseYarnLock( return packages; } -function parsePnpmLock(content: string, lockFilePath: string, opts: ScanOptions): Package[] { +function parsePnpmLock( + content: string, + lockFilePath: string, + opts: ScanOptions, + packageJsonContent: string +): Package[] { + let pkgJson: PackageJson; + try { + pkgJson = JSON.parse(packageJsonContent) as PackageJson; + } catch { + pkgJson = {}; + } + + const directDeps = new Set(Object.keys(pkgJson.dependencies ?? {})); + const devDeps = new Set(Object.keys(pkgJson.devDependencies ?? {})); + const allDirectDeps = new Set([...directDeps, ...devDeps]); + const packages: Package[] = []; const seen = new Set(); // pnpm-lock.yaml v6+ format: // packages: - // express@4.21.0: + // /express@4.21.0: or /@types/node@18.0.0: // dev: false const blockRegex = /^\s{2}(\/?@?[^@\s/][^@\s]*)@(\d[^:\s]*):\s*\n((?:\s{4}[^\n]+\n)*)/gm; @@ -214,15 +237,19 @@ function parsePnpmLock(content: string, lockFilePath: string, opts: ScanOptions) if (seen.has(key)) continue; seen.add(key); - const isDev = /^\s+dev:\s*true/m.test(attrs); + const isDev = devDeps.has(name) && !directDeps.has(name) + || /^\s+dev:\s*true/m.test(attrs); if (isDev && !opts.includeDev) continue; + const isTransitive = !allDirectDeps.has(name); + if (isTransitive && !opts.includeTransitive) continue; + packages.push({ name, version, ecosystem: 'npm', source: lockFilePath, - type: 'direct', + type: isTransitive ? 'transitive' : 'direct', scope: isDev ? 
'dev' : 'production' }); } diff --git a/src/services/deps/parsers/nuget.ts b/src/services/deps/parsers/nuget.ts index e1bc134..323031f 100644 --- a/src/services/deps/parsers/nuget.ts +++ b/src/services/deps/parsers/nuget.ts @@ -5,15 +5,11 @@ const PROJ_EXTENSIONS = new Set(['.csproj', '.fsproj', '.vbproj']); export function findNugetManifests(files: string[]): ManifestInfo[] { const lockFiles = new Map(); // dir → lock path - const propsFiles = new Map(); // dir → Directory.Packages.props path for (const f of files) { if (path.basename(f) === 'packages.lock.json') { lockFiles.set(path.dirname(f), f); } - if (path.basename(f) === 'Directory.Packages.props') { - propsFiles.set(path.dirname(f), f); - } } const manifests: ManifestInfo[] = []; From 5d69975a8f1b3ae8550b1f7c3fd43dc3bb118c2e Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 5 Apr 2026 14:17:39 -0400 Subject: [PATCH 5/8] chore(main): release 1.1.0 (#5) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- CHANGELOG.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3feff6b..073940b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,16 @@ # Changelog +## [1.1.0](https://github.com/kolatts/pncli/compare/v1.0.1...v1.1.0) (2026-04-05) + + +### Features + +* add PNCLI_EMAIL and PNCLI_USERID as global user identity ([81f0091](https://github.com/kolatts/pncli/commit/81f00913a3ca4295631ad5e3701dac6a1553e70a)) +* add user identity prompts to config init wizard ([e200a5a](https://github.com/kolatts/pncli/commit/e200a5a7defd54ba4a39a49518dd6342533c23dc)) +* enterprise testing — user identity, Jira v2, husky, v1.1.0 ([8fb3b2e](https://github.com/kolatts/pncli/commit/8fb3b2e086fe1cd4e6ca31e1b8592dfecefdfbd7)) +* Jira custom fields + auto-generated copilot docs ([#6](https://github.com/kolatts/pncli/issues/6)) 
([a88c01b](https://github.com/kolatts/pncli/commit/a88c01bd681679e738407690d284ac640893fe0d)) +* switch Jira to API v2 with Bearer token auth ([6372db9](https://github.com/kolatts/pncli/commit/6372db9bba3c4213f466cdf021caed4dc11e510f)) + ## [1.0.1](https://github.com/kolatts/pncli/compare/v1.0.0...v1.0.1) (2026-04-04) From f4f6c2a425af95159bcbf3aaacf403d0015bc538 Mon Sep 17 00:00:00 2001 From: Sunny Kolattukudy Date: Sun, 5 Apr 2026 18:11:47 -0400 Subject: [PATCH 6/8] fix: Jira error deserialization, Connection header, and exit codes (#8) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: Jira error deserialization, Connection header, and exit codes - Parse Jira 400 responses correctly: read errorMessages (string[]) and errors (Record) instead of broken array indexing - Add Connection: close header to Jira and Bitbucket requests - Introduce src/lib/exitCodes.ts with sysexits-style codes (69, 77, 78) - fail() now maps HTTP 401/403 → 77, network failures → 69, general → 1 - Replace all hardcoded exit code literals with named constants Co-Authored-By: Claude Sonnet 4.6 * fix: address PR feedback — array errors shape and ExitCode return type - Handle errors-as-array (other APIs) alongside errors-as-object (Jira) - Tighten exitCodeFromStatus return type to ExitCode union Co-Authored-By: Claude Sonnet 4.6 --------- Co-authored-by: Sunny Kolattukudy Co-authored-by: Claude Sonnet 4.6 --- src/cli.ts | 3 ++- src/lib/exitCodes.ts | 16 +++++++++++++++ src/lib/http.ts | 35 +++++++++++++++++++++++++++------ src/lib/output.ts | 4 +++- src/services/config/commands.ts | 9 +++++---- 5 files changed, 55 insertions(+), 12 deletions(-) create mode 100644 src/lib/exitCodes.ts diff --git a/src/cli.ts b/src/cli.ts index ab40ad3..96cee6b 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -1,6 +1,7 @@ import { Command } from 'commander'; import { createRequire } from 'module'; import { setGlobalOptions, setGlobalUser } from './lib/output.js'; +import { 
ExitCode } from './lib/exitCodes.js'; import { loadConfig } from './lib/config.js'; import { registerGitCommands } from './services/git/commands.js'; import { registerJiraCommands } from './services/jira/commands.js'; @@ -63,5 +64,5 @@ Services: program.parseAsync(process.argv).catch((err: unknown) => { process.stderr.write(`Fatal: ${err instanceof Error ? err.message : String(err)}\n`); - process.exit(1); + process.exit(ExitCode.GENERAL_ERROR); }); diff --git a/src/lib/exitCodes.ts b/src/lib/exitCodes.ts new file mode 100644 index 0000000..306c5e6 --- /dev/null +++ b/src/lib/exitCodes.ts @@ -0,0 +1,16 @@ +export const ExitCode = { + SUCCESS: 0, + GENERAL_ERROR: 1, + USAGE_ERROR: 2, + NETWORK_ERROR: 69, // EX_UNAVAILABLE from sysexits.h + AUTH_ERROR: 77, // EX_NOPERM from sysexits.h + CONFIG_ERROR: 78, // EX_CONFIG from sysexits.h +} as const; + +export type ExitCode = (typeof ExitCode)[keyof typeof ExitCode]; + +export function exitCodeFromStatus(httpStatus: number): ExitCode { + if (httpStatus === 401 || httpStatus === 403) return ExitCode.AUTH_ERROR; + if (httpStatus === 0) return ExitCode.NETWORK_ERROR; + return ExitCode.GENERAL_ERROR; +} diff --git a/src/lib/http.ts b/src/lib/http.ts index c7c72da..bec5c50 100644 --- a/src/lib/http.ts +++ b/src/lib/http.ts @@ -1,5 +1,6 @@ import type { ResolvedConfig } from '../types/config.js'; import { PncliError } from './errors.js'; +import { ExitCode } from './exitCodes.js'; import { log } from './output.js'; export type HttpMethod = 'GET' | 'POST' | 'PUT' | 'DELETE' | 'PATCH'; @@ -73,8 +74,28 @@ async function request( try { const body = await response.text(); const parsed = JSON.parse(body); - if (parsed.message) message = parsed.message; - else if (parsed.errors?.[0]?.message) message = parsed.errors[0].message; + const parts: string[] = []; + // Jira: errorMessages is string[] + if (Array.isArray(parsed.errorMessages)) { + parts.push(...(parsed.errorMessages as string[]).filter(Boolean)); + } + // errors as object map 
(Jira: Record) + if (parsed.errors && typeof parsed.errors === 'object' && !Array.isArray(parsed.errors)) { + for (const [field, msg] of Object.entries(parsed.errors as Record)) { + parts.push(`${field}: ${msg}`); + } + } + // errors as array of objects with message field (other APIs) + if (Array.isArray(parsed.errors)) { + for (const e of parsed.errors as Array<{ message?: string }>) { + if (e?.message) parts.push(e.message); + } + } + // Generic APIs: { message: "..." } + if (parts.length === 0 && parsed.message) { + parts.push(String(parsed.message)); + } + if (parts.length > 0) message = parts.join('; '); } catch { // ignore parse errors } @@ -106,7 +127,8 @@ export class HttpClient { return { 'Authorization': `Bearer ${apiToken}`, 'Content-Type': 'application/json', - 'Accept': 'application/json' + 'Accept': 'application/json', + 'Connection': 'close' }; } @@ -116,7 +138,8 @@ export class HttpClient { return { 'Authorization': `Bearer ${pat}`, 'Content-Type': 'application/json', - 'Accept': 'application/json' + 'Accept': 'application/json', + 'Connection': 'close' }; } @@ -139,7 +162,7 @@ export class HttpClient { const safeHeaders = { ...headers, Authorization: '[REDACTED]' }; process.stderr.write(`DRY RUN: ${init.method} ${url}\nHeaders: ${JSON.stringify(safeHeaders, null, 2)}\n`); if (opts.body) process.stderr.write(`Body: ${JSON.stringify(opts.body, null, 2)}\n`); - process.exit(0); + process.exit(ExitCode.SUCCESS); } return request(url, init, opts.timeoutMs ?? 30000); @@ -164,7 +187,7 @@ export class HttpClient { const safeHeaders = { ...headers, Authorization: '[REDACTED]' }; process.stderr.write(`DRY RUN: ${init.method} ${url}\nHeaders: ${JSON.stringify(safeHeaders, null, 2)}\n`); if (opts.body) process.stderr.write(`Body: ${JSON.stringify(opts.body, null, 2)}\n`); - process.exit(0); + process.exit(ExitCode.SUCCESS); } return request(url, init, opts.timeoutMs ?? 
30000); diff --git a/src/lib/output.ts b/src/lib/output.ts index e13999f..b81c2f5 100644 --- a/src/lib/output.ts +++ b/src/lib/output.ts @@ -1,6 +1,7 @@ import chalk from 'chalk'; import type { Meta, SuccessEnvelope, ErrorEnvelope, ErrorDetail } from '../types/common.js'; import { PncliError } from './errors.js'; +import { ExitCode, exitCodeFromStatus } from './exitCodes.js'; let globalOptions = { pretty: false, verbose: false }; let globalUser: { email: string | undefined; userId: string | undefined } = { email: undefined, userId: undefined }; @@ -62,7 +63,8 @@ export function fail( (globalOptions.pretty ? JSON.stringify(envelope, null, 2) : JSON.stringify(envelope)) + '\n' ); - process.exit(1); + const exitCode = err instanceof PncliError ? exitCodeFromStatus(err.status) : ExitCode.GENERAL_ERROR; + process.exit(exitCode); } export function log(message: string): void { diff --git a/src/services/config/commands.ts b/src/services/config/commands.ts index 7775e83..98987a1 100644 --- a/src/services/config/commands.ts +++ b/src/services/config/commands.ts @@ -9,6 +9,7 @@ import { getGlobalConfigPath } from '../../lib/config.js'; import { success, fail, warn } from '../../lib/output.js'; +import { ExitCode } from '../../lib/exitCodes.js'; import fs from 'fs'; export function registerConfigCommands(program: Command): void { @@ -30,7 +31,7 @@ export function registerConfigCommands(program: Command): void { // Handle prompt cancellation (Ctrl+C) gracefully if (err instanceof Error && err.message.includes('User force closed')) { process.stderr.write('\nSetup cancelled.\n'); - process.exit(1); + process.exit(ExitCode.GENERAL_ERROR); } fail(err, 'config', 'init', start); } @@ -167,7 +168,7 @@ async function initGlobalConfig(start: number): Promise { if (!confirmed) { process.stderr.write('Aborted.\n'); - process.exit(0); + process.exit(ExitCode.SUCCESS); } writeGlobalConfig({ @@ -234,7 +235,7 @@ async function initRepoConfig(start: number): Promise { if (!confirmed) { 
process.stderr.write('Aborted.\n'); - process.exit(0); + process.exit(ExitCode.SUCCESS); } // Warn if .pncli.json already exists @@ -245,7 +246,7 @@ async function initRepoConfig(start: number): Promise { }); if (!overwrite) { process.stderr.write('Aborted.\n'); - process.exit(0); + process.exit(ExitCode.SUCCESS); } } From 58819b4f5683d47c41b8c8ff11b9507fd4c474a6 Mon Sep 17 00:00:00 2001 From: Sunny Kolattukudy Date: Sun, 5 Apr 2026 18:17:31 -0400 Subject: [PATCH 7/8] fix: address second round of PR #7 review comments MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add shared semver utility (semver.ts) with pre-release/build-metadata aware comparison, replacing lossy strip-non-numeric logic in diff and artifactory client - Rename remediation snake_case fields to camelCase (fixAvailable, fixedVersions) to match rest of CLI output shape - Remove always-true availableInArtifactory field; add uncheckedEcosystems to OutdatedData so callers know which ecosystems had no repo configured - Drop unused repoRoot parameter from parseManifests - Implement NuGet manifest deduplication: when multiple project files share the same packages.lock.json, keep only one representative to avoid inflated package counts - Add Kotlin DSL / Groovy parenthesised form support to parseBuildGradle (handles both implementation("g:a:v") and implementation 'g:a:v') - Fix Artifactory config init prompt — deps frisk uses OSV.dev, not Artifactory; prompt now only mentions outdated and license-check Co-Authored-By: Claude Sonnet 4.6 --- src/services/config/commands.ts | 2 +- src/services/deps/clients/artifactory.ts | 44 ++++++-------------- src/services/deps/clients/osv.ts | 6 +-- src/services/deps/diff.ts | 14 +------ src/services/deps/outdated.ts | 7 +++- src/services/deps/parsers/index.ts | 5 +-- src/services/deps/parsers/maven.ts | 4 +- src/services/deps/parsers/nuget.ts | 21 ++++++++-- src/services/deps/semver.ts | 52 ++++++++++++++++++++++++ 
src/services/deps/types.ts | 6 +-- 10 files changed, 101 insertions(+), 60 deletions(-) create mode 100644 src/services/deps/semver.ts diff --git a/src/services/config/commands.ts b/src/services/config/commands.ts index 98987a1..aa5462a 100644 --- a/src/services/config/commands.ts +++ b/src/services/config/commands.ts @@ -116,7 +116,7 @@ async function initGlobalConfig(start: number): Promise { process.stderr.write('\n── Artifactory ───────────────────────────────────\n'); const useArtifactory = await confirm({ - message: 'Configure Artifactory for dependency scanning (deps frisk, outdated, license-check)?', + message: 'Configure Artifactory for dependency commands (deps outdated, deps license-check)?', default: false }); diff --git a/src/services/deps/clients/artifactory.ts b/src/services/deps/clients/artifactory.ts index f25f9b8..bac5887 100644 --- a/src/services/deps/clients/artifactory.ts +++ b/src/services/deps/clients/artifactory.ts @@ -1,6 +1,7 @@ import type { Ecosystem, LicensedPackage, OutdatedPackage } from '../types.js'; import type { ArtifactoryConfig } from '../../../types/config.js'; import { PncliError } from '../../../lib/errors.js'; +import { isNewer, updateType } from '../semver.js'; const TIMEOUT_MS = 15_000; @@ -161,41 +162,23 @@ async function getLatestMaven( } } -function parseSemver(v: string): [number, number, number] { - const clean = v.replace(/[^0-9.]/g, ''); - const parts = clean.split('.').map(Number); - return [parts[0] ?? 0, parts[1] ?? 0, parts[2] ?? 
0]; -} - -function getUpdateType(current: string, latest: string): 'major' | 'minor' | 'patch' { - const [cMaj, cMin] = parseSemver(current); - const [lMaj, lMin] = parseSemver(latest); - if (lMaj > cMaj) return 'major'; - if (lMin > cMin) return 'minor'; - return 'patch'; -} - -function isNewer(current: string, latest: string): boolean { - const [cMaj, cMin, cPat] = parseSemver(current); - const [lMaj, lMin, lPat] = parseSemver(latest); - if (lMaj !== cMaj) return lMaj > cMaj; - if (lMin !== cMin) return lMin > cMin; - return lPat > cPat; -} - export async function getOutdatedPackages( packages: Array<{ name: string; version: string; ecosystem: Ecosystem; source: string }>, config: ArtifactoryConfig, filterType?: 'major' | 'minor' | 'patch' -): Promise { +): Promise<{ outdated: OutdatedPackage[]; uncheckedEcosystems: string[] }> { const { baseUrl, token } = config; - if (!baseUrl || !token) return []; + if (!baseUrl || !token) return { outdated: [], uncheckedEcosystems: [] }; const outdated: OutdatedPackage[] = []; + const unchecked = new Set(); for (const pkg of packages) { const repoName = repoForEcosystem(config, pkg.ecosystem); - if (!repoName) continue; // ecosystem repo not configured — skip silently + if (!repoName) { + unchecked.add(pkg.ecosystem); + continue; + } let latest: string | null = null; if (pkg.ecosystem === 'npm') { @@ -208,10 +191,10 @@ export async function getOutdatedPackages( if (!latest || !isNewer(pkg.version, latest)) continue; - const updateType = getUpdateType(pkg.version, latest); + const type = updateType(pkg.version, latest); if (filterType) { const order = { major: 3, minor: 2, patch: 1 }; - if (order[updateType] < order[filterType]) continue; + if (order[type] < order[filterType]) continue; } outdated.push({ @@ -219,13 +202,12 @@ export async function getOutdatedPackages( ecosystem: pkg.ecosystem, current: pkg.version, latest, - updateType, - source: pkg.source, - availableInArtifactory: true + updateType: type, + source: 
pkg.source }); } - return outdated; + return { outdated, uncheckedEcosystems: [...unchecked] }; } async function getLicenseNpm( diff --git a/src/services/deps/clients/osv.ts b/src/services/deps/clients/osv.ts index fa64099..d5bf874 100644 --- a/src/services/deps/clients/osv.ts +++ b/src/services/deps/clients/osv.ts @@ -84,12 +84,12 @@ function extractRemediation(vuln: OsvRawVuln, pkg: Package): OsvVulnerability['r } const unique = [...new Set(fixedVersions)]; - const fix_available = unique.length > 0; - const advice = fix_available + const fixAvailable = unique.length > 0; + const advice = fixAvailable ? `Upgrade ${pkg.name} to ${unique.join(' or ')}` : `No fix available yet for ${pkg.name}. Monitor ${vuln.id} for updates.`; - return { fix_available, fixed_versions: unique, advice }; + return { fixAvailable, fixedVersions: unique, advice }; } function extractSeverity(vuln: OsvRawVuln): { severity: string; cvss: number | null } { diff --git a/src/services/deps/diff.ts b/src/services/deps/diff.ts index 1f62419..24a0321 100644 --- a/src/services/deps/diff.ts +++ b/src/services/deps/diff.ts @@ -3,6 +3,7 @@ import type { ScanOptions, DiffData, PackageChange, ChangeType, Ecosystem } from import { scanRepo, scanRepoAtRef } from './parsers/index.js'; import { getRepoRoot } from '../../lib/git-context.js'; import { PncliError } from '../../lib/errors.js'; +import { isDowngrade } from './semver.js'; export function runDiff( config: ResolvedConfig, @@ -71,16 +72,3 @@ export function runDiff( return { from, to: to ?? 'working tree', changes, summary }; } -function parseSemver(v: string): [number, number, number] { - const clean = v.replace(/[^0-9.]/g, ''); - const parts = clean.split('.').map(Number); - return [parts[0] ?? 0, parts[1] ?? 0, parts[2] ?? 
0]; -} - -function isDowngrade(from: string, to: string): boolean { - const [fMaj, fMin, fPat] = parseSemver(from); - const [tMaj, tMin, tPat] = parseSemver(to); - if (tMaj !== fMaj) return tMaj < fMaj; - if (tMin !== fMin) return tMin < fMin; - return tPat < fPat; -} diff --git a/src/services/deps/outdated.ts b/src/services/deps/outdated.ts index 2526a16..515cdab 100644 --- a/src/services/deps/outdated.ts +++ b/src/services/deps/outdated.ts @@ -31,7 +31,11 @@ export async function runOutdated( const scan = scanRepo(repoRoot, { ...opts, includeTransitive: false }); const directPackages = scan.packages.filter(p => p.type === 'direct'); - const outdated = await getOutdatedPackages(directPackages, config.artifactory, filterType); + const { outdated, uncheckedEcosystems } = await getOutdatedPackages( + directPackages, + config.artifactory, + filterType + ); const summary = { total: outdated.length, major: 0, minor: 0, patch: 0 }; for (const pkg of outdated) summary[pkg.updateType]++; @@ -40,6 +44,7 @@ export async function runOutdated( source: 'artifactory', artifactoryUrl: config.artifactory.baseUrl ?? '', outdated, + uncheckedEcosystems, summary }; } diff --git a/src/services/deps/parsers/index.ts b/src/services/deps/parsers/index.ts index 6fb20ed..adf1bf3 100644 --- a/src/services/deps/parsers/index.ts +++ b/src/services/deps/parsers/index.ts @@ -55,7 +55,6 @@ function findPropsContent( } function parseManifests( - repoRoot: string, manifests: ManifestInfo[], opts: ScanOptions, readFn: (relPath: string) => string | null @@ -102,7 +101,7 @@ export function scanRepo(repoRoot: string, opts: ScanOptions = {}): ScanData { const mavenManifests = eco === 'all' || eco === 'maven' ? 
findMavenManifests(files) : []; const manifests = [...npmManifests, ...nugetManifests, ...mavenManifests]; - const packages = parseManifests(repoRoot, manifests, opts, rel => readFile(repoRoot, rel)); + const packages = parseManifests(manifests, opts, rel => readFile(repoRoot, rel)); return buildScanData(manifests, packages); } @@ -128,7 +127,7 @@ export function scanRepoAtRef(repoRoot: string, ref: string, opts: ScanOptions = const mavenManifests = eco === 'all' || eco === 'maven' ? findMavenManifests(files) : []; const manifests = [...npmManifests, ...nugetManifests, ...mavenManifests]; - const packages = parseManifests(repoRoot, manifests, opts, rel => readFileAtRef(repoRoot, ref, rel)); + const packages = parseManifests(manifests, opts, rel => readFileAtRef(repoRoot, ref, rel)); return buildScanData(manifests, packages); } diff --git a/src/services/deps/parsers/maven.ts b/src/services/deps/parsers/maven.ts index 7ec0761..35804e9 100644 --- a/src/services/deps/parsers/maven.ts +++ b/src/services/deps/parsers/maven.ts @@ -126,8 +126,8 @@ function parseBuildGradle(content: string, filePath: string, opts: ScanOptions): const packages: Package[] = []; const seen = new Set(); - // String notation: implementation 'group:artifact:version' or "group:artifact:version" - const stringNotation = /\b(\w+)\s+['"]([^:'"]+):([^:'"]+):([^'"]+)['"]/g; + // String notation — Groovy: implementation 'g:a:v' / Kotlin DSL: implementation("g:a:v") + const stringNotation = /\b(\w+)\s*\(?\s*['"]([^:'"]+):([^:'"]+):([^'"]+)['"]\s*\)?/g; for (const m of content.matchAll(stringNotation)) { const config = m[1]; const groupId = m[2]; diff --git a/src/services/deps/parsers/nuget.ts b/src/services/deps/parsers/nuget.ts index 323031f..3c1893b 100644 --- a/src/services/deps/parsers/nuget.ts +++ b/src/services/deps/parsers/nuget.ts @@ -30,9 +30,24 @@ export function findNugetManifests(files: string[]): ManifestInfo[] { } } - // Remove duplicate lock-backed entries: if a packages.lock.json covers a 
dir, - // prefer it over the individual project files - return manifests; + // Deduplicate: when multiple project files share the same packages.lock.json, + // keep only one representative manifest per lock file to avoid parsing the same + // lock multiple times and inflating package counts. + const deduped: ManifestInfo[] = []; + const seenLocks = new Set(); + + for (const manifest of manifests) { + if (!manifest.lockFile) { + deduped.push(manifest); + continue; + } + if (!seenLocks.has(manifest.lockFile)) { + seenLocks.add(manifest.lockFile); + deduped.push(manifest); + } + } + + return deduped; } export function parseNugetPackages( diff --git a/src/services/deps/semver.ts b/src/services/deps/semver.ts new file mode 100644 index 0000000..f10cc8d --- /dev/null +++ b/src/services/deps/semver.ts @@ -0,0 +1,52 @@ +interface ParsedVersion { + major: number; + minor: number; + patch: number; + preRelease: string; +} + +function parse(v: string): ParsedVersion { + // Strip leading non-numeric (e.g. 'v1.0.0') + const clean = v.replace(/^[^0-9]*/, ''); + // Split off pre-release/build metadata + const dashIdx = clean.indexOf('-'); + const plusIdx = clean.indexOf('+'); + const metaStart = plusIdx > -1 ? plusIdx : Infinity; + const preStart = dashIdx > -1 && dashIdx < metaStart ? dashIdx : Infinity; + const numeric = clean.slice(0, Math.min(preStart, metaStart)); + const preRelease = preStart < Infinity + ? clean.slice(preStart + 1, metaStart < Infinity ? metaStart : undefined) + : ''; + const parts = numeric.split('.').map(n => parseInt(n, 10) || 0); + return { major: parts[0] ?? 0, minor: parts[1] ?? 0, patch: parts[2] ?? 0, preRelease }; +} + +/** Returns negative / zero / positive like Array.sort comparators. 
*/ +export function compareSemver(a: string, b: string): number { + const av = parse(a); + const bv = parse(b); + if (av.major !== bv.major) return av.major - bv.major; + if (av.minor !== bv.minor) return av.minor - bv.minor; + if (av.patch !== bv.patch) return av.patch - bv.patch; + // A release (no pre-release) is greater than any pre-release of the same version + if (!av.preRelease && bv.preRelease) return 1; + if (av.preRelease && !bv.preRelease) return -1; + if (av.preRelease && bv.preRelease) return av.preRelease.localeCompare(bv.preRelease); + return 0; +} + +export function isNewer(current: string, latest: string): boolean { + return compareSemver(latest, current) > 0; +} + +export function updateType(current: string, latest: string): 'major' | 'minor' | 'patch' { + const cv = parse(current); + const lv = parse(latest); + if (lv.major > cv.major) return 'major'; + if (lv.minor > cv.minor) return 'minor'; + return 'patch'; +} + +export function isDowngrade(from: string, to: string): boolean { + return compareSemver(to, from) < 0; +} diff --git a/src/services/deps/types.ts b/src/services/deps/types.ts index a2ea834..0e4e288 100644 --- a/src/services/deps/types.ts +++ b/src/services/deps/types.ts @@ -61,8 +61,8 @@ export interface OsvVulnerability { cvss: number | null; aliases: string[]; remediation: { - fix_available: boolean; - fixed_versions: string[]; + fixAvailable: boolean; + fixedVersions: string[]; advice: string; }; references: string[]; @@ -86,13 +86,13 @@ export interface OutdatedPackage { latest: string; updateType: 'major' | 'minor' | 'patch'; source: string; - availableInArtifactory: boolean; } export interface OutdatedData { source: 'artifactory'; artifactoryUrl: string; outdated: OutdatedPackage[]; + uncheckedEcosystems: string[]; summary: { total: number; major: number; From 1a6036bb1faa099df6ff1ba4a5391ab3025f7821 Mon Sep 17 00:00:00 2001 From: Sunny Kolattukudy Date: Sun, 5 Apr 2026 18:20:20 -0400 Subject: [PATCH 8/8] fix: use write 
callback before process.exit to avoid libuv assertion on Windows process.exit() called synchronously after process.stdout/stderr.write() triggers a libuv assertion on Windows because the write handle is torn down before the kernel flushes the buffer. Move process.exit() into the write callback in output.ts fail() and http.ts dry-run paths. Co-Authored-By: Claude Sonnet 4.6 --- src/lib/http.ts | 14 ++++++++------ src/lib/output.ts | 8 +++----- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/src/lib/http.ts b/src/lib/http.ts index bec5c50..0d3053d 100644 --- a/src/lib/http.ts +++ b/src/lib/http.ts @@ -160,9 +160,10 @@ export class HttpClient { if (this.dryRun) { const safeHeaders = { ...headers, Authorization: '[REDACTED]' }; - process.stderr.write(`DRY RUN: ${init.method} ${url}\nHeaders: ${JSON.stringify(safeHeaders, null, 2)}\n`); - if (opts.body) process.stderr.write(`Body: ${JSON.stringify(opts.body, null, 2)}\n`); - process.exit(ExitCode.SUCCESS); + const msg = `DRY RUN: ${init.method} ${url}\nHeaders: ${JSON.stringify(safeHeaders, null, 2)}\n` + + (opts.body ? `Body: ${JSON.stringify(opts.body, null, 2)}\n` : ''); + process.stderr.write(msg, () => process.exit(ExitCode.SUCCESS)); + return new Promise(() => { /* exit pending */ }); } return request(url, init, opts.timeoutMs ?? 30000); @@ -185,9 +186,10 @@ export class HttpClient { if (this.dryRun) { const safeHeaders = { ...headers, Authorization: '[REDACTED]' }; - process.stderr.write(`DRY RUN: ${init.method} ${url}\nHeaders: ${JSON.stringify(safeHeaders, null, 2)}\n`); - if (opts.body) process.stderr.write(`Body: ${JSON.stringify(opts.body, null, 2)}\n`); - process.exit(ExitCode.SUCCESS); + const msg = `DRY RUN: ${init.method} ${url}\nHeaders: ${JSON.stringify(safeHeaders, null, 2)}\n` + + (opts.body ? 
`Body: ${JSON.stringify(opts.body, null, 2)}\n` : ''); + process.stderr.write(msg, () => process.exit(ExitCode.SUCCESS)); + return new Promise(() => { /* exit pending */ }); } return request(url, init, opts.timeoutMs ?? 30000); diff --git a/src/lib/output.ts b/src/lib/output.ts index b81c2f5..c4bb6b5 100644 --- a/src/lib/output.ts +++ b/src/lib/output.ts @@ -59,12 +59,10 @@ export function fail( if (msg) process.stderr.write(msg + '\n'); - process.stdout.write( - (globalOptions.pretty ? JSON.stringify(envelope, null, 2) : JSON.stringify(envelope)) + '\n' - ); - + const output = (globalOptions.pretty ? JSON.stringify(envelope, null, 2) : JSON.stringify(envelope)) + '\n'; const exitCode = err instanceof PncliError ? exitCodeFromStatus(err.status) : ExitCode.GENERAL_ERROR; - process.exit(exitCode); + process.stdout.write(output, () => process.exit(exitCode)); + throw new PncliError(errorDetail.message, errorDetail.status); } export function log(message: string): void {