Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 12 additions & 4 deletions src/cli.ts
Original file line number Diff line number Diff line change
Expand Up @@ -63,12 +63,15 @@ async function main() {
await installGit();

// Initialize error tracking and telemetry (no-ops if opted out).
//
// Both SDKs are lazy-loaded via dynamic `import()` inside these functions,
// so the `ARCHGATE_TELEMETRY=0` path skips the parse/init cost entirely.
//
// Await telemetry so the repo context is resolved before the preAction
// hook fires `command_executed` — otherwise that event always lands
// without `repo_id`. The repo lookup is a few cached git subprocesses
// (~5–20ms on a cold run) and runs exactly once per invocation.
initSentry();
await initTelemetry();
// without `repo_id` (see PR #211). The two init calls run concurrently,
// so the wall-clock cost is bounded by whichever is slowest.
await Promise.all([initSentry(), initTelemetry()]);

const logLevelOption = new Option("--log-level <level>", "Set log verbosity")
.choices(["error", "warn", "info", "debug"] as const)
Expand Down Expand Up @@ -137,6 +140,11 @@ async function main() {

// Flush telemetry and error tracking before exit
await Promise.all([flushTelemetry(), flushSentry()]);

// Belt-and-braces: force exit so any stray handle left by a third-party
// SDK (posthog-node, @sentry/node-core, etc.) can't linger and make the
// CLI feel laggy. All flushes above have already completed.
process.exit(0);
}

/**
Expand Down
31 changes: 7 additions & 24 deletions src/commands/adr/list.ts
Original file line number Diff line number Diff line change
@@ -1,30 +1,14 @@
import { existsSync, readdirSync } from "node:fs";
import { join } from "node:path";
import { existsSync } from "node:fs";
import { styleText } from "node:util";

import type { Command } from "@commander-js/extra-typings";

import { parseAdr, type AdrDocument } from "../../formats/adr";
import { parseAllAdrs } from "../../engine/loader";
import { exitWith } from "../../helpers/exit";
import { logError } from "../../helpers/log";
import { formatJSON, isAgentContext } from "../../helpers/output";
import { findProjectRoot, projectPaths } from "../../helpers/paths";

async function loadAdrs(adrsDir: string): Promise<AdrDocument[]> {
const files = readdirSync(adrsDir).filter((f) => f.endsWith(".md"));
const results = await Promise.all(
files.map(async (file) => {
try {
const content = await Bun.file(join(adrsDir, file)).text();
return parseAdr(content, file);
} catch {
return null;
}
})
);
return results.filter((r): r is AdrDocument => r !== null);
}

export function registerAdrListCommand(adr: Command) {
adr
.command("list")
Expand All @@ -47,17 +31,16 @@ export function registerAdrListCommand(adr: Command) {
return;
}

const files = readdirSync(paths.adrsDir).filter((f) =>
f.endsWith(".md")
);
// parseAllAdrs is cached per-process and shared with the check /
// review-context engines, so we don't need a separate readdir pass
// to bail early on empty dirs.
const adrs = (await parseAllAdrs(projectRoot)).map((e) => e.adr);

if (files.length === 0) {
if (adrs.length === 0) {
console.log("No ADRs found.");
return;
}

const adrs = await loadAdrs(paths.adrsDir);

// Filter by domain if specified
const filtered = options.domain
? adrs.filter((a) => a.frontmatter.domain === options.domain)
Expand Down
14 changes: 9 additions & 5 deletions src/commands/check.ts
Original file line number Diff line number Diff line change
Expand Up @@ -101,16 +101,20 @@ export function registerCheckCommand(program: Command) {
// Determine output format for telemetry
const outputFormat = opts.ci ? "ci" : useJson ? "json" : "console";

// Build the summary once and share it with the reporters, telemetry,
// and exit-code resolver. Previously each of those built its own
// summary — 3 walks over the same result set.
const summary = buildSummary(result);

if (opts.ci) {
reportCI(result);
reportCI(result, summary);
} else if (useJson) {
reportJSON(result, opts.json ? true : undefined);
reportJSON(result, opts.json ? true : undefined, summary);
} else {
reportConsole(result, opts.verbose ?? false);
reportConsole(result, opts.verbose ?? false, summary);
}

// Track aggregate check results (no file paths or violation content)
const summary = buildSummary(result);
trackCheckResult({
total_rules: summary.total,
passed: summary.passed,
Expand All @@ -128,7 +132,7 @@ export function registerCheckCommand(program: Command) {
check_duration_ms: Math.round(result.totalDurationMs),
});

const exitCode = getExitCode(result);
const exitCode = getExitCode(result, summary);
// Only 0, 1, and 2 are emitted by getExitCode()
await exitWith(exitCode as 0 | 1 | 2);
});
Expand Down
37 changes: 8 additions & 29 deletions src/engine/context.ts
Original file line number Diff line number Diff line change
@@ -1,11 +1,6 @@
import { readdirSync } from "node:fs";
import { join } from "node:path";

import { parseAdr } from "../formats/adr";
import type { AdrDocument, AdrDomain } from "../formats/adr";
import { projectPaths } from "../helpers/paths";
import { getChangedFiles, getStagedFiles } from "./git-files";
import { loadRuleAdrs } from "./loader";
import { loadRuleAdrs, parseAllAdrs } from "./loader";
import type { ReportSummary } from "./reporter";
import { buildSummary } from "./reporter";
import { runChecks } from "./runner";
Expand Down Expand Up @@ -171,30 +166,14 @@ export function matchFilesToAdrs(
return results.sort((a, b) => a.domain.localeCompare(b.domain));
}

/** Load all ADR documents (not just those with rules) from the project. */
/**
* Load all ADR documents (not just those with rules) from the project.
* Shares the per-process parse cache with `loadRuleAdrs` so
* `review-context --run-checks` only reads the ADR directory once.
*/
async function loadAllAdrs(projectRoot: string): Promise<AdrDocument[]> {
const pp = projectPaths(projectRoot);

let files: string[];
try {
files = readdirSync(pp.adrsDir).filter((f) => f.endsWith(".md"));
} catch {
return [];
}

const results = await Promise.all(
files.map(async (file) => {
try {
const filePath = join(pp.adrsDir, file);
const content = await Bun.file(filePath).text();
return parseAdr(content, filePath);
} catch {
return null;
}
})
);

return results.filter((adr): adr is AdrDocument => adr !== null);
const parsed = await parseAllAdrs(projectRoot);
return parsed.map((e) => e.adr);
}

const EMPTY_SUMMARY: ReportSummary = {
Expand Down
48 changes: 35 additions & 13 deletions src/engine/git-files.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,22 +21,44 @@ async function runGit(args: string[], cwd: string): Promise<string> {
return text;
}

/**
* Cache of tracked-files lookups per project root. `archgate check` calls
* `resolveScopedFiles` once per ADR — without this cache that's N spawns of
* `git ls-files` (one per ADR) instead of 1, which adds ~25ms × N on Windows.
* The in-memory lifetime matches the process; file changes during a single
* CLI invocation are not expected.
*/
const trackedFilesCache = new Map<string, Promise<Set<string> | null>>();

/** Get all git-tracked (non-ignored) files in the project. */
export async function getGitTrackedFiles(
export function getGitTrackedFiles(
projectRoot: string
): Promise<Set<string> | null> {
try {
const result = await runGit(
["ls-files", "--cached", "--others", "--exclude-standard"],
projectRoot
);
const files = new Set(result.trim().split("\n").filter(Boolean));
logDebug("Git tracked files:", files.size);
return files;
} catch {
logDebug("Git tracked files lookup failed (not a git repo?)");
return null;
}
const cached = trackedFilesCache.get(projectRoot);
if (cached) return cached;

const promise = (async () => {
try {
const result = await runGit(
["ls-files", "--cached", "--others", "--exclude-standard"],
projectRoot
);
const files = new Set(result.trim().split("\n").filter(Boolean));
logDebug("Git tracked files:", files.size);
return files;
} catch {
logDebug("Git tracked files lookup failed (not a git repo?)");
return null;
}
})();

trackedFilesCache.set(projectRoot, promise);
return promise;
}

/**
 * Reset the tracked-files cache. For testing only — production code relies on
 * the cache living for the whole process (one `git ls-files` per invocation).
 */
export function _resetGitFilesCache(): void {
trackedFilesCache.clear();
}

/** Resolve scoped files for an ADR based on its files globs. Respects .gitignore. */
Expand Down
96 changes: 68 additions & 28 deletions src/engine/loader.ts
Original file line number Diff line number Diff line change
Expand Up @@ -137,6 +137,67 @@ function checkRuleSyntax(source: string): SyntaxViolation[] {
return violations;
}

/** One ADR markdown file read from the project's ADR directory, post-parse. */
export interface ParsedAdrEntry {
  /** File basename as returned by `readdirSync` (e.g. "0001-something.md"). */
  file: string;
  /** The parsed ADR document for that file. */
  adr: AdrDocument;
}

/**
 * Process-level cache of `readdir + read + parse` for each project root.
 * `archgate review-context --run-checks` used to parse every ADR twice
 * (once for briefings, once for rule loading); the cache lets both paths
 * share the I/O. `archgate check` + `adr list` benefit too.
 *
 * Keyed by project root; the value is the (possibly still in-flight) promise,
 * so concurrent callers for the same root share a single parse pass.
 *
 * Cache lifetime is per-process — consistent with other per-invocation
 * caches in this codebase (git ls-files, repo context, install method).
 */
const parsedAdrsCache = new Map<string, Promise<ParsedAdrEntry[]>>();

/**
 * Reset the parsed-ADRs cache. For testing only — callers in production code
 * expect `parseAllAdrs` results to be stable for the life of the process.
 */
export function _resetAdrParseCache(): void {
parsedAdrsCache.clear();
}

/**
* Read and parse every ADR markdown file in the project, caching the result
* per-process. Returns entries in directory order. Unparseable files are
* silently skipped (logged at debug level).
*/
export function parseAllAdrs(projectRoot: string): Promise<ParsedAdrEntry[]> {
const cached = parsedAdrsCache.get(projectRoot);
if (cached) return cached;

const pp = projectPaths(projectRoot);
const adrsDir = pp.adrsDir;

const promise = (async () => {
let files: string[];
try {
files = readdirSync(adrsDir).filter((f) => f.endsWith(".md"));
} catch {
return [];
}

const parsed = await Promise.all(
files.map(async (file): Promise<ParsedAdrEntry | null> => {
const filePath = join(adrsDir, file);
try {
const content = await Bun.file(filePath).text();
return { file, adr: parseAdr(content, filePath) };
} catch (err) {
logDebug(`Skipping unparseable ADR: ${filePath}`, err);
return null;
}
})
);

return parsed.filter((e): e is ParsedAdrEntry => e !== null);
})();

parsedAdrsCache.set(projectRoot, promise);
return promise;
}

/**
* Discover ADRs with rules: true and dynamically import their companion .rules.ts files.
*/
Expand All @@ -152,36 +213,15 @@ export async function loadRuleAdrs(

const adrsDir = pp.adrsDir;

let files: string[];
try {
files = readdirSync(adrsDir).filter((f) => f.endsWith(".md"));
} catch {
return [];
}

// Phase 1: Read and parse all ADR files in parallel
const parsedAdrs = await Promise.all(
files.map(async (file) => {
const filePath = join(adrsDir, file);
try {
const content = await Bun.file(filePath).text();
return { file, adr: parseAdr(content, filePath) };
} catch (err) {
logDebug(`Skipping unparseable ADR: ${filePath}`, err);
return null;
}
})
);
// Phase 1: Read and parse all ADR files in parallel (cached per process)
const parsedAdrs = await parseAllAdrs(projectRoot);

// Filter to ADRs that have rules enabled
const ruleAdrs = parsedAdrs.filter(
(entry): entry is NonNullable<typeof entry> => {
if (entry === null) return false;
if (!entry.adr.frontmatter.rules) return false;
if (filterAdrId && entry.adr.frontmatter.id !== filterAdrId) return false;
return true;
}
);
const ruleAdrs = parsedAdrs.filter((entry) => {
if (!entry.adr.frontmatter.rules) return false;
if (filterAdrId && entry.adr.frontmatter.id !== filterAdrId) return false;
return true;
});

// Phase 2: Verify companion files exist and import rule sets in parallel
const ruleResults = await Promise.all(
Expand Down
Loading
Loading