diff --git a/AGENTS.md b/AGENTS.md index df4ff504a..1c6faf186 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -984,84 +984,93 @@ mock.module("./some-module", () => ({ ### Architecture - -* **Dashboard widget interval computed from terminal width and layout before API calls**: Dashboard widget interval from terminal width: \`computeOptimalInterval()\` in \`src/lib/api/dashboards.ts\` calculates chart interval before API calls. Formula: \`colWidth = floor(layout.w / 6 \* termWidth)\`, \`chartWidth = colWidth - 4 - gutterW\`, \`idealSeconds = periodSeconds / chartWidth\`. Snaps to nearest Sentry bucket (1m–1d). \`periodToSeconds()\` parses \`"24h"\`, \`"7d"\` etc. \`queryWidgetTimeseries\` uses \`params.interval ?? widget.interval\`. + +* **api-client.ts split into domain modules under src/lib/api/**: Monolithic \`src/lib/api-client.ts\` was split into domain modules under \`src/lib/api/\`: infrastructure.ts, organizations.ts, projects.ts, teams.ts, repositories.ts, issues.ts, events.ts, traces.ts, logs.ts, seer.ts, trials.ts, users.ts. Original file is now a ~100-line barrel re-export (already in \`biome.jsonc\` \`noBarrelFile\` override). Add new API functions to the appropriate domain module, not the barrel file. - -* **DSN org prefix normalization in arg-parsing.ts**: DSN org prefix normalization — four code paths: (1) \`extractOrgIdFromHost\` strips \`o\` prefix during DSN parsing. (2) \`stripDsnOrgPrefix()\` handles user-typed \`o1081365/\` in \`parseOrgProjectArg()\`. (3) \`normalizeNumericOrg()\` in \`resolve-target.ts\` resolves bare numeric IDs via DB cache or uncached API call. (4) Dashboard's \`resolveOrgFromTarget()\` pipes through \`resolveEffectiveOrg()\`. Critical: many API endpoints reject numeric org IDs with 404/403 — always normalize to slugs before API calls. + +* **cli.sentry.dev is served from gh-pages branch via GitHub Pages**: \`cli.sentry.dev\` is served from gh-pages branch via GitHub Pages. 
Craft's gh-pages target runs \`git rm -r -f .\` before extracting docs — persist extra files via \`postReleaseCommand\` in \`.craft.yml\`. Install script supports \`--channel nightly\`, downloading from the \`nightly\` release tag directly. version.json is only used by upgrade/version-check flow. - -* **GHCR versioned nightly tags for delta upgrade support**: GHCR nightly with delta upgrades: Three tag types: \`:nightly\` (rolling), \`:nightly-\<version\>\` (immutable), \`:patch-\<version\>\` (delta). Delta patches use TRDIFF10 (zstd-compressed), ~50KB vs ~29MB full. Client via \`Bun.zstdDecompressSync()\`. N-1 only, full fallback, SHA-256 verify, 60% size threshold. npm/Node excluded. Test: \`mockGhcrNightlyVersion()\`. + +* **Debug ID sourcemap matching works but each build generates independent debug IDs**: Binary build pipeline: esbuild bundles TS→minified JS+\`.map\` (not \`Bun.build()\` — collision bug oven-sh/bun#14585 + empty sourcemap \`names\`); config: \`platform:"node"\`, \`format:"esm"\`, \`external:\["bun:\*"]\`. Then \`injectDebugId()\` + \`uploadSourcemaps()\` (prefix \`~/$bunfs/root/\`), then \`Bun.build({compile:true, minify:false})\`. Debug IDs via \`globalThis.\_sentryDebugIds\`. \`binpunch\` strips unused ICU data. CRITICAL: \`SENTRY\_AUTH\_TOKEN\` is a GitHub \*\*environment\*\* secret (in \`production\`), not repo secret. Jobs needing it must declare \`environment: ${{ (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/release/')) && 'production' || '' }}\` — otherwise secret resolves empty and uploads silently skip. Build also requires \`SENTRY\_CLIENT\_ID\` env var (exits 1 without it); use \`bun run --env-file=.env.local build\` locally. - -* **Issue list auto-pagination beyond API's 100-item cap**: Sentry API silently caps \`limit\` at 100 per request. \`listIssuesAllPages()\` auto-paginates using Link headers, bounded by MAX\_PAGINATION\_PAGES (50). \`API\_MAX\_PER\_PAGE\` constant is shared across all paginated consumers. 
\`--limit\` means total results everywhere (max 1000, default 25). Org-all mode uses \`fetchOrgAllIssues()\`; explicit \`--cursor\` does single-page fetch to preserve cursor chain. + +* **Issue resolve --in grammar: release + @next + @commit sentinels**: \`sentry issue resolve --in\` grammar: (a) omitted → immediate resolve, (b) \`\<release\>\` → \`inRelease\` (monorepo strings like \`spotlight@1.2.3\` pass through as-is), (c) \`@next\` → \`inNextRelease\`, (d) \`@commit\` → auto-detect git HEAD + match against Sentry repos, (e) \`@commit:\<repo\>@\<sha\>\` → explicit. \`@commit:\` is the unambiguous anchor. \`parseResolveSpec\` returns \`ParsedResolveSpec\` (\`kind: 'static' | 'commit-auto' | 'commit-explicit'\`); \`resolveCommitSpec\` in \`src/commands/issue/resolve-commit-spec.ts\` uses \`getHeadCommit\`/\`getRepositoryName\` from \`src/lib/git.ts\` and matches against Sentry repo \`externalSlug\` or \`name\` via \`listRepositoriesCached\`. Sentry's \`statusDetails.inCommit\` requires \`{commit, repository}\` object — not bare SHA. All failure paths throw \`ValidationError\` with hints; never silent fallback. - -* **resolveProjectBySlug carries full projectData to avoid redundant getProject calls**: resolveProjectBySlug carries full projectData to skip redundant API calls: Returns \`{ org, project, projectData: SentryProject }\` from \`findProjectsBySlug()\`. \`ResolvedOrgProject\`/\`ResolvedTarget\` have optional \`projectData?\` (populated only in project-search path). Downstream commands use \`resolved.projectData ?? await getProject(org, project)\` to save ~500-800ms. + +* **Nightly delta upgrade buildNightlyPatchGraph fetches ALL patch tags — O(N) HTTP calls**: Nightly delta upgrade via GHCR versioned tags (GitHub releases are immutable): nightlies publish to GHCR as \`nightly-0.14.0-dev.1772661724\`. \`src/lib/delta-upgrade.ts\` supports stable (GitHub Releases) and nightly (GHCR). \`filterAndSortChainTags\` filters \`patch-\*\` via \`Bun.semver.order()\`. 
GHCR uses \`fetchWithRetry\` (10s+1 retry) with \`AbortSignal.any()\`; \`isExternalAbort\` skips retries for external aborts (critical for background prefetch). Patches cached to \`~/.sentry/patch-cache/\` (7-day TTL). CI tag filter: \`grep '^nightly-\[0-9]'\`. \`fetchManifest()\` throws \`UpgradeError("network\_error")\` on network failure/non-200. - -* **Self-hosted OAuth device flow requires Sentry 26.1.0+ and SENTRY\_CLIENT\_ID**: Self-hosted OAuth requires Sentry 26.1.0+ with \`SENTRY\_URL\` and \`SENTRY\_CLIENT\_ID\` env vars. Users must create a public OAuth app in Settings → Developer Settings. Client ID NOT optional for self-hosted. Fallback: \`sentry auth login --token\`. \`getSentryUrl()\`/\`getClientId()\` read lazily so URL parsing from arguments can set \`SENTRY\_URL\` after import. + +* **npm bundle requires Node.js >= 22 due to node:sqlite polyfill**: The npm package (dist/bin.cjs) requires Node.js >= 22 because the bun:sqlite polyfill uses \`node:sqlite\`. A runtime version guard in the esbuild banner catches this early. When writing esbuild banner strings in TS template literals, double-escape: \`\\\\\\\n\` in TS → \`\\\n\` in output → newline at runtime. Single \`\\\n\` produces a literal newline inside a JS string, causing SyntaxError. - -* **Sentry CLI markdown-first formatting pipeline replaces ad-hoc ANSI**: Formatters build CommonMark strings; \`renderMarkdown()\` renders to ANSI for TTY or raw markdown for non-TTY. Key helpers: \`colorTag()\`, \`mdKvTable()\`, \`mdRow()\`, \`mdTableHeader()\` (\`:\` suffix = right-aligned), \`renderTextTable()\`. \`isPlainOutput()\` checks \`SENTRY\_PLAIN\_OUTPUT\` > \`NO\_COLOR\` > \`!isTTY\`. Batch path: \`formatXxxTable()\`. Streaming path: \`StreamingTable\` (TTY) or raw markdown rows (plain). Both share \`buildXxxRowCells()\`. 
+ +* **Numeric issue ID resolution returns org:undefined despite API success**: Numeric issue ID resolution in \`resolveNumericIssue()\`: (1) try DSN/env/config for org, (2) if found use \`getIssueInOrg(org, id)\` with region routing, (3) else fall back to unscoped \`getIssue(id)\`, (4) extract org from \`issue.permalink\` via \`parseSentryUrl\` as final fallback. \`parseSentryUrl\` handles path-based (\`/organizations/{org}/...\`) and subdomain-style URLs. \`matchSubdomainOrg()\` filters region subdomains by requiring slug length > 2. Self-hosted uses path-based only. - -* **Sentry dashboard API rejects discover/transaction-like widget types — use spans**: Sentry API dataset gotchas: (1) Events/Explore API accepts \`spans\`, \`transactions\`, \`logs\`, \`errors\`, \`discover\`. \`spansIndexed\` is INVALID (500). Valid list in \`EVENTS\_API\_DATASETS\`. (2) Dashboard \`widgetType\`: \`discover\` and \`transaction-like\` rejected as deprecated — use \`spans\`. \`WIDGET\_TYPES\` (active) vs \`ALL\_WIDGET\_TYPES\` (includes deprecated for parsing). Tests must use \`error-events\` not \`discover\`. (3) \`sort\` param only on \`spans\` dataset. (4) \`tracemetrics\` uses comma-separated aggregates; only line/area/bar/table/big\_number displays. + +* **repo\_cache SQLite table for offline Sentry repo lookups**: Schema v14 adds \`repo\_cache\` table in \`src/lib/db/schema.ts\` + helpers in \`src/lib/db/repo-cache.ts\` (7-day TTL). \`listAllRepositories(org)\` in \`src/lib/api/repositories.ts\` paginates through \`listRepositoriesPaginated\` using \`API\_MAX\_PER\_PAGE\` and \`MAX\_PAGINATION\_PAGES\` — never use the unpaginated \`listRepositories\` for cache-backed lookups (silently caps at ~25). \`listRepositoriesCached(org)\` wraps it with cache-first lookup and a try/catch around \`setCachedRepos\` so read-only databases (macOS \`sudo brew install\`) don't crash commands whose API fetch already succeeded. 
Used by \`@commit\` resolver to match git origin \`owner/repo\` against Sentry repo \`externalSlug\` or \`name\`. - -* **Sentry issue stats field: time-series controlled by groupStatsPeriod**: Issue stats and list layout: \`stats\` depends on \`groupStatsPeriod\` (\`""\`, \`"14d"\`, \`"24h"\`, \`"auto"\`). Critical: \`count\` is period-scoped — use \`lifetime.count\` for true total. \`--compact\` is tri-state (\`optional: true\`): explicit overrides, \`undefined\` triggers \`shouldAutoCompact(rowCount)\` — compact if \`3N + 3 > termHeight\`. TREND column hidden < 100 cols. Stricli boolean flags with \`optional: true\` produce \`boolean | undefined\` enabling this auto-detect pattern. + +* **Seer trial prompt uses middleware layering in bin.ts error handling chain**: Seer trial prompt via error middleware layering: \`bin.ts\` chain is \`main() → executeWithAutoAuth() → executeWithSeerTrialPrompt() → runCommand()\`. Seer trial prompts (\`no\_budget\`/\`not\_enabled\`) caught by inner wrapper; auth errors bubble to outer. Trial API: \`GET /api/0/customers/{org}/\` → \`productTrials\[]\` (prefer \`seerUsers\`, fallback \`seerAutofix\`). Start: \`PUT /api/0/customers/{org}/product-trial/\`. SaaS-only; self-hosted 404s gracefully. \`ai\_disabled\` excluded. \`startSeerTrial\` accepts \`category\` from trial object — don't hardcode. - -* **Sentry log IDs are UUIDv7 — enables deterministic retention checks**: Sentry log IDs are UUIDv7 (first 12 hex = ms timestamp, version char \`7\` at pos 13). Traces and event IDs are NOT v7. \`decodeUuidV7Timestamp()\` and \`ageInDaysFromUuidV7()\` in \`src/lib/hex-id.ts\` return null for non-v7 IDs, safe to call unconditionally. Enables deterministic 'past retention' messages instead of 'may have been deleted'. Wired in \`recoverHexId\` (short-circuit) and \`log/view.ts#throwNotFoundError\`. \`RETENTION\_DAYS.log = 90\` in \`src/lib/retention.ts\`; traces/events are \`null\` (plan-dependent). 
\`LOG\_RETENTION\_PERIOD\` is DERIVED as \`\` \`${RETENTION\_DAYS.log}d\` \`\` — never hardcode \`'90d'\` or \`'90 days'\`. Shared hex primitives (\`HEX\_ID\_RE\`, \`SPAN\_ID\_RE\`, \`UUID\_DASH\_RE\`, \`LEADING\_HEX\_RE\`, \`MIDDLE\_ELLIPSIS\_RE\`, \`HexEntityType\`) live in \`hex-id.ts\` — \`HexEntityType\` stays here (not recovery.ts) to avoid inverted dependency. + +* **SQLite DB functions are synchronous — async signatures are historical artifacts**: All \`src/lib/db/\` functions do synchronous SQLite operations. Many have \`async\` signatures — historical artifact from PR #89 (JSON file→SQLite migration). Safe to convert to sync. Legitimately async exceptions: \`clearAuth()\` (cache dir cleanup), \`getCachedDetection()\`/\`getCachedProjectRoot()\`/\`setCachedProjectRoot()\` (stat for mtime), \`refreshToken()\`/\`performTokenRefresh()\` (HTTP calls). - -* **SKILL.md is fully generated — edit source files, not output**: SKILL.md is fully generated — edit sources not output: Skill files under \`plugins/sentry-cli/skills/sentry-cli/\` generated by \`bun run generate:skill\`. CI auto-commits. Edit sources: (1) \`docs/src/content/docs/agent-guidance.md\` for SKILL.md content, (2) \`src/commands/\*/\` flag \`brief\` strings for reference descriptions, (3) \`docs/src/content/docs/commands/\*.md\` for examples. \`bun run check:skill\` fails if stale. +### Decision - -* **Stricli route errors are uninterceptable — only post-run detection works**: Stricli error gaps: (1) Route failures uninterceptable — Stricli writes stderr, returns \`ExitCode.UnknownCommand\` (-5 / 251 in Bun). Only post-\`run()\` \`process.exitCode\` check works. (2) \`OutputError\` calls \`process.exit()\` immediately, bypassing telemetry. (3) \`defaultCommand: 'help'\` bypasses built-in fuzzy matching — fixed by \`resolveCommandPath()\` in \`introspect.ts\` with \`fuzzyMatch()\` (up to 3 suggestions). JSON includes \`suggestions\` array. (4) Plural alias detection in \`app.ts\`. 
+ +* **Raw markdown output for non-interactive terminals, rendered for TTY**: Markdown-first output pipeline: custom renderer in \`src/lib/formatters/markdown.ts\` walks \`marked\` tokens to produce ANSI-styled output. Commands build CommonMark using helpers (\`mdKvTable()\`, \`mdRow()\`, \`colorTag()\`, \`escapeMarkdownCell()\`, \`safeCodeSpan()\`) and pass through \`renderMarkdown()\`. \`isPlainOutput()\` precedence: \`SENTRY\_PLAIN\_OUTPUT\` > \`NO\_COLOR\` > \`FORCE\_COLOR\` > \`!isTTY\`. \`--json\` always outputs JSON. Colors defined in \`COLORS\` object in \`colors.ts\`. Tests run non-TTY so assertions match raw CommonMark; use \`stripAnsi()\` helper for rendered-mode assertions. - -* **Three Sentry APIs for span custom attributes with different capabilities**: Three Sentry span APIs: (1) \`/trace/{traceId}/\` — hierarchical tree; \`additional\_attributes\` enumerates requested attrs; returns \`measurements\` (zero-filled on non-browser, stripped by \`filterSpanMeasurements()\`). (2) \`/projects/{org}/{project}/trace-items/{itemId}/\` — single span full detail; ALL attributes as \`{name,type,value}\[]\` automatically. (3) \`/events/?dataset=spans\&field=X\` — list/search; explicit \`field\` params. \`--fields\` flag filters JSON output AND requests extra API fields via \`extractExtraApiFields()\`. \`FIELD\_GROUP\_ALIASES\` supports shorthand expansion. +### Gotcha -### Decision + +* **@sentry/api SDK passes Request object to custom fetch — headers lost on Node.js**: @sentry/api SDK calls \`\_fetch(request)\` with no init object. In \`authenticatedFetch\`, \`init\` is undefined so \`prepareHeaders\` creates empty headers — on Node.js this strips Content-Type (HTTP 415). Fix: fall back to \`input.headers\` when \`init\` is undefined. Use \`unwrapPaginatedResult\` (not \`unwrapResult\`) to access the Response's Link header for pagination. \`per\_page\` is not in SDK types; cast query to pass it at runtime. 
- -* **400 Bad Request from Sentry API indicates a CLI bug, not a user error**: Telemetry 400 convention: 400 = CLI bug (capture to Sentry), 401-499 = user error (skip). \`isUserApiError()\` uses \`> 400\` (exclusive). \`isExpectedUserError()\` guard in \`app.ts\` skips ContextError, ResolutionError, ValidationError, SeerError, 401-499 ApiErrors. Captures 400, 5xx, unknown. Skipped errors → breadcrumbs. For \`ApiError\`, call \`Sentry.setContext('api\_error', {...})\` before \`captureException\` — SDK doesn't auto-capture custom properties. + +* **Completion drift test requires new issue-positional commands be registered**: \`test/lib/completions.property.test.ts\` scans the route tree for commands whose positional placeholder contains "org" or equals "issue", then asserts every such command appears in \`ORG\_PROJECT\_COMMANDS\` or \`ORG\_ONLY\_COMMANDS\` (in \`src/lib/complete.ts\`). When adding a new issue subcommand (resolve, unresolve, merge, etc.) using \`issueIdPositional\`, register it in \`ORG\_PROJECT\_COMMANDS\` or the test fails with \`expect(combined.has(cmd)).toBe(true)\` → false. - -* **CLI UX philosophy: auto-recover when intent is clear, warn gently**: UX principle: don't fail when intent is clear — do the intent and nudge via \`log.warn()\` to stderr. Keep errors in Sentry telemetry for visibility (e.g., SeerError for upsell tracking). Two recovery tiers: (1) auto-correct when semantics identical (AND→space), (2) auto-recover with warning when semantics differ (OR→space, warn about union→intersection). Only throw when intent can't be fulfilled. Model after \`gh\` CLI. AI agents are primary consumers constructing natural OR/AND queries. + +* **Cross-org merge check must reject undefined orgs, not filter them**: In \`src/commands/issue/merge.ts\` \`resolveAllIssues\`, bare numeric issue IDs without DSN/config context resolve with \`org: undefined\`. 
Filtering \`undefined\` out of the org set before the cross-org check lets mixed-org merges slip through — call goes to known org's endpoint and fails with confusing API error instead of friendly "Cannot merge issues across organizations". Require every resolved issue to have defined org (or reject undefined) before proceeding. Applies anywhere resolved entities must share an org. - -* **Trace-related commands must handle project consistently across CLI**: Trace/log commands and project scoping: \`getDetailedTrace\` accepts optional numeric \`projectId\` (not hardcoded \`-1\`). Resolve slug→ID via \`getProject()\`. \`formatSimpleSpanTree\` shows orphan annotation only when \`projectFiltered\` is set. \`buildProjectQuery()\` in \`arg-parsing.ts\` prepends \`project:\<slug\>\` to queries (used by \`trace/logs.ts\`, \`log/list.ts\`). Multi-project: \`--query 'project:\[cli,backend]'\`. Trace-logs endpoint (\`/organizations/{org}/trace-logs/\`) is org-scoped — uses \`resolveOrg()\` not \`resolveOrgAndProject()\`. Endpoint is PRIVATE (no \`@sentry/api\` types); hand-written \`TraceLogSchema\` in \`src/types/sentry.ts\` required. + +* **Install script: BSD sed and awk JSON parsing breaks OCI digest extraction**: The install script parses OCI manifests with awk (no jq). Key trap: BSD sed \`\n\` is literal, not newline. Fix: single awk pass tracking last-seen \`"digest"\`, printing when \`"org.opencontainers.image.title"\` matches target. The config digest (\`sha256:44136fa...\`) is a 2-byte \`{}\` blob — downloading it instead of the real binary causes \`gunzip: unexpected end of file\`. 
Track a \`hasSuccessfulRegion\` boolean on any \`"fulfilled"\` settlement. Only re-throw 403 \`ApiError\` when \`!hasSuccessfulRegion && lastScopeError\`. - -* **Bugbot flags defensive null-checks as dead code — keep them with JSDoc justification**: Bugbot/Seer false positives to dismiss with JSDoc: (1) defensive null-checks flagged as dead code. (2) stderr spinner during \`--json\` — progress is stderr, JSON is stdout. (3) \`Number(projectData.id)\` without \`isFinite\` — Sentry project IDs always numeric strings. Real bugs bots catch: \`mock.module()\` in non-isolated test files, missing lower-bound assertions on tracked values. PR review workflow: separate fixup commit per round (not amend), reply via \`gh api\` REST, resolve threads via GraphQL \`resolveReviewThread\`. + +* **Multiple mockFetch calls replace each other — use unified mocks for multi-endpoint tests**: Bun test mocking gotchas: (1) \`mockFetch()\` replaces \`globalThis.fetch\` — calling twice replaces the first. Use one unified mock dispatching by URL. (2) \`mock.module()\` pollutes module registry for ALL subsequent files — tests using it must live in \`test/isolated/\` and run via \`test:isolated\`. (3) For \`Bun.spawn\`, use direct property assignment in \`beforeEach\`/\`afterEach\`. - -* **Bun mock.module for node:tty requires default export and class stubs**: Bun testing gotchas: (1) \`mock.module()\` for CJS built-ins needs \`default\` re-export plus named exports, top-level BEFORE \`await import()\` — static imports capture bindings early. Lives in \`test/isolated/\`. (2) Destructuring imports capture binding at load; verify via call-count > 0. (3) \`Bun.mmap()\` always opens PROT\_WRITE — use \`new Uint8Array(await Bun.file(path).arrayBuffer())\`. (4) Wrap \`Bun.which()\` with optional \`pathEnv\` for deterministic testing. (5) Mocking \`@sentry/node-core/light\`: \`startSpan\` must pass mock span to callback: \`startSpan: (\_, fn) => fn({ setStatus(){}, setAttribute(){}, end(){} })\`. 
+ +* **Sentry bulk merge --into is a preference, not a guarantee**: Sentry bulk merge API quirks: \`PUT /api/0/organizations/{org}/issues/?id=X\&id=Y\` with \`{merge:1}\` picks canonical parent by event count, not input order — \`--into\` is a preference only. Commands must check API response's \`parent\` against requested preference and warn when Sentry picked differently. Strip \`org/\` prefix from user input before comparing to \`issue.shortId\`. Empty-result merges return 204, not 200. Server-side fingerprint rules only apply to NEW events — existing fragmented issues must be merged via this endpoint (async, prefer org-scoped); verify by re-fetching child's \`id\`. - -* **Sentry issue list --query passes OR/AND operators to API causing 400**: Hex ID recovery pipeline (\`src/lib/hex-id-recovery.ts\`) heals malformed IDs across \`event/trace/log/span view\`: (1) \`preNormalize\` strips URL fragment prefixes (\`span-\`, \`event-\`, \`txn-\`, \`trace-\`) in a loop for idempotency, THEN checks sentinel values (\`null\`, \`latest\`) on stripped result, then handles UUID dashes. (2) \`tryPreNormalizedValidId\` — if \`cleaned\` is already a valid full-length hex ID, return immediately. (3) \`stripTrailingNonHex\` requires the stripped tail to contain non-hex chars (prevents 32-hex trace splitting). (4) \`looksLikeSlug(rawInput)\` runs BEFORE fuzzy lookup. (5) UUIDv7 retention short-circuit for expired logs. (6) Cross-entity redirect: 16-hex → \`trace view\` resolves parent via \`findTraceBySpanId\`. \`MIN\_FUZZY\_PREFIX=8\` matches Sentry UI. Adapters use \`listSpans\`/\`listTransactions\`/\`listLogs\`; return \`\[]\` when \`ctx.project\` absent. Validation deferred until after target resolution so adapters have org/project context. \`handleRecoveryResult\` emits \`log.warn\` on success. + +* **useTestConfigDir without isolateProjectRoot causes DSN scanning of repo tree**: \`useTestConfigDir()\` creates temp dirs under \`.test-tmp/\` in the repo tree. 
Without \`{ isolateProjectRoot: true }\`, \`findProjectRoot\` walks up and finds the repo's \`.git\`, causing DSN detection to scan real source code and trigger network calls against test mocks (timeouts). Always pass \`isolateProjectRoot: true\` when tests exercise \`resolveOrg\`, \`detectDsn\`, or \`findProjectRoot\`. ### Pattern - -* **Codecov patch coverage only counts test:unit and test:isolated, not E2E**: CI coverage merges \`test:unit\` (\`test/lib test/commands test/types --coverage\`) and \`test:isolated\` (\`test/isolated --coverage\`) into \`coverage/merged.lcov\`. E2E tests (\`test/e2e\`) are NOT included in coverage reports. So func tests that spy on exports (e.g., \`spyOn(apiClient, 'getLogs')\`) give zero coverage to the mocked function's body. To cover \`api-client.ts\` function bodies in unit tests, mock \`globalThis.fetch\` + \`setOrgRegion()\` + \`setAuthToken()\` and call the real function. + +* **apiRequestToRegion throws ApiError on 204/205 — don't return null as T**: apiRequestToRegion throws ApiError on 204/205 — don't return null as T: Empty responses (204/205) throw \`ApiError(204)\` rather than returning \`null as T\`, keeping the \`T\` return type sound. Callers expecting 204 (e.g. Sentry's bulk mutate for "no matching issues") must catch \`ApiError\` with \`status === 204\` and translate to a domain-specific error. Don't use \`null as T\` to signal empty bodies — TypeScript hides the null and causes runtime crashes on property access. + + +* **findProjectsByPattern as fuzzy fallback for exact slug misses**: When \`findProjectsBySlug\` returns empty (no exact match), use \`findProjectsByPattern\` as a fallback to suggest similar projects. \`findProjectsByPattern\` does bidirectional word-boundary matching (\`matchesWordBoundary\`) against all projects in all orgs — the same logic used for directory name inference. 
In the \`project-search\` handler, call it after the exact miss, format matches as \`\<org\>/\<project\>\` suggestions in the \`ResolutionError\`. This avoids a dead-end error for typos like 'patagonai' when 'patagon-ai' exists. Note: \`findProjectsByPattern\` makes additional API calls (lists all projects per org), so only call it on the failure path. + +* **Issue command hint: pass subcommand name only, not full path**: \`resolveIssue({command, commandBase})\` builds error hints via \`buildCommandHint(command, issueId, base="sentry issue")\` which concatenates \`${base} ${command} ...\`. Pass the subcommand name only (e.g. \`"resolve"\`, \`"view"\`) and let \`commandBase\` default to \`"sentry issue"\`. Passing \`command: "issue resolve"\` with \`commandBase: "issue"\` produces doubled prefixes like \`"issue issue resolve \<id\>"\`. Pattern followed by \`view\`/\`events\`/\`explain\`/\`plan\`/\`resolve\`/\`unresolve\`/\`merge\` in \`src/commands/issue/\`. + +* **Preserve ApiError type so classifySilenced can silence 4xx errors**: \`classifySilenced\` in \`src/lib/error-reporting.ts\` only silences \`ApiError\` with status 401-499 — wrapping API errors in generic \`CliError\` loses \`status\` and causes 403s etc. to be captured as Sentry issues. Re-throw as \`ApiError\` preserving \`status\`/\`detail\`/\`endpoint\`. Keep message terse: \`ApiError.format()\` appends \`detail\`/\`endpoint\` on own lines, so including detail in message causes duplicated output. Pattern: \`throw new ApiError(\\\`Failed to X (HTTP ${error.status}).\\\`, error.status, error.detail, error.endpoint)\`. Also: ValidationError without a \`field\` arg previously produced empty \`cli\_error.kind\`, fingerprint-collapsing all unfielded ValidationErrors into one group. Fix (PR #776): \`setGroupingTags\` falls back to normalized message prefix via \`extractMessagePrefix\`. When adding ValidationError sites, pass \`field\` or ensure distinctive message prefix. 
- -* **I/O concurrency limits belong at the call site, not in generic combinators**: Concurrency limits for filesystem operations (stat, read) should live in the module that makes the I/O calls, not in generic utility functions like \`anyTrue()\`. Pattern: define a module-scoped \`pLimit()\` instance with a named constant (e.g., \`STAT\_CONCURRENCY = 32\` in \`project-root.ts\`, \`CACHE\_IO\_CONCURRENCY\` in \`response-cache.ts\`, \`pLimit(50)\` in \`code-scanner.ts\`). This keeps combinators pure and makes the budget explicit at the I/O boundary. Value guidance: stat() calls are lighter than full reads — use ~32 for stats vs ~50 for file reads, staying well below macOS's typical 256 FD ceiling. A shared module-level limiter caps total concurrent I/O across all call sites within that module. + +* **Resolve --into flag through resolveIssue for alias parity with positionals**: Merge/batch commands with a \`--into \\` (or similar canonical-parent) flag should pass it through \`resolveIssue()\` the same way positional args are resolved, then compare by numeric \`id\` — not by string-matching against \`issue.shortId\`. This gives alias support (\`f-g\`, \`fr-a3\`) and org-qualified forms (\`my-org/CLI-G5\`) for free, and avoids asymmetry where positionals accept aliases but flags don't. Makes \`orderForMerge\` async. Pattern in \`src/commands/issue/merge.ts\`. - -* **Pagination contextKey must include all query-varying parameters with escaping**: Pagination \`contextKey\` must encode every query-varying parameter (sort, query, period) with \`escapeContextKeyValue()\` (replaces \`|\` with \`%7C\`). Provide fallback before escaping since \`flags.period\` may be \`undefined\` in tests despite defaults: \`flags.period ? escapeContextKeyValue(flags.period) : "90d"\`. \`DEFAULT\_PERIOD = "90d"\` is a module constant. Compare via \`formatTimeRangeFlag(flags.period) !== DEFAULT\_PERIOD\` or \`appendPeriodHint()\` from \`time-range.ts\`. 
+ +* **Sentry SDK tree-shaking patches must be regenerated via bun patch workflow**: Sentry SDK tree-shaking via bun patch: \`patchedDependencies\` in \`package.json\` tree-shakes unused exports from \`@sentry/core\` and \`@sentry/node-core\` (AI integrations, feature flags, profiler). Bumping SDK versions: (1) remove old patches/entries, (2) \`rm -rf ~/.bun/install/cache/@sentry\` (edits persist in cache), (3) \`bun install\` fresh, (4) \`bun patch @sentry/core\`, edit, \`bun patch --commit\`; repeat for node-core. Preserved exports: \`\_INTERNAL\_safeUnref\`, \`\_INTERNAL\_safeDateNow\` (core), \`nodeRuntimeMetricsIntegration\` (node-core). Manual \`git diff\` patches fail — always use \`bun patch --commit\`. - -* **Redact sensitive flags in raw argv before sending to telemetry**: Telemetry argv redaction and context: \`withTelemetry\` calls \`initTelemetryContext()\` — sets user ID, email, runtime, is\_self\_hosted. Org from \`getDefaultOrganization()\` (SQLite). \`SENSITIVE\_FLAGS\` in \`telemetry.ts\` (currently \`token\`) — scans for \`--token\`/\`-token\`, replaces value with \`\[REDACTED]\`. Handles \`--flag value\` and \`--flag=value\`. Command tag format: \`sentry.issue.list\` (dot-joined with \`sentry.\` prefix). + +* **Shared pagination infrastructure: buildPaginationContextKey and parseCursorFlag**: Bidirectional pagination via cursor stack in \`src/lib/db/pagination.ts\`. \`resolveCursor(flag, key, contextKey)\` maps keywords (next/prev/first/last) to \`{cursor, direction}\`. \`advancePaginationState\` manages stack — back-then-forward truncates stale entries. \`hasPreviousPage\` checks \`page\_index > 0\`. \`paginationHint()\` builds nav strings. All list commands use this. Critical: \`resolveCursor()\` must be called inside \`org-all\` override closures, not before \`dispatchOrgScopedList\`. 
- -* **Shared recovery-org resolution helper for hex-ID recovery paths**: \`tryResolveRecoveryOrg(parsed)\` in \`src/lib/hex-id-recovery.ts\` resolves \`ParsedOrgProject\` → \`{org, project?} | null\` for recovery adapters. Calls \`resolveEffectiveOrg()\` to normalize numeric org IDs (\`o1234567\` → slug) — critical because many Sentry API endpoints reject numeric org IDs with 404/403. Skips \`auto-detect\` and \`project-search\` (expensive DSN scans without clear benefit for fuzzy prefix lookup which needs a project scope anyway). Re-throws \`AuthError\` (triggers auto-login), swallows other errors defensively. Both \`event/view.ts\` and \`trace-target.ts\` consume this shared helper instead of duplicating the switch — \`trace-target\` wraps it by first calling \`parseOrgProjectArg(targetArg)\`. + +* **Telemetry instrumentation pattern: withTracingSpan + captureException for handled errors**: For graceful-fallback operations, use \`withTracingSpan\` from \`src/lib/telemetry.ts\` for child spans and \`captureException\` from \`@sentry/bun\` (named import — Biome forbids namespace imports) with \`level: 'warning'\` for non-fatal errors. \`withTracingSpan\` uses \`onlyIfParent: true\` — no-op without active transaction. User-visible fallbacks use \`log.warn()\` not \`log.debug()\`. Several commands bypass telemetry by importing \`buildCommand\` from \`@stricli/core\` directly instead of \`../../lib/command.js\` (trace/list, trace/view, log/view, api.ts, help.ts). - -* **Stricli parse functions can perform validation and sanitization at flag-parse time**: Stricli's \`parse\` fn on \`kind: "parsed"\` flags runs during argument parsing before \`func()\`. Can throw errors (including \`ValidationError\`) and log warnings. Current uses: \`parseCursorFlag\`, \`sanitizeQuery\`, \`parsePeriod\` (returns \`TimeRange\`), \`parseSort\`/\`parseSortFlag\`, \`numberParser\`/\`parseLimit\`. 
Optional period flags: \`flags.period\` is \`TimeRange | undefined\` — commands use \`TIME\_RANGE\_\*\` constants as defaults. \`formatTimeRangeFlag()\` converts back to display strings. \`appendPeriodHint(parts, period, flagName)\` in \`time-range.ts\` encapsulates \`if (formatted !== DEFAULT\_PERIOD) parts.push(...)\` across 4+ hint builders. + +* **Testing Stricli command func() bodies via spyOn mocking**: To unit-test a Stricli command's \`func()\` body: (1) \`const func = await cmd.loader()\`, (2) \`func.call(mockContext, flags, ...args)\` with mock \`stdout\`, \`stderr\`, \`cwd\`, \`setContext\`. (3) \`spyOn\` namespace imports to mock dependencies (e.g., \`spyOn(apiClient, 'getLogs')\`). The \`loader()\` return type union causes \`.call()\` LSP errors — these are false positives that pass \`tsc --noEmit\`. When API functions are renamed (e.g., \`getLog\` → \`getLogs\`), update both spy target name AND mock return shape (single → array). Slug normalization (\`normalizeSlug\`) replaces underscores with dashes but does NOT lowercase — test assertions must match original casing (e.g., \`'CAM-82X'\` not \`'cam-82x'\`). 
diff --git a/docs/src/content/docs/contributing.md b/docs/src/content/docs/contributing.md index a41ccc372..de345bd14 100644 --- a/docs/src/content/docs/contributing.md +++ b/docs/src/content/docs/contributing.md @@ -53,7 +53,7 @@ cli/ │ │ ├── cli/ # defaults, feedback, fix, setup, upgrade │ │ ├── dashboard/ # list, view, create, add, edit, delete │ │ ├── event/ # view, list -│ │ ├── issue/ # list, events, explain, plan, view +│ │ ├── issue/ # list, events, explain, plan, view, resolve, unresolve, merge │ │ ├── log/ # list, view │ │ ├── org/ # list, view │ │ ├── project/ # create, delete, list, view diff --git a/docs/src/fragments/commands/issue.md b/docs/src/fragments/commands/issue.md index ebc503342..f6e2ec98f 100644 --- a/docs/src/fragments/commands/issue.md +++ b/docs/src/fragments/commands/issue.md @@ -104,3 +104,59 @@ sentry issue plan 123456789 --cause 0 - GitHub integration configured with repository access - Code mappings set up to link stack frames to source files - Root cause analysis must be completed (`sentry issue explain`) before generating a plan + +### Resolve and reopen issues + +```bash +# Resolve immediately (no regression tracking) +sentry issue resolve CLI-G5 + +# Resolve in a specific release — future events on newer releases are +# regression-flagged +sentry issue resolve CLI-G5 --in 0.26.1 + +# Monorepo-style releases work too (no special parsing) +sentry issue resolve CLI-G5 --in spotlight@1.2.3 + +# Resolve in the next release (tied to current HEAD) +sentry issue resolve CLI-G5 --in @next +sentry issue resolve CLI-G5 -i @next + +# Resolve in the current git HEAD — auto-detects the Sentry repo from +# your git origin remote (hard-errors if it can't) +sentry issue resolve CLI-G5 --in @commit + +# Explicit commit + repo (no git inspection; repo must be registered in Sentry) +sentry issue resolve CLI-G5 --in @commit:getsentry/cli@abc123def + +# Reopen a resolved issue +sentry issue unresolve CLI-G5 +sentry issue reopen CLI-G5 # alias +``` + 
+:::note[How `@commit` auto-detects]
+`--in @commit` reads `HEAD` and the `origin` remote, parses the remote as
+`owner/repo`, then looks it up in your org's Sentry repositories (cached
+locally for 7 days). If any step fails, the command stops with a clear
+error pointing you at `--in @commit:<owner/repo>@<sha>` or `sentry repo list <org>/<project>`
+— no silent fallback to a different resolution mode.
+:::
+
+### Merge fragmented issues
+
+Consolidate multiple issues (e.g. same logical error split by Sentry's
+default stack-trace grouping) into a single canonical group:
+
+```bash
+# Let Sentry auto-pick the parent (typically the largest by event count)
+sentry issue merge CLI-K9 CLI-15H CLI-15N
+
+# Pin the canonical parent explicitly — accepts the same formats as
+# positional args, including org-qualified and project-alias forms
+sentry issue merge CLI-K9 CLI-15H CLI-15N --into CLI-K9
+sentry issue merge my-org/CLI-K9 my-org/CLI-15H --into my-org/CLI-K9
+sentry issue merge cli-k9 cli-15h --into cli-k9 # alias form
+
+# Cross-org merges are rejected — all issues must share an organization
+# Non-error issue types (performance, info, etc.)
cannot be merged +``` diff --git a/plugins/sentry-cli/skills/sentry-cli/SKILL.md b/plugins/sentry-cli/skills/sentry-cli/SKILL.md index 8863d12fe..553f9b9ae 100644 --- a/plugins/sentry-cli/skills/sentry-cli/SKILL.md +++ b/plugins/sentry-cli/skills/sentry-cli/SKILL.md @@ -298,6 +298,9 @@ Manage Sentry issues - `sentry issue explain ` — Analyze an issue's root cause using Seer AI - `sentry issue plan ` — Generate a solution plan using Seer AI - `sentry issue view ` — View details of a specific issue +- `sentry issue resolve ` — Mark an issue as resolved +- `sentry issue unresolve ` — Reopen a resolved issue +- `sentry issue merge ` — Merge 2+ issues into a single canonical group → Full flags and examples: `references/issue.md` diff --git a/plugins/sentry-cli/skills/sentry-cli/references/issue.md b/plugins/sentry-cli/skills/sentry-cli/references/issue.md index 48ea343f7..8da412a66 100644 --- a/plugins/sentry-cli/skills/sentry-cli/references/issue.md +++ b/plugins/sentry-cli/skills/sentry-cli/references/issue.md @@ -165,4 +165,67 @@ sentry issue view FRONT-ABC sentry issue view FRONT-ABC -w ``` +### `sentry issue resolve ` + +Mark an issue as resolved + +**Flags:** +- `-i, --in - Resolve in a release, next release, or commit ('' | '@next' | '@commit' | '@commit:@')` + +**Examples:** + +```bash +# Resolve immediately (no regression tracking) +sentry issue resolve CLI-G5 + +# Resolve in a specific release — future events on newer releases are +# regression-flagged +sentry issue resolve CLI-G5 --in 0.26.1 + +# Monorepo-style releases work too (no special parsing) +sentry issue resolve CLI-G5 --in spotlight@1.2.3 + +# Resolve in the next release (tied to current HEAD) +sentry issue resolve CLI-G5 --in @next +sentry issue resolve CLI-G5 -i @next + +# Resolve in the current git HEAD — auto-detects the Sentry repo from +# your git origin remote (hard-errors if it can't) +sentry issue resolve CLI-G5 --in @commit + +# Explicit commit + repo (no git inspection; repo must be 
registered in Sentry)
+sentry issue resolve CLI-G5 --in @commit:getsentry/cli@abc123def
+
+# Reopen a resolved issue
+sentry issue unresolve CLI-G5
+sentry issue reopen CLI-G5 # alias
+```
+
+### `sentry issue unresolve <issue>`
+
+Reopen a resolved issue
+
+### `sentry issue merge <issue> <issue>...`
+
+Merge 2+ issues into a single canonical group
+
+**Flags:**
+- `-i, --into <issue> - Prefer this issue as the canonical parent (must match one of the provided IDs)`
+
+**Examples:**
+
+```bash
+# Let Sentry auto-pick the parent (typically the largest by event count)
+sentry issue merge CLI-K9 CLI-15H CLI-15N
+
+# Pin the canonical parent explicitly — accepts the same formats as
+# positional args, including org-qualified and project-alias forms
+sentry issue merge CLI-K9 CLI-15H CLI-15N --into CLI-K9
+sentry issue merge my-org/CLI-K9 my-org/CLI-15H --into my-org/CLI-K9
+sentry issue merge cli-k9 cli-15h --into cli-k9 # alias form
+
+# Cross-org merges are rejected — all issues must share an organization
+# Non-error issue types (performance, info, etc.) cannot be merged
+```
+
 All commands also support `--json`, `--fields`, `--help`, `--log-level`, and `--verbose` flags.
diff --git a/src/commands/issue/index.ts b/src/commands/issue/index.ts index 9fdb513d8..4c87591df 100644 --- a/src/commands/issue/index.ts +++ b/src/commands/issue/index.ts @@ -2,7 +2,10 @@ import { buildRouteMap } from "../../lib/route-map.js"; import { eventsCommand } from "./events.js"; import { explainCommand } from "./explain.js"; import { listCommand } from "./list.js"; +import { mergeCommand } from "./merge.js"; import { planCommand } from "./plan.js"; +import { resolveCommand } from "./resolve.js"; +import { unresolveCommand } from "./unresolve.js"; import { viewCommand } from "./view.js"; export const issueRoute = buildRouteMap({ @@ -12,24 +15,35 @@ export const issueRoute = buildRouteMap({ explain: explainCommand, plan: planCommand, view: viewCommand, + resolve: resolveCommand, + unresolve: unresolveCommand, + merge: mergeCommand, }, + // `reopen` is a friendlier synonym for `unresolve` — shipped as an alias + // so either command works identically. + aliases: { reopen: "unresolve" }, defaultCommand: "view", docs: { brief: "Manage Sentry issues", fullDescription: "View and manage issues from your Sentry projects.\n\n" + "Commands:\n" + - " list List issues in a project\n" + - " events List events for a specific issue\n" + - " view View details of a specific issue\n" + - " explain Analyze an issue using Seer AI\n" + - " plan Generate a solution plan using Seer AI\n\n" + - "Magic selectors (available for view, events, explain, plan):\n" + + " list List issues in a project\n" + + " events List events for a specific issue\n" + + " view View details of a specific issue\n" + + " explain Analyze an issue using Seer AI\n" + + " plan Generate a solution plan using Seer AI\n" + + " resolve Mark an issue as resolved (optionally in a release)\n" + + " unresolve Reopen a resolved issue (alias: reopen)\n" + + " merge Merge 2+ issues into a single group\n\n" + + "Magic selectors (available for view, events, explain, plan, resolve, unresolve):\n" + " @latest Most recent 
unresolved issue\n" + " @most_frequent Issue with the highest event frequency\n\n" + "Examples:\n" + " sentry issue view @latest\n" + " sentry issue events CLI-G\n" + + " sentry issue resolve CLI-12Z --in 0.26.1\n" + + " sentry issue merge CLI-K9 CLI-15H CLI-15N\n" + " sentry issue explain @most_frequent\n" + " sentry issue plan my-org/@latest\n\n" + "Alias: `sentry issues` → `sentry issue list`", diff --git a/src/commands/issue/merge.ts b/src/commands/issue/merge.ts new file mode 100644 index 000000000..b56ea9b13 --- /dev/null +++ b/src/commands/issue/merge.ts @@ -0,0 +1,329 @@ +/** + * sentry issue merge + * + * Merge 2+ issues into a single canonical group. Sentry's web UI has this + * as a bulk action; here we expose it as a direct command. + * + * ## Flow + * + * 1. Collect 2+ issue args (variadic positional) + * 2. Resolve each to a numeric group ID + org via `resolveIssue` + * 3. Verify all issues are in the same org (cross-org merge rejected by API) + * 4. Optionally pin the canonical parent via `--into` + * 5. Call `mergeIssues(org, groupIds)` — the API auto-picks the parent + * unless we pre-sort with our chosen parent first + * 6. Emit the merge result + * + * Sentry picks the parent by size (largest by event count). When `--into` + * is provided we sort the selected issue to the front of the list — Sentry + * honors first-in-list as the parent when multiple issues have equivalent + * weight. See the Sentry source at `src/sentry/api/helpers/group_index`. 
+ */ + +import type { SentryContext } from "../../context.js"; +import { type MergeIssuesResult, mergeIssues } from "../../lib/api-client.js"; +import { buildCommand } from "../../lib/command.js"; +import { + ApiError, + CliError, + ResolutionError, + ValidationError, +} from "../../lib/errors.js"; +import { muted, warning } from "../../lib/formatters/index.js"; +import { CommandOutput } from "../../lib/formatters/output.js"; +import { logger } from "../../lib/logger.js"; +import { buildIssueUrl } from "../../lib/sentry-urls.js"; +import type { SentryIssue } from "../../types/index.js"; +import { resolveIssue } from "./utils.js"; + +const log = logger.withTag("issue.merge"); + +const COMMAND = "merge"; +/** Full command path for error messages (this one isn't passed to resolveIssue). */ +const COMMAND_PATH = "issue merge"; + +type MergeFlags = { + readonly json: boolean; + readonly fields?: string[]; + readonly into?: string; +}; + +/** Result emitted by the command — extends API result with display fields. */ +type MergeCommandResult = { + /** Org slug the merge was scoped to (all inputs must share an org). */ + org: string; + /** Short ID of the canonical parent the children were merged into. */ + parentShortId: string; + /** Short IDs of the merged children (excludes parent). */ + childShortIds: string[]; + /** Raw API response (numeric group IDs). 
*/ + raw: MergeIssuesResult; +}; + +function formatMerged(result: MergeCommandResult): string { + const head = `${muted("Merged")} ${result.childShortIds.length} issue(s) into ${result.parentShortId}`; + const children = result.childShortIds.map((id) => ` • ${id}`).join("\n"); + const url = buildIssueUrl(result.org, result.raw.parent); + return `${head}\n${children}\n\nSee: ${url}`; +} + +function jsonTransform(result: MergeCommandResult): unknown { + return { + org: result.org, + parent: { + shortId: result.parentShortId, + id: result.raw.parent, + url: buildIssueUrl(result.org, result.raw.parent), + }, + children: result.childShortIds.map((shortId, i) => ({ + shortId, + id: result.raw.children[i] ?? "", + })), + }; +} + +/** + * Resolve all issue args in parallel and validate they share an org. + */ +async function resolveAllIssues( + args: readonly string[], + cwd: string +): Promise<{ org: string; issues: SentryIssue[] }> { + const resolved = await Promise.all( + args.map((arg) => + resolveIssue({ + issueArg: arg, + cwd, + command: COMMAND, + }) + ) + ); + + // Every resolved issue must have a concrete org slug — otherwise we + // can't safely decide whether a merge is cross-org. A bare numeric ID + // that couldn't pick up an org from DSN/env/config would return + // `org: undefined`; those must error before we proceed. + const missingOrg = resolved.filter((r) => !r.org); + if (missingOrg.length > 0) { + const badIds = missingOrg.map((r) => r.issue.shortId).join(", "); + throw new ValidationError( + `Could not determine the organization for: ${badIds}.\n\n` + + "Provide the org explicitly (e.g. /) so the merge\n" + + "can verify all issues belong to the same organization." + ); + } + + // Collect the fully-resolved orgs and require a single one. 
+ const orgs = new Set(resolved.map((r) => r.org as string)); + if (orgs.size > 1) { + throw new ValidationError( + `Cannot merge issues across organizations (${Array.from(orgs).join(", ")}).\n\n` + + "All issues must belong to the same organization." + ); + } + + const [org] = orgs; + if (!org) { + // Unreachable — resolved.length >= 1 (callers guard for <2) and we + // just asserted every entry has a non-empty org. + throw new CliError("Internal error: resolved issue missing org slug."); + } + + // Dedupe on resolved numeric ID: a user may pass the same issue in + // multiple forms (`CLI-K9`, `my-org/CLI-K9`, `100`), all of which + // collapse to the same group after resolution. Without this check we + // would send `?id=100&id=100` to Sentry, which the API dedupes server + // side — returning 204 ("no matching issues") — and then we re-throw + // that as a confusing "no matching issues" error. Catch it here instead. + const issues = resolved.map((r) => r.issue); + const uniqueIds = new Set(issues.map((i) => i.id)); + if (uniqueIds.size < 2) { + throw new ValidationError( + `Merge needs at least 2 distinct issues (all inputs resolved to ${issues[0]?.shortId ?? "the same issue"}).\n\n` + + "Check your argument list — you may have passed the same issue in\n" + + "multiple forms (short ID + org-qualified + numeric all count as one)." + ); + } + + return { org, issues }; +} + +/** + * Sort issues so that the one matching `into` is first. Sentry's merge + * endpoint picks the parent by size; pre-sorting nudges the tie-break + * toward the caller's preference for typical cases. + * + * Accepts the same formats as the positional args — bare short ID, + * numeric group ID, org-qualified short ID, or project-alias suffix + * (`f-g`, `fr-a3`, etc). Aliases are resolved by running the input + * through `resolveIssue` and comparing the resulting numeric ID against + * the already-resolved issues. 
+ * + * Fast path: try a direct string match first (avoids an API call when + * the user passes the canonical form). Fall back to `resolveIssue` only + * when the direct match misses, then look up by numeric ID. + * + * When `into` doesn't match any issue in the list, that's a user error — + * we throw so the user can correct the mistake instead of silently getting + * a different parent. + */ +async function orderForMerge( + issues: SentryIssue[], + into: string | undefined, + cwd: string +): Promise { + if (!into) { + return issues; + } + const normalized = into.trim(); + // Fast path: direct match on shortId / id (bare or org-qualified form). + // Short IDs are canonically uppercase (e.g. CLI-K9); users sometimes + // type them in the case of the org/project part, so match + // case-insensitively to avoid paying for the API fallback unnecessarily. + // The Sentry API's `shortId` field is always uppercase. + const lastSlash = normalized.lastIndexOf("/"); + const bare = lastSlash >= 0 ? normalized.slice(lastSlash + 1) : normalized; + const bareUpper = bare.toUpperCase(); + const normalizedUpper = normalized.toUpperCase(); + const direct = issues.find( + (i) => + i.shortId === bareUpper || + i.id === bare || + i.shortId === normalizedUpper || + i.id === normalized + ); + if (direct) { + return [direct, ...issues.filter((i) => i !== direct)]; + } + + // Fallback: resolve `into` through the same pipeline as positional args + // (handles project-alias suffixes like `f-g`, URLs, @selectors, etc). + // This adds a second API round trip in the alias-only case, but avoids + // reimplementing the alias lookup logic here. + // + // Only a clean "not found" (ResolutionError, or ApiError with status 404) + // is swallowed — real errors (auth, 5xx, network, ContextError) propagate + // so the user sees a proper diagnostic instead of the misleading + // "did not match any of the provided issues". 
+ let resolvedId: string | undefined; + try { + const { issue: resolvedIssue } = await resolveIssue({ + issueArg: normalized, + cwd, + command: COMMAND, + }); + resolvedId = resolvedIssue.id; + } catch (error) { + if ( + error instanceof ResolutionError || + (error instanceof ApiError && error.status === 404) + ) { + // Clean not-found — fall through to the "not among provided" error. + } else { + throw error; + } + } + + if (resolvedId) { + const match = issues.find((i) => i.id === resolvedId); + if (match) { + return [match, ...issues.filter((i) => i !== match)]; + } + } + + throw new ValidationError( + `--into '${into}' did not match any of the provided issues.\n\n` + + `Provided: ${issues.map((i) => i.shortId).join(", ")}`, + "into" + ); +} + +export const mergeCommand = buildCommand({ + docs: { + brief: "Merge 2+ issues into a single canonical group", + fullDescription: + "Consolidate multiple issues into one. Useful when the same logical\n" + + "error was split into separate groups (e.g. by Sentry's default\n" + + "stack-trace grouping before fingerprint rules were applied).\n\n" + + "Sentry picks the canonical parent based on event count — typically\n" + + "the largest group. --into is a preference, not a guarantee: if your\n" + + "choice has fewer events, Sentry may still pick a different parent,\n" + + "in which case a warning is printed to stderr.\n\n" + + "All issues must belong to the same organization. 
Only error-type\n" + + "issues can be merged (the API rejects performance/info issues).\n\n" + + "Examples:\n" + + " sentry issue merge CLI-K9 CLI-15H CLI-15N\n" + + " sentry issue merge CLI-K9 CLI-15H --into CLI-K9\n" + + " sentry issue merge my-org/CLI-AB my-org/CLI-CD", + }, + output: { + human: formatMerged, + jsonTransform, + }, + parameters: { + positional: { + kind: "array", + parameter: { + placeholder: "issue", + brief: "Issue IDs to merge (2 or more required)", + parse: String, + }, + }, + flags: { + into: { + kind: "parsed", + parse: String, + brief: + "Prefer this issue as the canonical parent (must match one of the provided IDs)", + optional: true, + }, + }, + aliases: { + i: "into", + }, + }, + async *func(this: SentryContext, flags: MergeFlags, ...args: string[]) { + const { cwd } = this; + + if (args.length < 2) { + throw new ValidationError( + `'sentry ${COMMAND_PATH}' needs at least 2 issue IDs (got ${args.length}).\n\n` + + "Example: sentry issue merge CLI-K9 CLI-15H CLI-15N" + ); + } + + const { org, issues } = await resolveAllIssues(args, cwd); + const ordered = await orderForMerge(issues, flags.into, cwd); + const groupIds = ordered.map((i) => i.id); + // `--into` is a preference, not a guarantee — track it so we can warn + // if Sentry picks a different parent (typically the largest by event + // count takes precedence over the requested ordering). + const requestedParentId = flags.into ? groupIds[0] : undefined; + + log.debug( + `Merging ${groupIds.length} issues in ${org}: ${ordered.map((i) => i.shortId).join(", ")}` + ); + + const raw = await mergeIssues(org, groupIds); + + // Map numeric IDs back to short IDs for display. + const idToShort = new Map(ordered.map((i) => [i.id, i.shortId])); + const parentShortId = idToShort.get(raw.parent) ?? raw.parent; + const childShortIds = raw.children.map((id) => idToShort.get(id) ?? 
id); + + if (requestedParentId && requestedParentId !== raw.parent) { + const requestedShortId = idToShort.get(requestedParentId) ?? flags.into; + this.stderr.write( + `${warning("Warning:")} --into '${requestedShortId}' was a preference, not a guarantee. ` + + `Sentry selected ${parentShortId} as the canonical parent based on event count.\n` + ); + } + + yield new CommandOutput({ + org, + parentShortId, + childShortIds, + raw, + }); + }, +}); diff --git a/src/commands/issue/resolve-commit-spec.ts b/src/commands/issue/resolve-commit-spec.ts new file mode 100644 index 000000000..3cd7f701e --- /dev/null +++ b/src/commands/issue/resolve-commit-spec.ts @@ -0,0 +1,167 @@ +/** + * Resolve `@commit` / `@commit:@` specs into a concrete + * `{inCommit: {commit, repository}}` payload. + * + * The bare `@commit` form auto-detects: + * 1. Current git HEAD SHA (must be inside a git work tree) + * 2. Current git origin URL → parse to `owner/repo` via {@link parseRemoteUrl} + * 3. Matching Sentry-registered repo (by `externalSlug` first, then `name`) + * + * The explicit form `@commit:@` skips steps 1-2 and goes straight + * to step 3 with the user-provided repo name, but still validates that the + * repo is registered in Sentry (otherwise the API rejects the payload). + * + * Every failure mode raises a `ValidationError` with a concrete remediation + * hint — no silent fallback to another resolution mode. Per the design, a + * half-correct `--in` request is worse than a clear error the user can fix. + */ + +import { execFileSync } from "node:child_process"; +import type { ResolveCommitSpec } from "../../lib/api/issues.js"; +import { listRepositoriesCached } from "../../lib/api-client.js"; +import { ValidationError } from "../../lib/errors.js"; +import { + getHeadCommit, + isInsideGitWorkTree, + parseRemoteUrl, +} from "../../lib/git.js"; +import type { SentryRepository } from "../../types/index.js"; + +/** Fetch the git origin URL without throwing when it's missing. 
*/ +function getGitOriginUrl(cwd: string): string | undefined { + try { + return execFileSync("git", ["remote", "get-url", "origin"], { + cwd, + encoding: "utf8", + stdio: ["ignore", "pipe", "ignore"], + }).trim(); + } catch { + return; + } +} + +/** + * Find the Sentry repo whose `externalSlug` or `name` matches the local + * `owner/repo` derived from `git remote get-url origin`. + * + * Returns `null` when no match is found — the caller surfaces this as + * a user-facing error with an available-repos list. + */ +function findSentryRepoMatchingOrigin( + originOwnerRepo: string, + sentryRepos: SentryRepository[] +): SentryRepository | null { + // Prefer externalSlug (canonical `owner/repo` from the integration) + // then fall back to `name` for repos without that field populated. + const match = + sentryRepos.find((r) => r.externalSlug === originOwnerRepo) ?? + sentryRepos.find((r) => r.name === originOwnerRepo); + return match ?? null; +} + +/** + * Find the Sentry repo matching a user-provided repo name exactly. + * Checks `name` first (the canonical identifier used by the API's + * InCommitValidator) and then `externalSlug` for convenience. + */ +function findSentryRepoByName( + repoName: string, + sentryRepos: SentryRepository[] +): SentryRepository | null { + return ( + sentryRepos.find((r) => r.name === repoName) ?? + sentryRepos.find((r) => r.externalSlug === repoName) ?? + null + ); +} + +/** Format a short list of available repos for error messages. */ +function formatAvailableRepos(repos: SentryRepository[]): string { + if (repos.length === 0) { + return "No repositories are registered in Sentry for this organization."; + } + const MAX = 10; + const names = repos + .slice(0, MAX) + .map((r) => ` - ${r.name}`) + .join("\n"); + const more = + repos.length > MAX + ? `\n ... 
and ${repos.length - MAX} more (sentry repo list /)` + : ""; + return `Available repositories in this organization:\n${names}${more}`; +} + +/** + * Resolve a {@link ResolveCommitSpec} (either auto-detect or explicit) into + * the concrete `{commit, repository}` payload the Sentry API expects. + * + * @throws {ValidationError} When any step of the resolution fails — no + * fallback to `inRelease` or other modes. The error message always names + * the next action the user can take. + */ +export async function resolveCommitSpec( + spec: ResolveCommitSpec, + orgSlug: string, + cwd: string +): Promise<{ commit: string; repository: string }> { + if (spec.kind === "auto") { + if (!isInsideGitWorkTree(cwd)) { + throw new ValidationError( + "--in @commit requires a git repository. Run from inside a checkout, or use --in @next / --in / --in @commit:@.", + "in" + ); + } + + let headSha: string; + try { + headSha = getHeadCommit(cwd); + } catch { + throw new ValidationError( + "--in @commit could not read HEAD (is this a fresh repo with no commits?). Make a commit first, or pass --in @commit:@ explicitly.", + "in" + ); + } + + const originUrl = getGitOriginUrl(cwd); + if (!originUrl) { + throw new ValidationError( + "--in @commit could not determine the git 'origin' remote. Add an origin remote, or pass --in @commit:@ explicitly.", + "in" + ); + } + + const originOwnerRepo = parseRemoteUrl(originUrl); + if (!originOwnerRepo) { + throw new ValidationError( + `--in @commit could not parse the origin URL ('${originUrl}') as 'owner/repo'. 
Use --in @commit:@ explicitly.`, + "in" + ); + } + + const sentryRepos = await listRepositoriesCached(orgSlug); + const match = findSentryRepoMatchingOrigin(originOwnerRepo, sentryRepos); + if (!match) { + throw new ValidationError( + `--in @commit: no Sentry repository matches local origin '${originOwnerRepo}' in organization '${orgSlug}'.\n\n` + + `${formatAvailableRepos(sentryRepos)}\n\n` + + "Register the repo in Sentry, or pass --in @commit:@ with a registered name.", + "in" + ); + } + + return { commit: headSha, repository: match.name }; + } + + // Explicit: @commit:@ — validate the repo is registered. + const sentryRepos = await listRepositoriesCached(orgSlug); + const match = findSentryRepoByName(spec.repository, sentryRepos); + if (!match) { + throw new ValidationError( + `--in @commit:${spec.repository}@${spec.commit}: no Sentry repository named '${spec.repository}' in organization '${orgSlug}'.\n\n` + + `${formatAvailableRepos(sentryRepos)}`, + "in" + ); + } + return { commit: spec.commit, repository: match.name }; +} diff --git a/src/commands/issue/resolve.ts b/src/commands/issue/resolve.ts new file mode 100644 index 000000000..161b94dec --- /dev/null +++ b/src/commands/issue/resolve.ts @@ -0,0 +1,178 @@ +/** + * sentry issue resolve + * + * Mark an issue as resolved, optionally tied to a release, the next + * release, or a specific commit. Mirrors the "Resolve" dropdown in the + * Sentry web UI. + * + * ## Flow + * + * 1. Parse positional issue arg (same formats as `issue view`) + * 2. Parse `--in` spec (static release/next-release vs commit-requiring) + * 3. Resolve to numeric group ID + org via `resolveIssue` + * 4. For commit specs, resolve `{commit, repository}` via git + repo cache + * 5. `updateIssueStatus(issueId, "resolved", { statusDetails, orgSlug })` + * 6. 
Emit the updated issue + */ + +import type { SentryContext } from "../../context.js"; +import { + parseResolveSpec, + RESOLVE_COMMIT_EXPLICIT_PREFIX, + RESOLVE_COMMIT_SENTINEL, + RESOLVE_NEXT_RELEASE_SENTINEL, + type ResolveStatusDetails, + updateIssueStatus, +} from "../../lib/api-client.js"; +import { buildCommand } from "../../lib/command.js"; +import { ContextError } from "../../lib/errors.js"; +import { formatIssueDetails, muted } from "../../lib/formatters/index.js"; +import { CommandOutput } from "../../lib/formatters/output.js"; +import { logger } from "../../lib/logger.js"; +import type { SentryIssue } from "../../types/index.js"; +import { resolveCommitSpec } from "./resolve-commit-spec.js"; +import { issueIdPositional, resolveIssue } from "./utils.js"; + +const log = logger.withTag("issue.resolve"); + +/** Subcommand name passed to resolveIssue for error-hint construction. */ +const COMMAND = "resolve"; + +type ResolveFlags = { + readonly json: boolean; + readonly fields?: string[]; + readonly in?: string; +}; + +/** + * Wrapped result emitted by the command — carries the `in` spec so the + * human formatter can surface it ("Resolved in 0.26.1", "Resolved in next + * release", etc.) without re-parsing. + */ +type ResolveResult = { + issue: SentryIssue; + spec: ResolveStatusDetails | null; +}; + +/** + * Describe the resolution spec for the human-readable output footer. + * + * Exhaustive over {@link ResolveStatusDetails} variants — any future + * variant added to the union triggers a TypeScript error at the + * `never`-typed `_exhaustive` assignment, preventing silent + * misclassification. 
+ */ +function describeSpec(spec: ResolveStatusDetails | null): string { + if (!spec) { + return "immediately"; + } + if ("inRelease" in spec) { + return `in release '${spec.inRelease}'`; + } + if ("inCommit" in spec) { + return `in commit ${spec.inCommit.commit.slice(0, 12)} (repo '${spec.inCommit.repository}')`; + } + if ("inNextRelease" in spec) { + return "in the next release"; + } + // Exhaustiveness check — TypeScript will error here if a new variant + // is added to ResolveStatusDetails without a matching branch above. + const _exhaustive: never = spec; + return _exhaustive; +} + +function formatResolved(result: ResolveResult): string { + const { issue, spec } = result; + const head = `${muted("Resolved")} ${describeSpec(spec)}`; + return `${head}\n\n${formatIssueDetails(issue)}`; +} + +function jsonTransform(result: ResolveResult): unknown { + return { ...result.issue, resolved_in: result.spec ?? undefined }; +} + +export const resolveCommand = buildCommand({ + docs: { + brief: "Mark an issue as resolved", + fullDescription: + "Resolve an issue, optionally tied to a release or commit.\n\n" + + "Resolution spec (--in / -i):\n" + + ` ${RESOLVE_NEXT_RELEASE_SENTINEL} Resolve in the next release (tied to HEAD)\n` + + ` ${RESOLVE_COMMIT_SENTINEL} Resolve in the current git HEAD — auto-detects repo\n` + + ` ${RESOLVE_COMMIT_EXPLICIT_PREFIX}@ Resolve in an explicit repo + commit (repo must be registered in Sentry)\n` + + " Resolve in this specific release (e.g., 0.26.1, spotlight@1.2.3)\n" + + " (omitted) Resolve immediately (no regression tracking)\n\n" + + "@commit auto-detection requires a git repository whose 'origin' remote\n" + + "maps to a Sentry-registered repo. 
The command errors out clearly if any\n" + + "part of the detection fails — use the explicit form to override.\n\n" + + "Examples:\n" + + " sentry issue resolve CLI-12Z\n" + + " sentry issue resolve CLI-12Z --in 0.26.1\n" + + " sentry issue resolve CLI-196 --in @next\n" + + " sentry issue resolve CLI-XX --in @commit\n" + + " sentry issue resolve CLI-XX -i @commit:getsentry/cli@abc123\n" + + " sentry issue resolve my-org/CLI-AB", + }, + output: { + human: formatResolved, + jsonTransform, + }, + parameters: { + positional: issueIdPositional, + flags: { + in: { + kind: "parsed", + parse: String, + brief: `Resolve in a release, next release, or commit ('' | '${RESOLVE_NEXT_RELEASE_SENTINEL}' | '${RESOLVE_COMMIT_SENTINEL}' | '${RESOLVE_COMMIT_EXPLICIT_PREFIX}@')`, + optional: true, + }, + }, + aliases: { + i: "in", + }, + }, + async *func(this: SentryContext, flags: ResolveFlags, issueArg: string) { + const { cwd } = this; + const parsed = parseResolveSpec(flags.in); + + const { org, issue } = await resolveIssue({ + issueArg, + cwd, + command: COMMAND, + }); + + // Static specs (release / next-release / omitted) are ready to send. + // Commit specs need git + Sentry-repo lookup before they become a + // concrete { inCommit: { commit, repository } } payload. + let statusDetails: ResolveStatusDetails | undefined; + if (parsed?.kind === "static") { + statusDetails = parsed.details; + } else if (parsed?.kind === "commit") { + if (!org) { + // Sentry's InCommit validator looks up the repo by name within the + // issue's org — we can't resolve the commit without knowing the org. + throw new ContextError( + "Organization", + "sentry issue resolve / --in @commit", + [], + "--in @commit needs an organization context to look up the Sentry repo registry." 
+ ); + } + const resolved = await resolveCommitSpec(parsed.spec, org, cwd); + statusDetails = { inCommit: resolved }; + } + + const updated = await updateIssueStatus(issue.id, "resolved", { + statusDetails, + orgSlug: org, + }); + + log.debug( + `Resolved ${updated.shortId} ${describeSpec(statusDetails ?? null)}` + ); + yield new CommandOutput({ + issue: updated, + spec: statusDetails ?? null, + }); + }, +}); diff --git a/src/commands/issue/unresolve.ts b/src/commands/issue/unresolve.ts new file mode 100644 index 000000000..07775118b --- /dev/null +++ b/src/commands/issue/unresolve.ts @@ -0,0 +1,61 @@ +/** + * sentry issue unresolve (aliased: reopen) + * + * Move an issue back to the "unresolved" state. Inverse of `issue resolve`. + */ + +import type { SentryContext } from "../../context.js"; +import { updateIssueStatus } from "../../lib/api-client.js"; +import { buildCommand } from "../../lib/command.js"; +import { formatIssueDetails, muted } from "../../lib/formatters/index.js"; +import { CommandOutput } from "../../lib/formatters/output.js"; +import { logger } from "../../lib/logger.js"; +import type { SentryIssue } from "../../types/index.js"; +import { issueIdPositional, resolveIssue } from "./utils.js"; + +const log = logger.withTag("issue.unresolve"); + +const COMMAND = "unresolve"; + +type UnresolveFlags = { + readonly json: boolean; + readonly fields?: string[]; +}; + +function formatUnresolved(issue: SentryIssue): string { + return `${muted("Reopened")}\n\n${formatIssueDetails(issue)}`; +} + +export const unresolveCommand = buildCommand({ + docs: { + brief: "Reopen a resolved issue", + fullDescription: + "Mark an issue as unresolved. 
Inverse of `sentry issue resolve`.\n\n" + + "Examples:\n" + + " sentry issue unresolve CLI-12Z\n" + + " sentry issue reopen CLI-12Z\n" + + " sentry issue unresolve my-org/CLI-AB", + }, + output: { + human: formatUnresolved, + }, + parameters: { + positional: issueIdPositional, + }, + async *func(this: SentryContext, _flags: UnresolveFlags, issueArg: string) { + const { cwd } = this; + + const { org, issue } = await resolveIssue({ + issueArg, + cwd, + command: COMMAND, + }); + + const updated = await updateIssueStatus(issue.id, "unresolved", { + orgSlug: org, + }); + + log.debug(`Reopened ${updated.shortId}`); + yield new CommandOutput(updated); + }, +}); diff --git a/src/lib/api-client.ts b/src/lib/api-client.ts index 1e3e6170b..957964c70 100644 --- a/src/lib/api-client.ts +++ b/src/lib/api-client.ts @@ -57,6 +57,15 @@ export { type IssuesPage, listIssuesAllPages, listIssuesPaginated, + type MergeIssuesResult, + mergeIssues, + type ParsedResolveSpec, + parseResolveSpec, + RESOLVE_COMMIT_EXPLICIT_PREFIX, + RESOLVE_COMMIT_SENTINEL, + RESOLVE_NEXT_RELEASE_SENTINEL, + type ResolveCommitSpec, + type ResolveStatusDetails, tryGetIssueByShortId, updateIssueStatus, } from "./api/issues.js"; @@ -107,7 +116,9 @@ export { updateRelease, } from "./api/releases.js"; export { + listAllRepositories, listRepositories, + listRepositoriesCached, listRepositoriesPaginated, } from "./api/repositories.js"; export { diff --git a/src/lib/api/infrastructure.ts b/src/lib/api/infrastructure.ts index b5b57385d..f3a0f05b7 100644 --- a/src/lib/api/infrastructure.ts +++ b/src/lib/api/infrastructure.ts @@ -314,6 +314,21 @@ export async function apiRequestToRegion( ); } + // 204 No Content / 205 Reset Content have no body by spec — calling + // response.json() on them throws SyntaxError. Callers that expect a + // body on success receive a clear ApiError here instead of crashing + // downstream on `data.`. Callers that expect 204 (e.g. 
the + // bulk mutate endpoint returns 204 when no IDs match) should catch + // this ApiError and handle it explicitly. + if (response.status === 204 || response.status === 205) { + throw new ApiError( + `API returned ${response.status} ${response.statusText} (no body)`, + response.status, + "The server returned no content — the request may have matched no records.", + endpoint + ); + } + const data = await response.json(); if (schema) { diff --git a/src/lib/api/issues.ts b/src/lib/api/issues.ts index 87a872fd2..9b3200ff7 100644 --- a/src/lib/api/issues.ts +++ b/src/lib/api/issues.ts @@ -10,7 +10,7 @@ import { listAnOrganization_sIssues } from "@sentry/api"; import type { SentryIssue } from "../../types/index.js"; import { applyCustomHeaders } from "../custom-headers.js"; -import { ApiError } from "../errors.js"; +import { ApiError, ValidationError } from "../errors.js"; import { resolveOrgRegion } from "../region.js"; import { @@ -394,21 +394,242 @@ export async function tryGetIssueByShortId( } } +/** + * Resolution-release tracking for {@link updateIssueStatus}. Maps to the + * `statusDetails` shape expected by Sentry's bulk mutate endpoint: + * + * - `inRelease` — resolve in a specific named release (e.g. `"0.26.1"`). + * Future events seen on releases **after** this one will regression-flag. + * - `inNextRelease: true` — resolve in the next release after the current + * commit. Commonly used when the fix is merged but not yet tagged. + * - `inCommit` — resolve tied to a commit in a Sentry-registered repo. + * Sentry resolves when a release containing the commit is created. Both + * `commit` (SHA) and `repository` (Sentry-registered repo name) are + * required by the API's InCommitValidator. + */ +export type ResolveStatusDetails = + | { inRelease: string } + | { inNextRelease: true } + | { inCommit: { commit: string; repository: string } }; + +/** + * Sentinel string meaning "resolve in the next release (tied to HEAD)". 
+ * Chosen to never clash with a real version string: the parser rejects any
+ * `--in` value that starts with `@` as a release name, so `@`-prefixed
+ * sentinels can never be confused with a literal release (note that `@`
+ * itself is legal inside Sentry release names, e.g. `pkg@1.2.3`).
+ */
+export const RESOLVE_NEXT_RELEASE_SENTINEL = "@next";
+
+/**
+ * Bare sentinel meaning "resolve at the current git HEAD, auto-detecting
+ * the Sentry-registered repo from the git origin URL". The command layer
+ * resolves this into a concrete `{inCommit: {...}}` before calling the API.
+ */
+export const RESOLVE_COMMIT_SENTINEL = "@commit";
+
+/**
+ * Prefix for the explicit commit form: `@commit:@`.
+ * Kept unambiguous against monorepo release strings like `pkg@1.2.3` by
+ * requiring the full `@commit:` anchor at the start.
+ */
+export const RESOLVE_COMMIT_EXPLICIT_PREFIX = "@commit:";
+
+/**
+ * Parsed representation of the `@commit` spec family — either an
+ * "auto-detect from HEAD" request or an explicit repo + SHA pair.
+ *
+ * Auto-detection needs git + an API call, so the CLI layer converts this
+ * into `ResolveStatusDetails` asynchronously via a separate resolver.
+ */
+export type ResolveCommitSpec =
+  | { kind: "auto" }
+  | { kind: "explicit"; repository: string; commit: string };
+
+/**
+ * Parsed representation of the fully-static portion of the `--in` grammar.
+ * Static specs ({@link ResolveStatusDetails}) are ready to ship to the API;
+ * commit specs ({@link ResolveCommitSpec}) need further resolution through
+ * git and the Sentry repo list before they become `{inCommit: ...}`.
+ */
+export type ParsedResolveSpec =
+  | { kind: "static"; details: ResolveStatusDetails }
+  | { kind: "commit"; spec: ResolveCommitSpec };
+
+/**
+ * Parse an `--in` resolution-spec string into its structured form. 
+ * + * Grammar (see command docs): + * + * - `@next` → `{kind: "static", details: {inNextRelease: true}}` + * - `@commit` → `{kind: "commit", spec: {kind: "auto"}}` + * - `@commit:@` → `{kind: "commit", spec: {kind: "explicit", ...}}` + * - anything else → `{kind: "static", details: {inRelease: }}` + * + * The explicit commit form requires a `@` payload after the + * `@commit:` anchor. Monorepo release strings like `pkg@1.2.3` are + * unambiguously treated as releases because they lack the `@commit:` prefix. + * + * Empty/whitespace-only input returns `null` (treated as "no spec" by the + * caller, which resolves immediately without release tracking). + * + * @throws {ValidationError} When `@commit:` prefix is given without a + * well-formed `@` payload. + */ +export function parseResolveSpec( + spec: string | undefined +): ParsedResolveSpec | null { + if (!spec) { + return null; + } + const trimmed = spec.trim(); + if (!trimmed) { + return null; + } + // Sentinel matches are case-insensitive (`@NEXT`, `@Commit`, etc.) so + // typos and mixed-case variants don't silently fall through to + // inRelease. But the payload after `@commit:` keeps its original case, + // since repository names and SHAs are case-sensitive. + const lower = trimmed.toLowerCase(); + if (lower === RESOLVE_NEXT_RELEASE_SENTINEL) { + return { kind: "static", details: { inNextRelease: true } }; + } + if (lower === RESOLVE_COMMIT_SENTINEL) { + return { kind: "commit", spec: { kind: "auto" } }; + } + if (lower.startsWith(RESOLVE_COMMIT_EXPLICIT_PREFIX)) { + const payload = trimmed.slice(RESOLVE_COMMIT_EXPLICIT_PREFIX.length); + // Split on the LAST `@` so repo names containing `@` (rare but legal + // for scoped npm-style names like `@acme/web`) resolve correctly. 
+ const splitIdx = payload.lastIndexOf("@"); + if (splitIdx <= 0 || splitIdx === payload.length - 1) { + throw new ValidationError( + `Invalid --in spec: '${spec}' — expected '${RESOLVE_COMMIT_EXPLICIT_PREFIX}@'.`, + "in" + ); + } + const repository = payload.slice(0, splitIdx).trim(); + const commit = payload.slice(splitIdx + 1).trim(); + if (!(repository && commit)) { + throw new ValidationError( + `Invalid --in spec: '${spec}' — repo and SHA must both be non-empty.`, + "in" + ); + } + return { kind: "commit", spec: { kind: "explicit", repository, commit } }; + } + // Anything else starting with `@` is almost certainly a mistyped + // sentinel — reject with a clear message instead of silently creating + // a release named (e.g.) "@netx" that the user didn't intend. + if (trimmed.startsWith("@")) { + throw new ValidationError( + `Invalid --in spec: '${spec}' is not a recognized sentinel.\n\n` + + `Expected '${RESOLVE_NEXT_RELEASE_SENTINEL}', '${RESOLVE_COMMIT_SENTINEL}', or '${RESOLVE_COMMIT_EXPLICIT_PREFIX}@'.\n` + + "If you meant a literal release name, it cannot start with '@'.", + "in" + ); + } + return { kind: "static", details: { inRelease: trimmed } }; +} + /** * Update an issue's status. + * + * When `status === "resolved"`, optional `statusDetails` can pin the fix + * to a release or commit (see {@link ResolveStatusDetails}). Without + * `statusDetails`, the issue is resolved immediately with no regression + * tracking — equivalent to clicking "Resolve" in the Sentry UI. + * + * When `options.orgSlug` is provided, the request is routed to that org's + * region via the org-scoped endpoint. Without it, falls back to the legacy + * global `/issues/{id}/` endpoint (works but not region-aware). 
 */
-export function updateIssueStatus(
+export async function updateIssueStatus(
   issueId: string,
-  status: "resolved" | "unresolved" | "ignored"
+  status: "resolved" | "unresolved" | "ignored",
+  options?: {
+    statusDetails?: ResolveStatusDetails;
+    orgSlug?: string;
+  }
 ): Promise {
-  // Use raw request - the SDK's updateAnIssue requires org slug but
-  // the legacy /issues/{id}/ endpoint works without it
-  return apiRequest(`/issues/${issueId}/`, {
+  const body: Record = { status };
+  if (options?.statusDetails) {
+    body.statusDetails = options.statusDetails;
+  }
+  if (options?.orgSlug) {
+    // Region-aware org-scoped endpoint — preferred when org is known.
+    const regionUrl = await resolveOrgRegion(options.orgSlug);
+    const { data } = await apiRequestToRegion(
+      regionUrl,
+      `/organizations/${encodeURIComponent(options.orgSlug)}/issues/${encodeURIComponent(issueId)}/`,
+      { method: "PUT", body }
+    );
+    return data;
+  }
+  // Legacy global endpoint — works without org but not region-aware.
+  return apiRequest(`/issues/${encodeURIComponent(issueId)}/`, {
     method: "PUT",
-    body: { status },
+    body,
   });
 }
+/** Result of a successful issue-merge operation. */
+export type MergeIssuesResult = {
+  /** Numeric group ID that the merged issues were consolidated into. */
+  parent: string;
+  /** Numeric group IDs that were merged into the parent (excludes parent). */
+  children: string[];
+};
+
+/**
+ * Merge multiple issues into a single canonical group.
+ *
+ * Sentry auto-picks the canonical parent (typically the largest by event
+ * count). Future events with fingerprints previously matching any of the
+ * children will flow into the parent group.
+ *
+ * @param orgSlug - Organization slug (required for the bulk mutate endpoint)
+ * @param groupIds - At least 2 numeric group IDs to merge
+ * @throws {ValidationError} When fewer than 2 IDs are provided
+ * @throws {ApiError} When the API rejects the merge
+ * (e.g. 
`"Only error issues can be merged."` for non-error issue types) + */ +export async function mergeIssues( + orgSlug: string, + groupIds: readonly string[] +): Promise { + if (groupIds.length < 2) { + throw new ValidationError( + `Need at least 2 issues to merge (got ${groupIds.length}).` + ); + } + // The bulk mutate endpoint accepts repeated `?id=X` query params plus a + // `{merge: 1}` body. The SDK wraps this but its typed `query` shape + // doesn't expose the array semantics cleanly, so use raw request. + const regionUrl = await resolveOrgRegion(orgSlug); + const query = groupIds.map((id) => `id=${encodeURIComponent(id)}`).join("&"); + const path = `/organizations/${encodeURIComponent(orgSlug)}/issues/?${query}`; + type MergeResponse = { merge: MergeIssuesResult }; + try { + const { data } = await apiRequestToRegion(regionUrl, path, { + method: "PUT", + body: { merge: 1 }, + }); + return data.merge; + } catch (error) { + // The bulk-mutate endpoint returns 204 when no matching issues are + // found — e.g. IDs out of scope, or issues deleted between resolution + // and the merge call. Catch the generic "no body" ApiError and re-throw + // with a friendlier user-facing message. + if (error instanceof ApiError && error.status === 204) { + throw new ApiError( + `No matching issues found for merge in '${orgSlug}'.`, + 204, + "All provided issue IDs are out of scope or no longer exist.", + path + ); + } + throw error; + } +} + /** * Resolve a share ID to basic issue data via the public share endpoint. 
* diff --git a/src/lib/api/repositories.ts b/src/lib/api/repositories.ts index e91942e34..bbb0ffcbf 100644 --- a/src/lib/api/repositories.ts +++ b/src/lib/api/repositories.ts @@ -7,17 +7,26 @@ import { listAnOrganization_sRepositories } from "@sentry/api"; import type { SentryRepository } from "../../types/index.js"; +import { getCachedRepos, setCachedRepos } from "../db/repo-cache.js"; +import { logger } from "../logger.js"; import { + API_MAX_PER_PAGE, getOrgSdkConfig, + MAX_PAGINATION_PAGES, type PaginatedResponse, unwrapPaginatedResult, unwrapResult, } from "./infrastructure.js"; +const log = logger.withTag("api.repositories"); + /** * List repositories in an organization. * Uses region-aware routing for multi-region support. + * + * Returns a single unpaginated page (typically 25 items). For the full + * list, use {@link listAllRepositories} — it walks every page. */ export async function listRepositories( orgSlug: string @@ -65,3 +74,74 @@ export async function listRepositoriesPaginated( "Failed to list repositories" ); } + +/** + * List **all** repositories in an organization by walking every page. + * + * Used by the offline repo cache and anywhere else we need the complete + * set (not just the first page). Stops at {@link MAX_PAGINATION_PAGES} + * as a safety net for pathological cases. + * + * @param orgSlug - Organization slug + * @returns All Sentry-registered repositories across all pages + */ +export async function listAllRepositories( + orgSlug: string +): Promise { + const all: SentryRepository[] = []; + let cursor: string | undefined; + for (let page = 0; page < MAX_PAGINATION_PAGES; page++) { + const { data, nextCursor } = await listRepositoriesPaginated(orgSlug, { + cursor, + perPage: API_MAX_PER_PAGE, + }); + all.push(...data); + if (!nextCursor) { + return all; + } + cursor = nextCursor; + } + log.warn( + `Stopped paginating repositories for '${orgSlug}' after ${MAX_PAGINATION_PAGES} pages — ` + + "some repos may be missing from the cache." 
+ ); + return all; +} + +/** + * List repositories in an organization, preferring the offline cache. + * + * On cache hit: returns the stored list immediately (no network). + * On cache miss (or stale): refetches the **complete** list via + * {@link listAllRepositories} (walks all pages) and refreshes the cache. + * Network/API failures bubble up — callers should decide how to handle + * them (typically a hard error, since repo resolution has no other + * source of truth). + * + * The cache write is wrapped in try/catch so a read-only or corrupted + * SQLite database doesn't crash a command whose primary API fetch + * already succeeded — following the project's established cache-write + * resilience pattern. + * + * @param orgSlug - Organization slug + * @returns All Sentry-registered repositories for the org + */ +export async function listRepositoriesCached( + orgSlug: string +): Promise { + const cached = getCachedRepos(orgSlug); + if (cached) { + return cached; + } + const fresh = await listAllRepositories(orgSlug); + try { + setCachedRepos(orgSlug, fresh); + } catch (error) { + // Non-essential: the primary API fetch already succeeded. A read-only + // DB or transient write failure shouldn't fail the whole command. + log.debug( + `Could not persist repo cache for '${orgSlug}': ${error instanceof Error ? error.message : String(error)}` + ); + } + return fresh; +} diff --git a/src/lib/complete.ts b/src/lib/complete.ts index 2f2c2786a..f0b947f14 100644 --- a/src/lib/complete.ts +++ b/src/lib/complete.ts @@ -93,6 +93,9 @@ export const ORG_PROJECT_COMMANDS = new Set([ "issue view", "issue explain", "issue plan", + "issue resolve", + "issue unresolve", + "issue merge", "project list", "project view", "project delete", diff --git a/src/lib/db/repo-cache.ts b/src/lib/db/repo-cache.ts new file mode 100644 index 000000000..dcce5c99f --- /dev/null +++ b/src/lib/db/repo-cache.ts @@ -0,0 +1,95 @@ +/** + * Cached Sentry repository list (per org). 
+ *
+ * Powers `issue resolve --in @commit` — avoids a `GET /organizations/{org}/repos/`
+ * round trip on every invocation when the cache is fresh. The cache stores
+ * the entire repo list as a JSON blob since the typical lookup pattern is
+ * "match git origin → find one repo", not "get one repo by ID".
+ *
+ * TTL is 7 days (see {@link REPO_CACHE_TTL_MS}). A stale
+ * cache is refreshed on the next call path that hits the API anyway.
+ */
+
+import type { SentryRepository } from "../../types/index.js";
+import { recordCacheHit } from "../telemetry.js";
+import { getDatabase } from "./index.js";
+import { runUpsert } from "./utils.js";
+
+/**
+ * How long cached repo lists are considered fresh. Kept shorter than the
+ * project cache (30 days) because repo-to-integration links change more
+ * often than project listings do.
+ */
+export const REPO_CACHE_TTL_MS = 7 * 24 * 60 * 60 * 1000;
+
+type RepoCacheRow = {
+  org_slug: string;
+  repos_json: string;
+  cached_at: number;
+};
+
+/**
+ * Fetch the cached repo list for an org. Returns `null` when no cache
+ * entry exists or the entry is older than {@link REPO_CACHE_TTL_MS}.
+ *
+ * Unparseable JSON (from a corrupted or stale schema) is treated as a
+ * cache miss — the caller refetches and the broken row is overwritten. 
+ */ +export function getCachedRepos(orgSlug: string): SentryRepository[] | null { + const db = getDatabase(); + const row = db + .query("SELECT * FROM repo_cache WHERE org_slug = ?") + .get(orgSlug) as RepoCacheRow | undefined; + + if (!row) { + recordCacheHit("repo", false); + return null; + } + + const age = Date.now() - row.cached_at; + if (age > REPO_CACHE_TTL_MS) { + recordCacheHit("repo", false); + return null; + } + + try { + const repos = JSON.parse(row.repos_json) as SentryRepository[]; + if (!Array.isArray(repos)) { + recordCacheHit("repo", false); + return null; + } + recordCacheHit("repo", true); + return repos; + } catch { + // Corrupted cache — treat as miss; overwritten on next setCachedRepos. + recordCacheHit("repo", false); + return null; + } +} + +/** + * Upsert the cached repo list for an org. Overwrites the previous entry + * (there's only ever one row per org). + */ +export function setCachedRepos( + orgSlug: string, + repos: SentryRepository[] +): void { + const db = getDatabase(); + runUpsert( + db, + "repo_cache", + { + org_slug: orgSlug, + repos_json: JSON.stringify(repos), + cached_at: Date.now(), + }, + ["org_slug"] + ); +} + +/** Clear the cached repo list for one org (for tests and manual refresh). 
*/ +export function clearCachedRepos(orgSlug: string): void { + const db = getDatabase(); + db.query("DELETE FROM repo_cache WHERE org_slug = ?").run(orgSlug); +} diff --git a/src/lib/db/schema.ts b/src/lib/db/schema.ts index a54b02729..df4ca95bd 100644 --- a/src/lib/db/schema.ts +++ b/src/lib/db/schema.ts @@ -16,7 +16,7 @@ import { getEnv } from "../env.js"; import { stringifyUnknown } from "../errors.js"; import { logger } from "../logger.js"; -export const CURRENT_SCHEMA_VERSION = 13; +export const CURRENT_SCHEMA_VERSION = 14; /** Environment variable to disable auto-repair */ const NO_AUTO_REPAIR_ENV = "SENTRY_CLI_NO_AUTO_REPAIR"; @@ -192,6 +192,22 @@ export const TABLE_SCHEMAS: Record = { }, }, }, + repo_cache: { + columns: { + // Composite PK (org_slug) — one row per org caches the full repo list + org_slug: { type: "TEXT", primaryKey: true }, + // JSON array of SentryRepository — stored as blob since we re-hydrate + // the whole list on cache hit (no per-repo lookups). Keeps the cache + // simple and matches the typical usage pattern (match git origin → + // find one repo by name/url). + repos_json: { type: "TEXT", notNull: true }, + cached_at: { + type: "INTEGER", + notNull: true, + default: "(unixepoch() * 1000)", + }, + }, + }, project_root_cache: { columns: { cwd: { type: "TEXT", primaryKey: true }, @@ -782,6 +798,13 @@ export function runMigrations(db: Database): void { db.exec("DROP TABLE defaults"); } + // Migration 13 -> 14: Add repo_cache table for offline Sentry repository + // lookups (used by `issue resolve --in @commit` to match git origin → + // Sentry-registered repo without an extra API round trip). 
+ if (currentVersion < 14) { + db.exec(EXPECTED_TABLES.repo_cache as string); + } + if (currentVersion < CURRENT_SCHEMA_VERSION) { db.query("UPDATE schema_version SET version = ?").run( CURRENT_SCHEMA_VERSION diff --git a/test/commands/issue/merge.func.test.ts b/test/commands/issue/merge.func.test.ts new file mode 100644 index 000000000..8d117e94a --- /dev/null +++ b/test/commands/issue/merge.func.test.ts @@ -0,0 +1,493 @@ +/** + * Issue Merge Command Tests + * + * Tests for `sentry issue merge` func() body — arg validation, cross-org + * rejection, --into parent selection, and API call shape. + */ + +import { + afterEach, + beforeEach, + describe, + expect, + mock, + spyOn, + test, +} from "bun:test"; +import { mergeCommand } from "../../../src/commands/issue/merge.js"; +// biome-ignore lint/performance/noNamespaceImport: needed for spyOn mocking +import * as issueUtils from "../../../src/commands/issue/utils.js"; +// biome-ignore lint/performance/noNamespaceImport: needed for spyOn mocking +import * as apiClient from "../../../src/lib/api-client.js"; +import { + ApiError, + AuthError, + ResolutionError, +} from "../../../src/lib/errors.js"; +import type { SentryIssue } from "../../../src/types/sentry.js"; + +function makeMockIssue(overrides?: Partial): SentryIssue { + return { + id: "100", + shortId: "CLI-A", + title: "Boom", + culprit: "handler", + count: "10", + userCount: 1, + firstSeen: "2026-03-01T00:00:00Z", + lastSeen: "2026-04-03T12:00:00Z", + level: "error", + status: "unresolved", + permalink: "https://sentry.io/organizations/test-org/issues/100/", + project: { id: "1", slug: "cli", name: "cli" }, + ...overrides, + } as SentryIssue; +} + +function createMockContext() { + const stdoutWrite = mock(() => true); + const stderrWrite = mock(() => true); + return { + context: { + stdout: { write: stdoutWrite }, + stderr: { write: stderrWrite }, + cwd: "/tmp", + }, + stdoutWrite, + stderrWrite, + }; +} + +describe("mergeCommand.func()", () => { + let 
resolveIssueSpy: ReturnType; + let mergeSpy: ReturnType; + + beforeEach(() => { + resolveIssueSpy = spyOn(issueUtils, "resolveIssue"); + mergeSpy = spyOn(apiClient, "mergeIssues"); + }); + + afterEach(() => { + resolveIssueSpy.mockRestore(); + mergeSpy.mockRestore(); + }); + + test("rejects when fewer than 2 issues are provided", async () => { + const { context } = createMockContext(); + const func = await mergeCommand.loader(); + const err = await func + .call(context, { json: false }, "CLI-A") + .catch((e: Error) => e); + + expect(err).toBeInstanceOf(Error); + expect(err.message).toContain("needs at least 2 issue IDs"); + expect(mergeSpy).not.toHaveBeenCalled(); + }); + + test("rejects issues where org could not be determined", async () => { + // One issue resolves without org (e.g. bare numeric ID, no DSN match), + // the other has a known org — without the guard, the merge would + // silently proceed to the known org and fail with a confusing API error. + resolveIssueSpy.mockImplementation(({ issueArg }: { issueArg: string }) => { + if (issueArg === "CLI-A") { + return Promise.resolve({ + org: "my-org", + issue: makeMockIssue({ shortId: "CLI-A", id: "10A" }), + }); + } + return Promise.resolve({ + org: undefined, + issue: makeMockIssue({ shortId: "NO-ORG", id: "999" }), + }); + }); + + const { context } = createMockContext(); + const func = await mergeCommand.loader(); + const err = await func + .call(context, { json: false }, "CLI-A", "999") + .catch((e: Error) => e); + + expect(err.message).toContain("Could not determine the organization"); + expect(err.message).toContain("NO-ORG"); + expect(mergeSpy).not.toHaveBeenCalled(); + }); + + test("rejects cross-org merges with a friendly message", async () => { + resolveIssueSpy.mockImplementation(({ issueArg }: { issueArg: string }) => + Promise.resolve({ + org: issueArg === "CLI-A" ? 
"org-one" : "org-two", + issue: makeMockIssue({ shortId: issueArg, id: issueArg }), + }) + ); + + const { context } = createMockContext(); + const func = await mergeCommand.loader(); + const err = await func + .call(context, { json: false }, "CLI-A", "CLI-B") + .catch((e: Error) => e); + + expect(err.message).toContain("Cannot merge issues across organizations"); + expect(mergeSpy).not.toHaveBeenCalled(); + }); + + test("calls mergeIssues with the resolved numeric IDs", async () => { + resolveIssueSpy.mockImplementation(({ issueArg }: { issueArg: string }) => + Promise.resolve({ + org: "test-org", + issue: makeMockIssue({ + shortId: issueArg, + id: issueArg.replace("CLI-", "10"), + }), + }) + ); + mergeSpy.mockResolvedValue({ parent: "10A", children: ["10B", "10C"] }); + + const { context } = createMockContext(); + const func = await mergeCommand.loader(); + await func.call(context, { json: false }, "CLI-A", "CLI-B", "CLI-C"); + + expect(mergeSpy).toHaveBeenCalledWith("test-org", ["10A", "10B", "10C"]); + }); + + test("--into pins the parent by short ID", async () => { + resolveIssueSpy.mockImplementation(({ issueArg }: { issueArg: string }) => + Promise.resolve({ + org: "test-org", + issue: makeMockIssue({ + shortId: issueArg, + id: issueArg.replace("CLI-", "10"), + }), + }) + ); + mergeSpy.mockResolvedValue({ parent: "10B", children: ["10A", "10C"] }); + + const { context } = createMockContext(); + const func = await mergeCommand.loader(); + // Pass A, B, C but pin B as parent — ordered list sent to API starts with B + await func.call( + context, + { json: false, into: "CLI-B" }, + "CLI-A", + "CLI-B", + "CLI-C" + ); + + const callArgs = mergeSpy.mock.calls[0] as [string, string[]]; + expect(callArgs[0]).toBe("test-org"); + expect(callArgs[1][0]).toBe("10B"); // parent moved to front + expect(new Set(callArgs[1])).toEqual(new Set(["10A", "10B", "10C"])); + }); + + test("--into accepts project-alias suffix (e.g. 
'f-g')", async () => { + // When the user passes `f-g` as --into, it doesn't match any shortId + // on the direct-match fast path, so the command falls back to + // resolveIssue and then matches by numeric ID. + let callIdx = 0; + resolveIssueSpy.mockImplementation(({ issueArg }: { issueArg: string }) => { + callIdx += 1; + // Positional args resolve to CLI-A / CLI-B + if (issueArg === "CLI-A" || issueArg === "CLI-B") { + return Promise.resolve({ + org: "test-org", + issue: makeMockIssue({ + shortId: issueArg, + id: issueArg.replace("CLI-", "10"), + }), + }); + } + // --into 'f-g' resolves (alias lookup) to CLI-B + if (issueArg === "f-g") { + return Promise.resolve({ + org: "test-org", + issue: makeMockIssue({ shortId: "CLI-B", id: "10B" }), + }); + } + return Promise.reject(new Error(`unexpected issueArg: ${issueArg}`)); + }); + mergeSpy.mockResolvedValue({ parent: "10B", children: ["10A"] }); + + const { context } = createMockContext(); + const func = await mergeCommand.loader(); + await func.call(context, { json: false, into: "f-g" }, "CLI-A", "CLI-B"); + + const callArgs = mergeSpy.mock.calls[0] as [string, string[]]; + // Parent (10B) moved to front of the merge call + expect(callArgs[1][0]).toBe("10B"); + // Three resolve calls total: 2 positional + 1 alias fallback + expect(callIdx).toBeGreaterThanOrEqual(3); + }); + + test("--into accepts org-qualified short ID", async () => { + resolveIssueSpy.mockImplementation(({ issueArg }: { issueArg: string }) => + Promise.resolve({ + org: "test-org", + issue: makeMockIssue({ + // resolveIssue returns bare short ID even if arg was org-qualified + shortId: issueArg.split("/").pop() as string, + id: (issueArg.split("/").pop() as string).replace("CLI-", "10"), + }), + }) + ); + mergeSpy.mockResolvedValue({ parent: "10B", children: ["10A", "10C"] }); + + const { context } = createMockContext(); + const func = await mergeCommand.loader(); + // User passes org-qualified form to --into; should still match. 
+ await func.call( + context, + { json: false, into: "my-org/CLI-B" }, + "CLI-A", + "CLI-B", + "CLI-C" + ); + + const callArgs = mergeSpy.mock.calls[0] as [string, string[]]; + expect(callArgs[1][0]).toBe("10B"); // parent still moved to front + }); + + test("--into rejects a value that doesn't match any provided issue", async () => { + resolveIssueSpy.mockImplementation(({ issueArg }: { issueArg: string }) => + Promise.resolve({ + org: "test-org", + issue: makeMockIssue({ + shortId: issueArg, + id: issueArg.replace("CLI-", "10"), + }), + }) + ); + + const { context } = createMockContext(); + const func = await mergeCommand.loader(); + const err = await func + .call(context, { json: false, into: "CLI-XYZ" }, "CLI-A", "CLI-B") + .catch((e: Error) => e); + + expect(err.message).toContain( + "--into 'CLI-XYZ' did not match any of the provided issues" + ); + expect(mergeSpy).not.toHaveBeenCalled(); + }); + + test("JSON output maps numeric IDs back to short IDs with URL", async () => { + resolveIssueSpy.mockImplementation(({ issueArg }: { issueArg: string }) => + Promise.resolve({ + org: "test-org", + issue: makeMockIssue({ + shortId: issueArg, + id: issueArg.replace("CLI-", "10"), + }), + }) + ); + mergeSpy.mockResolvedValue({ parent: "10A", children: ["10B"] }); + + const { context, stdoutWrite } = createMockContext(); + const func = await mergeCommand.loader(); + await func.call(context, { json: true }, "CLI-A", "CLI-B"); + + const output = stdoutWrite.mock.calls.map((c) => String(c[0])).join(""); + const parsed = JSON.parse(output) as { + org: string; + parent: { shortId: string; id: string; url: string }; + children: { shortId: string; id: string }[]; + }; + expect(parsed.org).toBe("test-org"); + expect(parsed.parent.shortId).toBe("CLI-A"); + expect(parsed.parent.id).toBe("10A"); + // URL surfaces the canonical parent so users can click through. 
+ expect(parsed.parent.url).toContain("test-org"); + expect(parsed.parent.url).toContain("10A"); + expect(parsed.children).toEqual([{ shortId: "CLI-B", id: "10B" }]); + }); + + test("warns when --into preference is overridden by Sentry", async () => { + resolveIssueSpy.mockImplementation(({ issueArg }: { issueArg: string }) => + Promise.resolve({ + org: "test-org", + issue: makeMockIssue({ + shortId: issueArg, + id: issueArg.replace("CLI-", "10"), + }), + }) + ); + // User asked for CLI-B, but Sentry picked CLI-A (e.g. larger by count) + mergeSpy.mockResolvedValue({ parent: "10A", children: ["10B"] }); + + const { context, stderrWrite } = createMockContext(); + const func = await mergeCommand.loader(); + await func.call(context, { json: false, into: "CLI-B" }, "CLI-A", "CLI-B"); + + const stderr = stderrWrite.mock.calls.map((c) => String(c[0])).join(""); + expect(stderr).toContain("--into 'CLI-B' was a preference"); + expect(stderr).toContain("CLI-A as the canonical parent"); + }); + + test("does not warn when --into preference was honored", async () => { + resolveIssueSpy.mockImplementation(({ issueArg }: { issueArg: string }) => + Promise.resolve({ + org: "test-org", + issue: makeMockIssue({ + shortId: issueArg, + id: issueArg.replace("CLI-", "10"), + }), + }) + ); + // User asked for CLI-B, Sentry agreed. 
+ mergeSpy.mockResolvedValue({ parent: "10B", children: ["10A"] }); + + const { context, stderrWrite } = createMockContext(); + const func = await mergeCommand.loader(); + await func.call(context, { json: false, into: "CLI-B" }, "CLI-A", "CLI-B"); + + const stderr = stderrWrite.mock.calls.map((c) => String(c[0])).join(""); + expect(stderr).not.toContain("--into"); + }); + + test("rejects duplicate issue IDs after resolution (same issue in multiple forms)", async () => { + // User passes `CLI-A` and `100` which both resolve to the same numeric + // group id — without the dedupe guard, we'd send `?id=100&id=100` to + // Sentry and get back a confusing 204 → "no matching issues" error. + resolveIssueSpy.mockImplementation(() => + Promise.resolve({ + org: "test-org", + issue: makeMockIssue({ shortId: "CLI-A", id: "100" }), + }) + ); + + const { context } = createMockContext(); + const func = await mergeCommand.loader(); + const err = await func + .call(context, { json: false }, "CLI-A", "100") + .catch((e: Error) => e); + + expect(err.message).toContain("at least 2 distinct issues"); + expect(err.message).toContain("CLI-A"); + expect(mergeSpy).not.toHaveBeenCalled(); + }); + + test("--into propagates auth errors instead of masking them as 'not found'", async () => { + // Fast-path direct match won't find CLI-XYZ (not among provided), so + // we fall back to resolveIssue. When that throws AuthError, the error + // must propagate — not be masked as the generic "did not match" + // message, which would be misleading during an outage or expired token. 
+ let callIdx = 0; + resolveIssueSpy.mockImplementation(({ issueArg }: { issueArg: string }) => { + callIdx += 1; + if (callIdx <= 2) { + // First two calls resolve positional args normally + return Promise.resolve({ + org: "test-org", + issue: makeMockIssue({ + shortId: issueArg, + id: issueArg.replace("CLI-", "10"), + }), + }); + } + // The --into fallback call throws an auth error + return Promise.reject(new AuthError("invalid")); + }); + + const { context } = createMockContext(); + const func = await mergeCommand.loader(); + const err = await func + .call(context, { json: false, into: "CLI-XYZ" }, "CLI-A", "CLI-B") + .catch((e: Error) => e); + + // AuthError bubbles up (not the misleading "did not match" error) + expect(err).toBeInstanceOf(AuthError); + expect(mergeSpy).not.toHaveBeenCalled(); + }); + + test("--into propagates 5xx ApiError instead of masking", async () => { + let callIdx = 0; + resolveIssueSpy.mockImplementation(({ issueArg }: { issueArg: string }) => { + callIdx += 1; + if (callIdx <= 2) { + return Promise.resolve({ + org: "test-org", + issue: makeMockIssue({ + shortId: issueArg, + id: issueArg.replace("CLI-", "10"), + }), + }); + } + return Promise.reject(new ApiError("Internal error", 500)); + }); + + const { context } = createMockContext(); + const func = await mergeCommand.loader(); + const err = await func + .call(context, { json: false, into: "CLI-XYZ" }, "CLI-A", "CLI-B") + .catch((e: Error) => e); + + expect(err).toBeInstanceOf(ApiError); + expect((err as ApiError).status).toBe(500); + }); + + test("--into swallows ResolutionError as clean not-found", async () => { + // Opposite of the above: when resolveIssue cleanly fails with + // ResolutionError (or a 404 ApiError), we should fall through to + // the 'did not match any of the provided issues' ValidationError. 
+ let callIdx = 0; + resolveIssueSpy.mockImplementation(({ issueArg }: { issueArg: string }) => { + callIdx += 1; + if (callIdx <= 2) { + return Promise.resolve({ + org: "test-org", + issue: makeMockIssue({ + shortId: issueArg, + id: issueArg.replace("CLI-", "10"), + }), + }); + } + return Promise.reject( + new ResolutionError("Issue 'XYZ'", "not found", "sentry issue view XYZ") + ); + }); + + const { context } = createMockContext(); + const func = await mergeCommand.loader(); + const err = await func + .call(context, { json: false, into: "XYZ" }, "CLI-A", "CLI-B") + .catch((e: Error) => e); + + // Should be the friendly "did not match" error, not the raw + // ResolutionError — the fallback path specifically handles not-found. + expect(err.message).toContain("did not match any of the provided issues"); + expect(err.message).toContain("CLI-A, CLI-B"); + }); + + test("fast-path matches short IDs case-insensitively", async () => { + // User types `cli-b` (lowercase) but short IDs are canonically + // uppercase. Direct match should still succeed without hitting the + // API-fallback path. + resolveIssueSpy.mockImplementation(({ issueArg }: { issueArg: string }) => + Promise.resolve({ + org: "test-org", + issue: makeMockIssue({ + shortId: issueArg.toUpperCase(), + id: issueArg.toUpperCase().replace("CLI-", "10"), + }), + }) + ); + mergeSpy.mockResolvedValue({ parent: "10B", children: ["10A"] }); + + let fallbackCalls = 0; + // Count how many times resolveIssue is called — should be 2 (positional + // only) since the fast-path succeeds. If it were 3, the fallback fired. + const originalImpl = resolveIssueSpy.getMockImplementation(); + resolveIssueSpy.mockImplementation((opts) => { + fallbackCalls += 1; + return originalImpl?.(opts) as ReturnType; + }); + + const { context } = createMockContext(); + const func = await mergeCommand.loader(); + await func.call(context, { json: false, into: "cli-b" }, "CLI-A", "CLI-B"); + + // 2 calls: one per positional arg. 
The fast path should hit on the + // lowercase `cli-b` → uppercase `CLI-B` comparison, avoiding a 3rd call. + expect(fallbackCalls).toBe(2); + const callArgs = mergeSpy.mock.calls[0] as [string, string[]]; + expect(callArgs[1][0]).toBe("10B"); // parent at front + }); +}); diff --git a/test/commands/issue/resolve-commit-spec.test.ts b/test/commands/issue/resolve-commit-spec.test.ts new file mode 100644 index 000000000..ae4a462d7 --- /dev/null +++ b/test/commands/issue/resolve-commit-spec.test.ts @@ -0,0 +1,187 @@ +/** + * Tests for resolveCommitSpec — converts `@commit` / `@commit:@` + * specs into the concrete `{commit, repository}` payload the Sentry API + * expects. Every failure mode must throw a ValidationError (never silently + * fall back to a different resolution mode). + */ + +import { describe, expect, spyOn, test } from "bun:test"; +import { resolveCommitSpec } from "../../../src/commands/issue/resolve-commit-spec.js"; +// biome-ignore lint/performance/noNamespaceImport: needed for spyOn mocking +import * as apiClient from "../../../src/lib/api-client.js"; +import { ValidationError } from "../../../src/lib/errors.js"; +// biome-ignore lint/performance/noNamespaceImport: needed for spyOn mocking +import * as gitLib from "../../../src/lib/git.js"; +import type { SentryRepository } from "../../../src/types/sentry.js"; + +function makeRepo(overrides: Partial): SentryRepository { + return { + id: "1", + name: "getsentry/cli", + url: "https://github.com/getsentry/cli", + provider: { id: "integrations:github", name: "GitHub" }, + status: "active", + externalSlug: "getsentry/cli", + ...overrides, + } as SentryRepository; +} + +describe("resolveCommitSpec — explicit mode", () => { + test("returns {commit, repository} when repo is registered in Sentry", async () => { + const repos = [makeRepo({ name: "getsentry/cli" })]; + const listSpy = spyOn( + apiClient, + "listRepositoriesCached" + ).mockResolvedValue(repos); + try { + const result = await resolveCommitSpec( 
+ { kind: "explicit", repository: "getsentry/cli", commit: "abc123" }, + "sentry", + "/tmp" + ); + expect(result).toEqual({ + commit: "abc123", + repository: "getsentry/cli", + }); + } finally { + listSpy.mockRestore(); + } + }); + + test("matches on externalSlug as a fallback when name differs", async () => { + const repos = [ + makeRepo({ + name: "Sentry Monolith", + externalSlug: "getsentry/sentry", + }), + ]; + const listSpy = spyOn( + apiClient, + "listRepositoriesCached" + ).mockResolvedValue(repos); + try { + const result = await resolveCommitSpec( + { kind: "explicit", repository: "getsentry/sentry", commit: "abc" }, + "sentry", + "/tmp" + ); + // API expects the canonical `name`, not the externalSlug + expect(result.repository).toBe("Sentry Monolith"); + } finally { + listSpy.mockRestore(); + } + }); + + test("throws ValidationError when repo is not registered in Sentry", async () => { + const listSpy = spyOn( + apiClient, + "listRepositoriesCached" + ).mockResolvedValue([makeRepo({ name: "getsentry/sentry" })]); + try { + await expect( + resolveCommitSpec( + { kind: "explicit", repository: "unknown/repo", commit: "abc" }, + "sentry", + "/tmp" + ) + ).rejects.toBeInstanceOf(ValidationError); + } finally { + listSpy.mockRestore(); + } + }); +}); + +describe("resolveCommitSpec — auto-detect mode", () => { + test("throws when not inside a git work tree", async () => { + const gitSpy = spyOn(gitLib, "isInsideGitWorkTree").mockReturnValue(false); + try { + await expect( + resolveCommitSpec({ kind: "auto" }, "sentry", "/tmp") + ).rejects.toThrow(/requires a git repository/); + } finally { + gitSpy.mockRestore(); + } + }); + + test("throws when HEAD cannot be read", async () => { + const gitSpy = spyOn(gitLib, "isInsideGitWorkTree").mockReturnValue(true); + const headSpy = spyOn(gitLib, "getHeadCommit").mockImplementation(() => { + throw new Error("fresh repo, no commits"); + }); + try { + await expect( + resolveCommitSpec({ kind: "auto" }, "sentry", "/tmp") + 
).rejects.toThrow(/could not read HEAD/); + } finally { + gitSpy.mockRestore(); + headSpy.mockRestore(); + } + }); + + test("resolves HEAD + matching Sentry repo (happy path)", async () => { + // Exercises the full success path: work-tree check → HEAD read → + // parseRemoteUrl parses the origin → listRepositoriesCached returns + // a repo whose externalSlug matches → resolved payload returned. + const gitSpy = spyOn(gitLib, "isInsideGitWorkTree").mockReturnValue(true); + const headSpy = spyOn(gitLib, "getHeadCommit").mockReturnValue( + "abc123def456" + ); + // parseRemoteUrl runs on the output of `git remote get-url origin`, + // which resolveCommitSpec fetches internally via execFileSync. We can + // stub parseRemoteUrl to skip the real git call and return a known + // owner/repo. + const parseSpy = spyOn(gitLib, "parseRemoteUrl").mockReturnValue( + "getsentry/cli" + ); + const listSpy = spyOn( + apiClient, + "listRepositoriesCached" + ).mockResolvedValue([ + makeRepo({ name: "getsentry/cli", externalSlug: "getsentry/cli" }), + ]); + + try { + // Use a cwd that actually has a git origin (the repo root) so the + // internal `git remote get-url origin` call succeeds and the stubbed + // parseRemoteUrl takes over from there. 
+ const result = await resolveCommitSpec( + { kind: "auto" }, + "sentry", + process.cwd() + ); + expect(result).toEqual({ + commit: "abc123def456", + repository: "getsentry/cli", + }); + } finally { + gitSpy.mockRestore(); + headSpy.mockRestore(); + parseSpy.mockRestore(); + listSpy.mockRestore(); + } + }); +}); + +describe("resolveCommitSpec — error messages are actionable", () => { + test("explicit-mode miss lists available repos to help the user correct", async () => { + const listSpy = spyOn( + apiClient, + "listRepositoriesCached" + ).mockResolvedValue([ + makeRepo({ name: "getsentry/cli" }), + makeRepo({ name: "getsentry/sentry" }), + ]); + try { + const err = await resolveCommitSpec( + { kind: "explicit", repository: "typo/repo", commit: "abc" }, + "sentry", + "/tmp" + ).catch((e: Error) => e); + expect(err).toBeInstanceOf(ValidationError); + expect(err.message).toContain("getsentry/cli"); + expect(err.message).toContain("getsentry/sentry"); + } finally { + listSpy.mockRestore(); + } + }); +}); diff --git a/test/commands/issue/resolve.func.test.ts b/test/commands/issue/resolve.func.test.ts new file mode 100644 index 000000000..2196d950a --- /dev/null +++ b/test/commands/issue/resolve.func.test.ts @@ -0,0 +1,272 @@ +/** + * Issue Resolve Command Tests + * + * Tests for `sentry issue resolve` and `sentry issue unresolve` func() bodies. 
+ */ + +import { + afterEach, + beforeEach, + describe, + expect, + mock, + spyOn, + test, +} from "bun:test"; +import { resolveCommand } from "../../../src/commands/issue/resolve.js"; +// biome-ignore lint/performance/noNamespaceImport: needed for spyOn mocking +import * as commitSpec from "../../../src/commands/issue/resolve-commit-spec.js"; +import { unresolveCommand } from "../../../src/commands/issue/unresolve.js"; +// biome-ignore lint/performance/noNamespaceImport: needed for spyOn mocking +import * as issueUtils from "../../../src/commands/issue/utils.js"; +// biome-ignore lint/performance/noNamespaceImport: needed for spyOn mocking +import * as apiClient from "../../../src/lib/api-client.js"; +import type { SentryIssue } from "../../../src/types/sentry.js"; + +function makeMockIssue(overrides?: Partial): SentryIssue { + return { + id: "123456789", + shortId: "CLI-G5", + title: "TypeError: boom", + culprit: "handler", + count: "10", + userCount: 3, + firstSeen: "2026-03-01T00:00:00Z", + lastSeen: "2026-04-03T12:00:00Z", + level: "error", + status: "resolved", + permalink: "https://sentry.io/organizations/test-org/issues/123456789/", + project: { id: "456", slug: "test-project", name: "Test Project" }, + ...overrides, + } as SentryIssue; +} + +function createMockContext() { + const stdoutWrite = mock(() => true); + return { + context: { + stdout: { write: stdoutWrite }, + stderr: { write: mock(() => true) }, + cwd: "/tmp", + }, + stdoutWrite, + }; +} + +describe("resolveCommand.func()", () => { + let resolveIssueSpy: ReturnType; + let updateSpy: ReturnType; + + beforeEach(() => { + resolveIssueSpy = spyOn(issueUtils, "resolveIssue"); + updateSpy = spyOn(apiClient, "updateIssueStatus"); + }); + + afterEach(() => { + resolveIssueSpy.mockRestore(); + updateSpy.mockRestore(); + }); + + test("resolves immediately when no --in spec is provided", async () => { + resolveIssueSpy.mockResolvedValue({ + org: "test-org", + issue: makeMockIssue({ status: "unresolved" }), 
+ }); + updateSpy.mockResolvedValue(makeMockIssue({ status: "resolved" })); + + const { context } = createMockContext(); + const func = await resolveCommand.loader(); + await func.call(context, { json: false }, "CLI-G5"); + + expect(updateSpy).toHaveBeenCalledWith("123456789", "resolved", { + statusDetails: undefined, + orgSlug: "test-org", + }); + }); + + test("resolves --in as inRelease", async () => { + resolveIssueSpy.mockResolvedValue({ + org: "test-org", + issue: makeMockIssue(), + }); + updateSpy.mockResolvedValue(makeMockIssue()); + + const { context } = createMockContext(); + const func = await resolveCommand.loader(); + await func.call(context, { json: false, in: "0.26.1" }, "CLI-G5"); + + expect(updateSpy).toHaveBeenCalledWith("123456789", "resolved", { + statusDetails: { inRelease: "0.26.1" }, + orgSlug: "test-org", + }); + }); + + test("resolves --in @next as inNextRelease", async () => { + resolveIssueSpy.mockResolvedValue({ + org: "test-org", + issue: makeMockIssue(), + }); + updateSpy.mockResolvedValue(makeMockIssue()); + + const { context } = createMockContext(); + const func = await resolveCommand.loader(); + await func.call(context, { json: false, in: "@next" }, "CLI-G5"); + + expect(updateSpy).toHaveBeenCalledWith("123456789", "resolved", { + statusDetails: { inNextRelease: true }, + orgSlug: "test-org", + }); + }); + + test("resolves --in @commit by delegating to resolveCommitSpec", async () => { + resolveIssueSpy.mockResolvedValue({ + org: "test-org", + issue: makeMockIssue(), + }); + updateSpy.mockResolvedValue(makeMockIssue()); + const commitSpy = spyOn(commitSpec, "resolveCommitSpec").mockResolvedValue({ + commit: "abc123def", + repository: "getsentry/cli", + }); + + try { + const { context } = createMockContext(); + const func = await resolveCommand.loader(); + await func.call(context, { json: false, in: "@commit" }, "CLI-G5"); + + expect(commitSpy).toHaveBeenCalledWith( + { kind: "auto" }, + "test-org", + "/tmp" + ); + 
expect(updateSpy).toHaveBeenCalledWith("123456789", "resolved", { + statusDetails: { + inCommit: { commit: "abc123def", repository: "getsentry/cli" }, + }, + orgSlug: "test-org", + }); + } finally { + commitSpy.mockRestore(); + } + }); + + test("resolves --in @commit:@ via explicit spec", async () => { + resolveIssueSpy.mockResolvedValue({ + org: "test-org", + issue: makeMockIssue(), + }); + updateSpy.mockResolvedValue(makeMockIssue()); + const commitSpy = spyOn(commitSpec, "resolveCommitSpec").mockResolvedValue({ + commit: "abc123", + repository: "getsentry/cli", + }); + + try { + const { context } = createMockContext(); + const func = await resolveCommand.loader(); + await func.call( + context, + { json: false, in: "@commit:getsentry/cli@abc123" }, + "CLI-G5" + ); + + expect(commitSpy).toHaveBeenCalledWith( + { kind: "explicit", repository: "getsentry/cli", commit: "abc123" }, + "test-org", + "/tmp" + ); + expect(updateSpy).toHaveBeenCalledWith("123456789", "resolved", { + statusDetails: { + inCommit: { commit: "abc123", repository: "getsentry/cli" }, + }, + orgSlug: "test-org", + }); + } finally { + commitSpy.mockRestore(); + } + }); + + test("--in @commit without an org context throws ContextError", async () => { + // No org resolved from the issue (e.g. 
bare numeric ID with no DSN cache hit) + resolveIssueSpy.mockResolvedValue({ + org: undefined, + issue: makeMockIssue(), + }); + + const { context } = createMockContext(); + const func = await resolveCommand.loader(); + const err = await func + .call(context, { json: false, in: "@commit" }, "123456789") + .catch((e: Error) => e); + + expect(err).toBeInstanceOf(Error); + expect(err.message).toContain("Organization"); + expect(updateSpy).not.toHaveBeenCalled(); + }); + + test("JSON output includes resolved_in metadata", async () => { + resolveIssueSpy.mockResolvedValue({ + org: "test-org", + issue: makeMockIssue(), + }); + updateSpy.mockResolvedValue(makeMockIssue()); + + const { context, stdoutWrite } = createMockContext(); + const func = await resolveCommand.loader(); + await func.call(context, { json: true, in: "0.26.1" }, "CLI-G5"); + + const output = stdoutWrite.mock.calls.map((c) => String(c[0])).join(""); + const parsed = JSON.parse(output) as Record; + expect(parsed.resolved_in).toEqual({ inRelease: "0.26.1" }); + expect(parsed.status).toBe("resolved"); + expect(parsed.shortId).toBe("CLI-G5"); + }); + + test("JSON output omits resolved_in when immediate", async () => { + resolveIssueSpy.mockResolvedValue({ + org: "test-org", + issue: makeMockIssue(), + }); + updateSpy.mockResolvedValue(makeMockIssue()); + + const { context, stdoutWrite } = createMockContext(); + const func = await resolveCommand.loader(); + await func.call(context, { json: true }, "CLI-G5"); + + const output = stdoutWrite.mock.calls.map((c) => String(c[0])).join(""); + const parsed = JSON.parse(output) as Record; + expect(parsed.resolved_in).toBeUndefined(); + expect(parsed.status).toBe("resolved"); + }); +}); + +describe("unresolveCommand.func()", () => { + let resolveIssueSpy: ReturnType; + let updateSpy: ReturnType; + + beforeEach(() => { + resolveIssueSpy = spyOn(issueUtils, "resolveIssue"); + updateSpy = spyOn(apiClient, "updateIssueStatus"); + }); + + afterEach(() => { + 
resolveIssueSpy.mockRestore(); + updateSpy.mockRestore(); + }); + + test("sets status to unresolved", async () => { + resolveIssueSpy.mockResolvedValue({ + org: "test-org", + issue: makeMockIssue({ status: "resolved" }), + }); + updateSpy.mockResolvedValue(makeMockIssue({ status: "unresolved" })); + + const { context } = createMockContext(); + const func = await unresolveCommand.loader(); + await func.call(context, { json: false }, "CLI-G5"); + + expect(updateSpy).toHaveBeenCalledWith("123456789", "unresolved", { + orgSlug: "test-org", + }); + }); +}); diff --git a/test/lib/api-client.coverage.test.ts b/test/lib/api-client.coverage.test.ts index 91d8cb8a6..5fe04c69b 100644 --- a/test/lib/api-client.coverage.test.ts +++ b/test/lib/api-client.coverage.test.ts @@ -994,6 +994,53 @@ describe("repositories.ts", () => { expect(result[0]!.name).toBe("getsentry/sentry"); }); }); + + describe("listAllRepositories", () => { + test("walks pagination and concatenates pages", async () => { + const pages: Array> = [ + [ + { id: "1", name: "owner/repo-a" }, + { id: "2", name: "owner/repo-b" }, + ], + [{ id: "3", name: "owner/repo-c" }], + ]; + let callIdx = 0; + + globalThis.fetch = mockFetch(async () => { + const page = pages[callIdx]; + callIdx += 1; + const hasNext = callIdx < pages.length; + const fullPage = page?.map((r) => ({ + ...r, + url: `https://github.com/${r.name}`, + provider: { id: "github", name: "GitHub" }, + status: "active", + })); + return new Response(JSON.stringify(fullPage), { + status: 200, + headers: { + "Content-Type": "application/json", + Link: hasNext + ? 
'; rel="next"; results="true"; cursor="c1"' + : '; rel="next"; results="false"; cursor=""', + }, + }); + }); + + // Dynamic import to avoid circular issue with already-imported listRepositories + const { listAllRepositories } = await import( + "../../src/lib/api/repositories.js" + ); + const result = await listAllRepositories("test-org"); + expect(result).toHaveLength(3); + expect(result.map((r) => r.name)).toEqual([ + "owner/repo-a", + "owner/repo-b", + "owner/repo-c", + ]); + expect(callIdx).toBe(2); + }); + }); }); // ============================================================================= diff --git a/test/lib/api/issues.test.ts b/test/lib/api/issues.test.ts new file mode 100644 index 000000000..98600a7bc --- /dev/null +++ b/test/lib/api/issues.test.ts @@ -0,0 +1,237 @@ +/** + * Tests for issue API helpers: parseResolveSpec + mergeIssues. + * + * Other issue API functions (list/get/update) are covered by + * test/lib/api-client.coverage.test.ts. + */ + +import { afterEach, beforeEach, describe, expect, test } from "bun:test"; +import { + mergeIssues, + parseResolveSpec, + RESOLVE_COMMIT_EXPLICIT_PREFIX, + RESOLVE_COMMIT_SENTINEL, + RESOLVE_NEXT_RELEASE_SENTINEL, +} from "../../../src/lib/api-client.js"; +import { ApiError, ValidationError } from "../../../src/lib/errors.js"; +import { mockFetch } from "../../helpers.js"; + +describe("parseResolveSpec", () => { + test("returns null for undefined", () => { + expect(parseResolveSpec(undefined)).toBeNull(); + }); + + test("returns null for empty string", () => { + expect(parseResolveSpec("")).toBeNull(); + }); + + test("returns null for whitespace-only string", () => { + expect(parseResolveSpec(" ")).toBeNull(); + }); + + test("parses @next sentinel as static inNextRelease", () => { + expect(parseResolveSpec(RESOLVE_NEXT_RELEASE_SENTINEL)).toEqual({ + kind: "static", + details: { inNextRelease: true }, + }); + }); + + test("parses bare @commit as commit/auto", () => { + 
expect(parseResolveSpec(RESOLVE_COMMIT_SENTINEL)).toEqual({ + kind: "commit", + spec: { kind: "auto" }, + }); + }); + + test("parses explicit @commit:@ as commit/explicit", () => { + expect( + parseResolveSpec(`${RESOLVE_COMMIT_EXPLICIT_PREFIX}getsentry/cli@abc123`) + ).toEqual({ + kind: "commit", + spec: { kind: "explicit", repository: "getsentry/cli", commit: "abc123" }, + }); + }); + + test("explicit @commit splits on the LAST '@' (scoped repo names like @acme/web)", () => { + expect( + parseResolveSpec(`${RESOLVE_COMMIT_EXPLICIT_PREFIX}@acme/web@abc123`) + ).toEqual({ + kind: "commit", + spec: { kind: "explicit", repository: "@acme/web", commit: "abc123" }, + }); + }); + + test("@commit:@ rejects missing SHA", () => { + expect(() => + parseResolveSpec(`${RESOLVE_COMMIT_EXPLICIT_PREFIX}getsentry/cli@`) + ).toThrow(ValidationError); + }); + + test("@commit:@ rejects missing repo", () => { + expect(() => + parseResolveSpec(`${RESOLVE_COMMIT_EXPLICIT_PREFIX}@abc123`) + ).toThrow(ValidationError); + }); + + test("@commit:@ rejects payload with no '@' separator", () => { + expect(() => + parseResolveSpec(`${RESOLVE_COMMIT_EXPLICIT_PREFIX}getsentry/cli`) + ).toThrow(ValidationError); + }); + + test("treats any other value as static inRelease (including monorepo 'pkg@1.2.3')", () => { + expect(parseResolveSpec("0.26.1")).toEqual({ + kind: "static", + details: { inRelease: "0.26.1" }, + }); + expect(parseResolveSpec("v2.3.0")).toEqual({ + kind: "static", + details: { inRelease: "v2.3.0" }, + }); + // Monorepo-style release — must NOT be mistaken for a commit spec + // because it lacks the `@commit:` anchor. 
+ expect(parseResolveSpec("spotlight@1.2.3")).toEqual({ + kind: "static", + details: { inRelease: "spotlight@1.2.3" }, + }); + expect(parseResolveSpec("my-release-tag")).toEqual({ + kind: "static", + details: { inRelease: "my-release-tag" }, + }); + }); + + test("trims surrounding whitespace from the version", () => { + expect(parseResolveSpec(" 0.26.1 ")).toEqual({ + kind: "static", + details: { inRelease: "0.26.1" }, + }); + }); + + test("sentinel matching is case-insensitive (@Next, @NEXT, @Commit, ...)", () => { + // Users sometimes copy sentinels from docs with different casing, or + // a stack frame name may be auto-capitalized. All variants must + // normalize to the canonical sentinel, never silently fall through to + // inRelease. + expect(parseResolveSpec("@Next")).toEqual({ + kind: "static", + details: { inNextRelease: true }, + }); + expect(parseResolveSpec("@NEXT")).toEqual({ + kind: "static", + details: { inNextRelease: true }, + }); + expect(parseResolveSpec("@Commit")).toEqual({ + kind: "commit", + spec: { kind: "auto" }, + }); + expect(parseResolveSpec("@COMMIT")).toEqual({ + kind: "commit", + spec: { kind: "auto" }, + }); + }); + + test("explicit @commit prefix is case-insensitive but preserves payload case", () => { + expect(parseResolveSpec("@Commit:GetSentry/CLI@ABCDEF123")).toEqual({ + kind: "commit", + spec: { + kind: "explicit", + repository: "GetSentry/CLI", + commit: "ABCDEF123", + }, + }); + }); + + test("rejects unknown @-prefixed tokens instead of silent inRelease fallback", () => { + // Typo guard: @netx, @commmit, @release, etc. must error instead of + // quietly creating a release named "@netx". Releases cannot legally + // start with @ in any supported format, so this is always a typo. 
+ expect(() => parseResolveSpec("@netx")).toThrow(ValidationError); + expect(() => parseResolveSpec("@commmit")).toThrow(ValidationError); + expect(() => parseResolveSpec("@release")).toThrow(ValidationError); + }); +}); + +describe("mergeIssues", () => { + let originalFetch: typeof globalThis.fetch; + + beforeEach(() => { + originalFetch = globalThis.fetch; + }); + + afterEach(() => { + globalThis.fetch = originalFetch; + }); + + test("rejects fewer than 2 group IDs", async () => { + await expect(mergeIssues("test-org", ["1"])).rejects.toThrow( + ValidationError + ); + await expect(mergeIssues("test-org", [])).rejects.toThrow(ValidationError); + }); + + test("sends PUT to org bulk-mutate endpoint with id= params and merge body", async () => { + let capturedUrl = ""; + let capturedMethod = ""; + let capturedBody: unknown; + + globalThis.fetch = mockFetch(async (input, init) => { + const req = new Request(input!, init); + capturedUrl = req.url; + capturedMethod = req.method; + capturedBody = await req.json(); + return new Response( + JSON.stringify({ + merge: { parent: "100", children: ["200", "300"] }, + }), + { + status: 200, + headers: { "Content-Type": "application/json" }, + } + ); + }); + + const result = await mergeIssues("test-org", ["100", "200", "300"]); + + expect(capturedMethod).toBe("PUT"); + expect(capturedUrl).toContain("/api/0/organizations/test-org/issues/"); + // All three IDs present as repeated query params + expect(capturedUrl).toContain("id=100"); + expect(capturedUrl).toContain("id=200"); + expect(capturedUrl).toContain("id=300"); + expect(capturedBody).toEqual({ merge: 1 }); + expect(result).toEqual({ parent: "100", children: ["200", "300"] }); + }); + + test("propagates API errors (e.g. 
'Only error issues can be merged')", async () => { + globalThis.fetch = mockFetch( + async () => + new Response(JSON.stringify(["Only error issues can be merged."]), { + status: 400, + headers: { "Content-Type": "application/json" }, + }) + ); + + await expect( + mergeIssues("test-org", ["100", "200"]) + ).rejects.toBeInstanceOf(ApiError); + }); + + test("handles 204 No Content (no matching issues) gracefully", async () => { + // Sentry's bulk mutate returns 204 when IDs are out of scope or the + // matched set is empty — without a body. Previously this crashed with + // SyntaxError in response.json(). + globalThis.fetch = mockFetch( + async () => + new Response(null, { + status: 204, + }) + ); + + await expect( + mergeIssues("test-org", ["100", "200"]) + ).rejects.toBeInstanceOf(ApiError); + await expect(mergeIssues("test-org", ["100", "200"])).rejects.toThrow( + /no matching issues|out of scope/i + ); + }); +}); diff --git a/test/lib/api/repositories.test.ts b/test/lib/api/repositories.test.ts new file mode 100644 index 000000000..5e2d54faf --- /dev/null +++ b/test/lib/api/repositories.test.ts @@ -0,0 +1,102 @@ +/** + * Tests for the cached repository helper. + * + * Covers the cache-hit / cache-miss paths and the resilience guard that + * keeps a broken SQLite write from crashing a command whose primary API + * fetch already succeeded (established project pattern — see AGENTS.md + * lore on cache-write resilience). 
+ */ + +import { afterEach, beforeEach, describe, expect, spyOn, test } from "bun:test"; +import { listRepositoriesCached } from "../../../src/lib/api/repositories.js"; +// biome-ignore lint/performance/noNamespaceImport: needed for spyOn mocking +import * as repoCache from "../../../src/lib/db/repo-cache.js"; +import type { SentryRepository } from "../../../src/types/sentry.js"; +import { mockFetch } from "../../helpers.js"; + +function repoApiResponse(repos: { name: string }[]): Response { + const body = repos.map((r) => ({ + id: r.name, + name: r.name, + url: `https://github.com/${r.name}`, + provider: { id: "github", name: "GitHub" }, + status: "active", + })); + return new Response(JSON.stringify(body), { + status: 200, + headers: { + "Content-Type": "application/json", + Link: '; rel="next"; results="false"; cursor=""', + }, + }); +} + +describe("listRepositoriesCached", () => { + let originalFetch: typeof globalThis.fetch; + let getSpy: ReturnType; + let setSpy: ReturnType; + + beforeEach(() => { + originalFetch = globalThis.fetch; + getSpy = spyOn(repoCache, "getCachedRepos"); + setSpy = spyOn(repoCache, "setCachedRepos"); + }); + + afterEach(() => { + globalThis.fetch = originalFetch; + getSpy.mockRestore(); + setSpy.mockRestore(); + }); + + test("returns cached list without hitting the API on cache hit", async () => { + const cached: SentryRepository[] = [ + { id: "1", name: "owner/cached" } as unknown as SentryRepository, + ]; + getSpy.mockReturnValue(cached); + let fetchCalled = false; + globalThis.fetch = mockFetch(async () => { + fetchCalled = true; + return new Response("[]"); + }); + + const result = await listRepositoriesCached("my-org"); + expect(result).toBe(cached); + expect(fetchCalled).toBe(false); + expect(setSpy).not.toHaveBeenCalled(); + }); + + test("refetches and writes cache on cache miss", async () => { + getSpy.mockReturnValue(null); + setSpy.mockImplementation(() => { + /* succeed silently */ + }); + globalThis.fetch = mockFetch(async 
() => + repoApiResponse([{ name: "owner/fresh" }]) + ); + + const result = await listRepositoriesCached("my-org"); + expect(result).toHaveLength(1); + expect(result[0]?.name).toBe("owner/fresh"); + expect(setSpy).toHaveBeenCalledWith( + "my-org", + expect.arrayContaining([expect.objectContaining({ name: "owner/fresh" })]) + ); + }); + + test("does NOT crash when cache write throws (resilience)", async () => { + getSpy.mockReturnValue(null); + setSpy.mockImplementation(() => { + // Simulate a read-only DB / corrupted SQLite write + throw new Error("attempt to write a readonly database"); + }); + globalThis.fetch = mockFetch(async () => + repoApiResponse([{ name: "owner/primary-succeeded" }]) + ); + + // The API fetch succeeded, so the command should still receive the + // data even though the cache write failed. No exception should escape. + const result = await listRepositoriesCached("my-org"); + expect(result).toHaveLength(1); + expect(result[0]?.name).toBe("owner/primary-succeeded"); + }); +}); diff --git a/test/lib/db/repo-cache.test.ts b/test/lib/db/repo-cache.test.ts new file mode 100644 index 000000000..26210d177 --- /dev/null +++ b/test/lib/db/repo-cache.test.ts @@ -0,0 +1,109 @@ +/** + * Tests for the Sentry repository offline cache. + * + * The cache is a per-org JSON blob with a TTL — covers cache hit, cache + * miss (no row), staleness (older than TTL), and corruption resilience. 
+ */
+
+import { afterEach, describe, expect, test } from "bun:test";
+import { getDatabase } from "../../../src/lib/db/index.js";
+import {
+  clearCachedRepos,
+  getCachedRepos,
+  REPO_CACHE_TTL_MS,
+  setCachedRepos,
+} from "../../../src/lib/db/repo-cache.js";
+import type { SentryRepository } from "../../../src/types/sentry.js";
+import { useTestConfigDir } from "../../helpers.js";
+
+useTestConfigDir("test-repo-cache-"); // per-run throwaway config dir so the DB starts empty
+
+// Minimal repo fixture; cast because only the fields the cache stores matter here.
+function makeRepo(name: string): SentryRepository {
+  return {
+    id: "1",
+    name,
+    url: `https://github.com/${name}`,
+    provider: { id: "integrations:github", name: "GitHub" },
+    status: "active",
+    externalSlug: name,
+  } as SentryRepository;
+}
+
+afterEach(() => {
+  // Intentionally empty: each test starts fresh — useTestConfigDir resets the DB.
+});
+
+describe("getCachedRepos", () => {
+  test("returns null when no row exists", () => {
+    expect(getCachedRepos("no-such-org")).toBeNull(); // miss, not an error
+  });
+
+  test("returns the stored list on cache hit", () => {
+    const repos = [makeRepo("getsentry/cli"), makeRepo("getsentry/sentry")];
+    setCachedRepos("my-org", repos);
+    const result = getCachedRepos("my-org");
+    expect(result).toHaveLength(2);
+    expect(result?.[0]?.name).toBe("getsentry/cli"); // insertion order preserved
+    expect(result?.[1]?.name).toBe("getsentry/sentry");
+  });
+
+  test("returns null when the row is older than TTL", () => {
+    setCachedRepos("aged-org", [makeRepo("foo/bar")]);
+    // Manually age the row past the TTL (direct SQL — no clock mocking needed)
+    const db = getDatabase();
+    db.query("UPDATE repo_cache SET cached_at = ? WHERE org_slug = ?").run(
+      Date.now() - (REPO_CACHE_TTL_MS + 1000), // 1s past the TTL cutoff
+      "aged-org"
+    );
+    expect(getCachedRepos("aged-org")).toBeNull(); // stale row treated as a miss
+  });
+
+  test("returns null when repos_json is corrupted (treats as miss)", () => {
+    const db = getDatabase();
+    db.query(
+      "INSERT INTO repo_cache (org_slug, repos_json, cached_at) VALUES (?, ?, ?)"
+    ).run("broken-org", "{not-json", Date.now()); // invalid JSON payload
+    expect(getCachedRepos("broken-org")).toBeNull();
+  });
+
+  test("returns null when repos_json is valid JSON but not an array", () => {
+    const db = getDatabase();
+    db.query(
+      "INSERT INTO repo_cache (org_slug, repos_json, cached_at) VALUES (?, ?, ?)"
+    ).run("wrong-shape-org", JSON.stringify({ not: "array" }), Date.now()); // parses, wrong shape
+    expect(getCachedRepos("wrong-shape-org")).toBeNull();
+  });
+});
+
+describe("setCachedRepos", () => {
+  test("overwrites the existing entry on repeated writes", () => {
+    setCachedRepos("my-org", [makeRepo("foo/bar")]);
+    setCachedRepos("my-org", [makeRepo("baz/qux"), makeRepo("quux/corge")]); // second write wins
+    const result = getCachedRepos("my-org");
+    expect(result).toHaveLength(2);
+    expect(result?.[0]?.name).toBe("baz/qux");
+  });
+
+  test("stores the full repo payload (round-trip)", () => {
+    const original = makeRepo("getsentry/cli");
+    setCachedRepos("my-org", [original]);
+    const [roundTripped] = getCachedRepos("my-org") ?? [];
+    expect(roundTripped).toEqual(original); // every field survives JSON round-trip
+  });
+});
+
+describe("clearCachedRepos", () => {
+  test("removes a single org's entry without affecting others", () => {
+    setCachedRepos("org-a", [makeRepo("a/1")]);
+    setCachedRepos("org-b", [makeRepo("b/1")]);
+    clearCachedRepos("org-a");
+    expect(getCachedRepos("org-a")).toBeNull();
+    expect(getCachedRepos("org-b")).not.toBeNull(); // scoped delete only
+  });
+
+  test("is a no-op when the entry doesn't exist", () => {
+    // Should not throw
+    clearCachedRepos("never-existed");
+    expect(getCachedRepos("never-existed")).toBeNull();
+  });
+});