From bcb182b647a62dbd9d698aaeaf5e776d3f05e696 Mon Sep 17 00:00:00 2001 From: Will Washburn Date: Tue, 5 May 2026 17:01:08 -0400 Subject: [PATCH] relayburn-sdk: implement five sync query verbs MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fill in `query_verbs.rs` with the embedding-API surface for `summary`, `session_cost`, `overhead`, `overhead_trim`, and `hotspots`. Each verb appears in two forms: - `impl LedgerHandle { fn (&self, opts) -> Result<…> }` for callers that already hold a handle (the common embedding path). - A free function that opens its own `LedgerHandle` from `LedgerOpenOptions::with_home(opts.ledger_home)`, mirroring the TS `withHome(opts.ledgerHome, …)` wrapper without mutating process env. Result types derive `Serialize` + `#[serde(rename_all = "camelCase")]` so the JSON shape matches `@relayburn/sdk` byte-for-byte; the discriminated `HotspotsResult` uses `#[serde(tag = "kind")]` to mirror the TS union. `normalize_since` accepts both ISO timestamps and the relative ranges (`24h`, `7d`, `4w`, `2m`) the CLI/SDK take, formatting via a hand-rolled proleptic-Gregorian helper to avoid pulling in chrono. The hotspots findings path runs the core `detect_patterns` pipeline plus the side-channel `tool-output-bloat` and `tool-call-pattern` detectors; `ghost-surface` is deferred (its TS sibling drives a filesystem-mining pipeline that goes beyond the ledger surface). Unit tests inside `query_verbs.rs` build a fixture ledger via `tempfile::TempDir`, append a couple of synthetic `TurnRecord`s, and assert each verb's wrapper plumbs through with structurally non-empty results — coverage of the JS sibling's behavioral parity is left for the integration test in PR5. Part of #246. 
Co-Authored-By: Claude Opus 4.7 (1M context) --- crates/relayburn-sdk/src/query_verbs.rs | 1619 ++++++++++++++++++++++- 1 file changed, 1616 insertions(+), 3 deletions(-) diff --git a/crates/relayburn-sdk/src/query_verbs.rs b/crates/relayburn-sdk/src/query_verbs.rs index 913f9ebd..85ea5c5f 100644 --- a/crates/relayburn-sdk/src/query_verbs.rs +++ b/crates/relayburn-sdk/src/query_verbs.rs @@ -1,8 +1,1621 @@ //! Query verbs — `summary`, `session_cost`, `overhead`, `overhead_trim`, -//! `hotspots`. Filled in by the follow-up to #246 PR1. +//! `hotspots`. Rust port of the corresponding exports from +//! `packages/sdk/index.js`. //! //! Each verb appears as an `impl LedgerHandle` method (sync, returns //! `anyhow::Result`) plus a free-function form that opens its own ledger -//! handle from `LedgerOpenOptions`. +//! handle from `LedgerOpenOptions`. Free functions take `ledger_home: +//! Option` so callers don't have to mutate process env to point +//! at a non-default ledger. -// TODO(#246): port the query verbs from `packages/sdk/index.js`. 
+use std::collections::{HashMap, HashSet}; +use std::fs; +use std::path::{Path, PathBuf}; + +use anyhow::Result; +use serde::{Deserialize, Serialize}; + +use relayburn_analyze::{ + aggregate_by_bash, aggregate_by_bash_verb, aggregate_by_file, aggregate_by_subagent, + attribute_hotspots, attribute_overhead, build_trim_recommendations, cost_for_turn, + detect_patterns, detect_tool_call_patterns, detect_tool_output_bloat, find_overhead_files, + findings_from_patterns, load_claude_settings, load_overhead_file, load_pricing, + project_claude_settings_path, render_unified_diff_for_recommendation, summarize_fidelity, + sum_costs, tool_call_pattern_to_finding, tool_output_bloat_to_finding, + user_claude_settings_path, AttributeOverheadInput, AttributionMethod, BashAggregation, + BashVerbAggregation, DetectPatternsOptions, DetectToolCallPatternsOptions, + DetectToolOutputBloatOptions, FidelitySummary, FileAggregation, + HotspotsOptions as AnalyzeHotspotsOptions, LoadedClaudeSettings, MarkdownSection, + OverheadFile, OverheadFileKind, ParsedOverheadFile, PricingTable, SubagentAggregation, + WasteFinding, +}; +use relayburn_ledger::Query; +use relayburn_reader::{ + parse_bash_command, resolve_project, BashParse, SourceKind, TurnRecord, UserTurnRecord, +}; + +use crate::{Ledger, LedgerHandle, LedgerOpenOptions}; + +// --------------------------------------------------------------------------- +// since-string parsing +// --------------------------------------------------------------------------- + +/// Accept either a CLI-style relative range (`24h`, `7d`, `4w`, `2m`) or an +/// ISO timestamp and return an ISO string the ledger query can compare. The +/// ledger filter does lexical compare on `turn.ts`, so passing a raw `7d` +/// would silently filter every turn out — same trap the TS sibling +/// (`packages/sdk/index.js`) protects against. 
+pub fn normalize_since(since: Option<&str>) -> Result> { + let Some(raw) = since else { + return Ok(None); + }; + if raw.is_empty() { + return Ok(None); + } + + if let Some((n, unit)) = parse_relative(raw) { + let secs_back = match unit { + 'h' => n * 3_600, + 'd' => n * 86_400, + 'w' => n * 7 * 86_400, + 'm' => n * 30 * 86_400, + _ => unreachable!(), + }; + let now = system_now_secs(); + let when = now.saturating_sub(secs_back); + return Ok(Some(format_iso_z(when))); + } + + // ISO-style: validate by checking the leading `YYYY-MM-DD` prefix. A + // chrono-grade parser would be heavier than the gate needs — anything + // beyond the date prefix the ledger compares lexically. + if !looks_like_iso(raw) { + anyhow::bail!( + "invalid since: {raw} (expected ISO timestamp or relative range like 7d)" + ); + } + Ok(Some(raw.to_string())) +} + +fn parse_relative(s: &str) -> Option<(u64, char)> { + let bytes = s.as_bytes(); + if bytes.len() < 2 { + return None; + } + let unit = bytes[bytes.len() - 1] as char; + if !matches!(unit, 'h' | 'd' | 'w' | 'm') { + return None; + } + let num = &s[..s.len() - 1]; + if num.is_empty() || !num.bytes().all(|b| b.is_ascii_digit()) { + return None; + } + let n: u64 = num.parse().ok()?; + Some((n, unit)) +} + +fn looks_like_iso(s: &str) -> bool { + let b = s.as_bytes(); + b.len() >= 10 + && b[0..4].iter().all(|c| c.is_ascii_digit()) + && b[4] == b'-' + && b[5..7].iter().all(|c| c.is_ascii_digit()) + && b[7] == b'-' + && b[8..10].iter().all(|c| c.is_ascii_digit()) +} + +fn system_now_secs() -> u64 { + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .map(|d| d.as_secs()) + .unwrap_or(0) +} + +/// Format Unix-seconds as `YYYY-MM-DDTHH:MM:SSZ`. Proleptic Gregorian — same +/// flavor of date math `relayburn-ingest::pending_stamps` uses to avoid a +/// chrono dep. 
+fn format_iso_z(secs: u64) -> String { + let total_days = (secs / 86_400) as i64; + let secs_in_day = (secs % 86_400) as u32; + let hour = secs_in_day / 3_600; + let minute = (secs_in_day / 60) % 60; + let second = secs_in_day % 60; + let (year, month, day) = days_to_ymd(total_days); + format!("{year:04}-{month:02}-{day:02}T{hour:02}:{minute:02}:{second:02}Z") +} + +fn days_to_ymd(days_from_epoch: i64) -> (i64, u32, u32) { + // Howard Hinnant's date-library algorithm (proleptic Gregorian). + let z = days_from_epoch + 719_468; + let era = if z >= 0 { z } else { z - 146_096 } / 146_097; + let doe = (z - era * 146_097) as u64; + let yoe = (doe - doe / 1_460 + doe / 36_524 - doe / 146_096) / 365; + let y = yoe as i64 + era * 400; + let doy = doe - (365 * yoe + yoe / 4 - yoe / 100); + let mp = (5 * doy + 2) / 153; + let d = doy - (153 * mp + 2) / 5 + 1; + let m = if mp < 10 { mp + 3 } else { mp - 9 }; + let year = if m <= 2 { y + 1 } else { y }; + (year, m as u32, d as u32) +} + +// --------------------------------------------------------------------------- +// Shared helpers — query construction + hotspots coverage gate +// --------------------------------------------------------------------------- + +fn build_query( + session: Option<&str>, + project: Option<&str>, + since: Option<&str>, +) -> Result { + let mut q = Query::default(); + if let Some(s) = session { + q.session_id = Some(s.to_string()); + } + if let Some(p) = project { + q.project = Some(p.to_string()); + } + if let Some(since_norm) = normalize_since(since)? { + q.since = Some(since_norm); + } + Ok(q) +} + +/// Mirrors the TS `HOTSPOTS_ATTRIBUTION_REQUIRED` + `turnPassesCoverage` +/// pair. Records without `fidelity` (older ledger writers) pass. 
+fn turn_passes_hotspots_coverage(turn: &TurnRecord) -> bool { + let Some(f) = turn.fidelity.as_ref() else { + return true; + }; + f.coverage.has_tool_calls && f.coverage.has_tool_result_events +} + +fn collect_turns(handle: &LedgerHandle, q: &Query) -> Result> { + let enriched = handle.inner.query_turns(q)?; + Ok(enriched.into_iter().map(|e| e.turn).collect()) +} + +fn bucket_user_turns_by_session( + handle: &LedgerHandle, + side_q: &Query, + keep: Option<&HashSet>, +) -> Result>> { + let mut out: HashMap> = HashMap::new(); + let user_turns = handle.inner.query_user_turns(side_q)?; + for ut in user_turns { + if let Some(set) = keep { + if !set.contains(&ut.session_id) { + continue; + } + } + out.entry(ut.session_id.clone()).or_default().push(ut); + } + Ok(out) +} + +fn open_with(ledger_home: Option<&Path>) -> Result { + let opts = match ledger_home { + Some(h) => LedgerOpenOptions::with_home(h), + None => LedgerOpenOptions::default(), + }; + Ledger::open(opts) +} + +// --------------------------------------------------------------------------- +// summary +// --------------------------------------------------------------------------- + +#[derive(Debug, Clone, Default, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SummaryOptions { + pub session: Option, + pub project: Option, + pub since: Option, + pub ledger_home: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SummaryToolRow { + pub tool: String, + pub tokens: u64, + pub cost: f64, + pub count: u64, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SummaryModelRow { + pub model: String, + pub tokens: u64, + pub cost: f64, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct Summary { + pub total_tokens: u64, + pub total_cost: f64, + pub turn_count: u64, + pub by_tool: Vec, + pub by_model: Vec, +} + +impl LedgerHandle { + pub fn 
summary(&self, opts: SummaryOptions) -> Result { + let q = build_query( + opts.session.as_deref(), + opts.project.as_deref(), + opts.since.as_deref(), + )?; + let turns = collect_turns(self, &q)?; + let pricing = load_pricing(None); + Ok(compute_summary(&turns, &pricing)) + } +} + +pub fn summary(opts: SummaryOptions) -> Result { + let handle = open_with(opts.ledger_home.as_deref())?; + handle.summary(SummaryOptions { + ledger_home: None, + ..opts + }) +} + +fn compute_summary(turns: &[TurnRecord], pricing: &PricingTable) -> Summary { + // First-seen iteration order matches TS `Map` semantics. + let mut by_tool_order: Vec = Vec::new(); + let mut by_tool: HashMap = HashMap::new(); + let mut by_model_order: Vec = Vec::new(); + let mut by_model: HashMap = HashMap::new(); + let mut total_tokens: u64 = 0; + let mut total_cost: f64 = 0.0; + + for t in turns { + let cost = cost_for_turn(t, pricing).map(|c| c.total).unwrap_or(0.0); + let tokens = t.usage.input + + t.usage.output + + t.usage.reasoning + + t.usage.cache_read + + t.usage.cache_create_5m + + t.usage.cache_create_1h; + total_tokens += tokens; + total_cost += cost; + + let model_row = by_model.entry(t.model.clone()).or_insert_with(|| { + by_model_order.push(t.model.clone()); + SummaryModelRow { + model: t.model.clone(), + tokens: 0, + cost: 0.0, + } + }); + model_row.tokens += tokens; + model_row.cost += cost; + + for call in &t.tool_calls { + let tool_row = by_tool.entry(call.name.clone()).or_insert_with(|| { + by_tool_order.push(call.name.clone()); + SummaryToolRow { + tool: call.name.clone(), + tokens: 0, + cost: 0.0, + count: 0, + } + }); + tool_row.tokens += tokens; + tool_row.cost += cost; + tool_row.count += 1; + } + } + + Summary { + total_tokens, + total_cost, + turn_count: turns.len() as u64, + by_tool: by_tool_order + .into_iter() + .map(|k| by_tool.remove(&k).unwrap()) + .collect(), + by_model: by_model_order + .into_iter() + .map(|k| by_model.remove(&k).unwrap()) + .collect(), + } +} + +// 
--------------------------------------------------------------------------- +// session_cost +// --------------------------------------------------------------------------- + +#[derive(Debug, Clone, Default, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SessionCostOptions { + pub session: Option, + pub ledger_home: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SessionCostResult { + pub session_id: Option, + #[serde(rename = "totalUSD")] + pub total_usd: f64, + pub total_tokens: u64, + pub turn_count: u64, + pub models: Vec, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub note: Option, +} + +impl LedgerHandle { + pub fn session_cost(&self, opts: SessionCostOptions) -> Result { + let Some(session_id) = opts.session.clone() else { + return Ok(SessionCostResult { + session_id: None, + total_usd: 0.0, + total_tokens: 0, + turn_count: 0, + models: Vec::new(), + note: Some("no session id provided".to_string()), + }); + }; + let q = Query::for_session(&session_id); + let turns = collect_turns(self, &q)?; + if turns.is_empty() { + return Ok(SessionCostResult { + session_id: Some(session_id), + total_usd: 0.0, + total_tokens: 0, + turn_count: 0, + models: Vec::new(), + note: Some("no turns recorded for this session yet".to_string()), + }); + } + let pricing = load_pricing(None); + let mut models = std::collections::BTreeSet::new(); + let mut total_tokens: u64 = 0; + let mut costs = Vec::with_capacity(turns.len()); + for t in &turns { + models.insert(t.model.clone()); + let u = &t.usage; + total_tokens += u.input + + u.output + + u.reasoning + + u.cache_read + + u.cache_create_5m + + u.cache_create_1h; + if let Some(c) = cost_for_turn(t, &pricing) { + costs.push(c); + } + } + let total = sum_costs(costs.iter()); + let total_usd = (total.total * 1_000_000.0).round() / 1_000_000.0; + Ok(SessionCostResult { + session_id: Some(session_id), + total_usd, + total_tokens, + turn_count: 
turns.len() as u64, + models: models.into_iter().collect(), + note: None, + }) + } +} + +pub fn session_cost(opts: SessionCostOptions) -> Result { + let handle = open_with(opts.ledger_home.as_deref())?; + handle.session_cost(SessionCostOptions { + ledger_home: None, + ..opts + }) +} + +// --------------------------------------------------------------------------- +// overhead + overhead_trim — share `gather_overhead` +// --------------------------------------------------------------------------- + +#[derive(Debug, Clone, Default, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct OverheadOptions { + pub project: Option, + pub since: Option, + pub kind: Option, + pub ledger_home: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct OverheadSection { + pub heading: String, + pub start_line: u64, + pub end_line: u64, + pub tokens: u64, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct OverheadSectionCost { + pub file_path: String, + pub section: OverheadSection, + pub token_share: f64, + pub cost_per_session: f64, + pub total_cost: f64, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct OverheadAttributionDetail { + pub session_count: u64, + pub per_session_avg: f64, + pub per_session_p95: f64, + pub total_cost: f64, + pub section_costs: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct OverheadFileSummary { + pub kind: OverheadFileKind, + pub path: String, + pub applies_to: Vec, + pub total_lines: u64, + pub bytes: u64, + pub tokens: u64, + pub sections: Vec, + pub grouping_level: u32, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct OverheadPerFileEntry { + pub path: String, + pub kind: OverheadFileKind, + pub applies_to: Vec, + pub attribution: OverheadAttributionDetail, +} + 
+#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct OverheadResult { + pub project: String, + pub files: Vec, + pub per_file: Vec, + pub grand_total: f64, +} + +#[derive(Debug, Clone, Default, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct OverheadTrimOptions { + pub project: Option, + pub since: Option, + pub kind: Option, + pub ledger_home: Option, + pub top: Option, + pub include_diff: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct OverheadTrimSection { + pub heading: String, + pub start_line: u64, + pub end_line: u64, + pub tokens: u64, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct OverheadTrimProjectedSavings { + pub per_session_usd: f64, + pub across_window_usd: f64, + pub tokens: u64, + pub token_share: f64, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct OverheadTrimRecommendation { + pub file: String, + pub kind: OverheadFileKind, + pub applies_to: Vec, + pub section: OverheadTrimSection, + pub projected_savings: OverheadTrimProjectedSavings, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub diff: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct OverheadTrimSummary { + pub files_analyzed: u64, + pub files_with_recommendations: u64, + pub total_recommendations: u64, + pub total_projected_savings_per_session: f64, + pub total_projected_savings_across_window: f64, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct OverheadTrimResult { + pub project: String, + pub since: String, + pub recommendations: Vec, + pub summary: OverheadTrimSummary, +} + +struct GatheredOverhead { + project_path: PathBuf, + files: Vec, + attribution: Option, +} + +fn gather_overhead( + handle: &LedgerHandle, + project: 
Option<&Path>, + since: Option<&str>, + kind: Option, +) -> Result { + let project_path: PathBuf = match project { + Some(p) => fs::canonicalize(p).unwrap_or_else(|_| p.to_path_buf()), + None => std::env::current_dir()?, + }; + + let mut found: Vec = find_overhead_files(&project_path); + if let Some(want) = kind { + found.retain(|f| f.kind == want); + } + if found.is_empty() { + return Ok(GatheredOverhead { + project_path, + files: Vec::new(), + attribution: None, + }); + } + + let mut parsed_files: Vec = Vec::with_capacity(found.len()); + for f in found { + parsed_files.push(load_overhead_file(f)?); + } + + let resolved = resolve_project(&project_path.to_string_lossy()); + let q = Query { + project: Some(resolved.project_key.unwrap_or(resolved.project)), + since: normalize_since(since)?, + ..Default::default() + }; + let turns = collect_turns(handle, &q)?; + let pricing = load_pricing(None); + let attribution = attribute_overhead(AttributeOverheadInput { + files: &parsed_files, + turns: &turns, + pricing: &pricing, + }); + Ok(GatheredOverhead { + project_path, + files: parsed_files, + attribution: Some(attribution), + }) +} + +impl LedgerHandle { + pub fn overhead(&self, opts: OverheadOptions) -> Result { + let data = gather_overhead( + self, + opts.project.as_deref(), + opts.since.as_deref(), + opts.kind, + )?; + let project_str = data.project_path.to_string_lossy().into_owned(); + let Some(attribution) = data.attribution else { + return Ok(OverheadResult { + project: project_str, + files: Vec::new(), + per_file: Vec::new(), + grand_total: 0.0, + }); + }; + let files = data + .files + .iter() + .map(|pf| OverheadFileSummary { + kind: pf.file.kind, + path: pf.file.path.clone(), + applies_to: pf.file.applies_to.clone(), + total_lines: pf.parsed.total_lines, + bytes: pf.parsed.bytes, + tokens: pf.parsed.tokens, + sections: pf.parsed.sections.clone(), + grouping_level: pf.parsed.grouping_level, + }) + .collect(); + let per_file = attribution + .per_file + .iter() + 
.map(|p| OverheadPerFileEntry { + path: p.file.path.clone(), + kind: p.file.kind, + applies_to: p.file.applies_to.clone(), + attribution: OverheadAttributionDetail { + session_count: p.attribution.session_count, + per_session_avg: p.attribution.per_session_avg, + per_session_p95: p.attribution.per_session_p95, + total_cost: p.attribution.total_cost, + section_costs: p + .attribution + .section_costs + .iter() + .map(|sc| OverheadSectionCost { + file_path: sc.file_path.clone(), + section: OverheadSection { + heading: sc.section.heading.clone(), + start_line: sc.section.start_line, + end_line: sc.section.end_line, + tokens: sc.section.tokens, + }, + token_share: sc.token_share, + cost_per_session: sc.cost_per_session, + total_cost: sc.total_cost, + }) + .collect(), + }, + }) + .collect(); + Ok(OverheadResult { + project: project_str, + files, + per_file, + grand_total: attribution.grand_total, + }) + } + + pub fn overhead_trim(&self, opts: OverheadTrimOptions) -> Result { + let since_label = opts + .since + .clone() + .unwrap_or_else(|| "all time".to_string()); + let data = gather_overhead( + self, + opts.project.as_deref(), + opts.since.as_deref(), + opts.kind, + )?; + let project_str = data.project_path.to_string_lossy().into_owned(); + let top_n = parse_top_n(opts.top); + let include_diff = opts.include_diff.unwrap_or(true); + + let Some(attribution) = data.attribution else { + return Ok(OverheadTrimResult { + project: project_str, + since: since_label, + recommendations: Vec::new(), + summary: OverheadTrimSummary { + files_analyzed: 0, + files_with_recommendations: 0, + total_recommendations: 0, + total_projected_savings_per_session: 0.0, + total_projected_savings_across_window: 0.0, + }, + }); + }; + + let mut recommendations: Vec = Vec::new(); + let mut files_with_recs: u64 = 0; + let mut text_cache: HashMap = HashMap::new(); + + for fa in &attribution.per_file { + let recs = build_trim_recommendations(&fa.attribution, top_n); + if recs.is_empty() { + continue; 
+ } + files_with_recs += 1; + let file_text: Option = if include_diff { + if let Some(t) = text_cache.get(&fa.file.path) { + Some(t.clone()) + } else { + let read = fs::read_to_string(&fa.file.path)?; + text_cache.insert(fa.file.path.clone(), read.clone()); + Some(read) + } + } else { + None + }; + for rec in &recs { + let diff = if include_diff { + Some(render_unified_diff_for_recommendation( + &fa.file.path, + file_text.as_deref().unwrap_or(""), + rec, + Some(&data.project_path), + )) + } else { + None + }; + recommendations.push(OverheadTrimRecommendation { + file: to_project_relative(&fa.file.path, &data.project_path), + kind: fa.file.kind, + applies_to: fa.file.applies_to.clone(), + section: OverheadTrimSection { + heading: rec.section.heading.clone(), + start_line: rec.section.start_line, + end_line: rec.section.end_line, + tokens: rec.section.tokens, + }, + projected_savings: OverheadTrimProjectedSavings { + per_session_usd: rec.projected_savings_per_session, + across_window_usd: rec.projected_savings_across_window, + tokens: rec.section.tokens, + token_share: rec.token_share, + }, + diff, + }); + } + } + + let total_per_session: f64 = recommendations + .iter() + .map(|r| r.projected_savings.per_session_usd) + .sum(); + let total_across_window: f64 = recommendations + .iter() + .map(|r| r.projected_savings.across_window_usd) + .sum(); + + Ok(OverheadTrimResult { + project: project_str, + since: since_label, + summary: OverheadTrimSummary { + files_analyzed: data.files.len() as u64, + files_with_recommendations: files_with_recs, + total_recommendations: recommendations.len() as u64, + total_projected_savings_per_session: total_per_session, + total_projected_savings_across_window: total_across_window, + }, + recommendations, + }) + } +} + +pub fn overhead(opts: OverheadOptions) -> Result { + let handle = open_with(opts.ledger_home.as_deref())?; + handle.overhead(OverheadOptions { + ledger_home: None, + ..opts + }) +} + +pub fn overhead_trim(opts: 
OverheadTrimOptions) -> Result { + let handle = open_with(opts.ledger_home.as_deref())?; + handle.overhead_trim(OverheadTrimOptions { + ledger_home: None, + ..opts + }) +} + +fn parse_top_n(v: Option) -> usize { + match v { + Some(n) if n > 0 => n as usize, + _ => 3, + } +} + +fn to_project_relative(file_path: &str, project_path: &Path) -> String { + let p = Path::new(file_path); + match p.strip_prefix(project_path) { + Ok(r) if !r.as_os_str().is_empty() => { + r.to_string_lossy().replace(std::path::MAIN_SEPARATOR, "/") + } + _ => file_path.replace(std::path::MAIN_SEPARATOR, "/"), + } +} + +// --------------------------------------------------------------------------- +// hotspots — discriminated union +// --------------------------------------------------------------------------- + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub enum HotspotsGroupBy { + Attribution, + Bash, + BashVerb, + File, + Subagent, +} + +#[derive(Debug, Clone, Default, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct HotspotsOptions { + pub session: Option, + pub project: Option, + pub since: Option, + pub group_by: Option, + pub patterns: Option>, + pub ledger_home: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct HotspotsSessionTotal { + pub session_id: String, + pub grand_cost: f64, + pub attributed_cost: f64, + pub unattributed_cost: f64, + pub attribution_method: AttributionMethod, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct HotspotsFidelityBlock { + pub analyzed: u64, + pub excluded: u64, + /// Aggregate fidelity summary for the matched-window turns. The analyze + /// `FidelitySummary` doesn't derive `Serialize`, so this trip through + /// `serde_json::Value` keeps the wire shape stable. 
+ pub summary: serde_json::Value, + pub refused: bool, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "kind")] +pub enum HotspotsResult { + #[serde(rename = "attribution")] + Attribution(Box), + #[serde(rename = "bash")] + Bash { + rows: Vec, + #[serde(default, skip_serializing_if = "Option::is_none")] + refused: Option, + #[serde(default, skip_serializing_if = "Option::is_none", rename = "refusalReason")] + refusal_reason: Option, + }, + #[serde(rename = "bash-verb")] + BashVerb { + rows: Vec, + #[serde(default, skip_serializing_if = "Option::is_none")] + refused: Option, + #[serde(default, skip_serializing_if = "Option::is_none", rename = "refusalReason")] + refusal_reason: Option, + }, + #[serde(rename = "file")] + File { + rows: Vec, + #[serde(default, skip_serializing_if = "Option::is_none")] + refused: Option, + #[serde(default, skip_serializing_if = "Option::is_none", rename = "refusalReason")] + refusal_reason: Option, + }, + #[serde(rename = "subagent")] + Subagent { + rows: Vec, + #[serde(default, skip_serializing_if = "Option::is_none")] + refused: Option, + #[serde(default, skip_serializing_if = "Option::is_none", rename = "refusalReason")] + refusal_reason: Option, + }, + #[serde(rename = "findings")] + Findings { + findings: Vec, + summary: serde_json::Value, + }, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct HotspotsAttributionResult { + pub turns_analyzed: u64, + pub grand_total: f64, + pub attributed_total: f64, + pub unattributed_total: f64, + pub attribution_degraded: bool, + pub sessions: Vec, + pub files: Vec, + pub bash_verbs: Vec, + pub bash: Vec, + pub subagents: Vec, + pub fidelity: HotspotsFidelityBlock, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub refused: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub refusal_reason: Option, +} + +impl LedgerHandle { + pub fn hotspots(&self, opts: HotspotsOptions) -> Result { + 
        // NOTE(review): generic parameters stripped by extraction in this
        // chunk (`Option<…>`, `Vec<…>`, `HashSet<…>`, `Result<…>`) — restore
        // from the repository before compiling.

        // Pattern mode is selected by a non-empty `patterns` list, not mere
        // presence of the field.
        let using_patterns = opts
            .patterns
            .as_ref()
            .map(|v| !v.is_empty())
            .unwrap_or(false);
        let q = build_query(
            opts.session.as_deref(),
            opts.project.as_deref(),
            opts.since.as_deref(),
        )?;
        let turns = collect_turns(self, &q)?;
        // `None` → default pricing table.
        let pricing = load_pricing(None);

        if using_patterns {
            return run_hotspots_findings(
                self,
                &turns,
                &pricing,
                opts.patterns.unwrap_or_default(),
                &q,
            );
        }
        run_hotspots_attribution(self, &turns, &pricing, opts.group_by, &q)
    }
}

/// Free-function form of [`LedgerHandle::hotspots`]: opens its own handle from
/// `opts.ledger_home`, then delegates with `ledger_home` cleared so the
/// already-open handle is authoritative.
pub fn hotspots(opts: HotspotsOptions) -> Result {
    let handle = open_with(opts.ledger_home.as_deref())?;
    handle.hotspots(HotspotsOptions {
        ledger_home: None,
        ..opts
    })
}

/// Attribution mode of `hotspots`: gates turns on tool-call/tool-result
/// coverage, attributes cost, and shapes the result per `group_by`
/// (defaulting to the full attribution report).
fn run_hotspots_attribution(
    handle: &LedgerHandle,
    turns: &[TurnRecord],
    pricing: &PricingTable,
    group_by: Option,
    q: &Query,
) -> Result {
    // Partition turns by the coverage gate; the excluded count feeds the
    // fidelity block.
    let mut eligible: Vec = Vec::new();
    let mut excluded: Vec = Vec::new();
    for t in turns {
        if turn_passes_hotspots_coverage(t) {
            eligible.push(t.clone());
        } else {
            excluded.push(t.clone());
        }
    }
    // Fidelity is summarized over ALL turns, not just the eligible ones.
    let fidelity_summary = summarize_fidelity(turns);
    let summary_value = fidelity_summary_to_value(&fidelity_summary);

    if !turns.is_empty() && eligible.is_empty() {
        // In this branch every turn is excluded, so both counts are the full
        // set — the message intentionally reads "N/N turns lack …".
        let refusal = format!(
            "{}/{} turns lack tool-call/tool-result coverage required for hotspots attribution",
            turns.len(),
            turns.len()
        );
        let group = group_by.unwrap_or(HotspotsGroupBy::Attribution);
        return Ok(refused_for_group(
            group,
            refusal,
            turns.len() as u64,
            summary_value,
        ));
    }

    let session_ids: HashSet = eligible.iter().map(|t| t.session_id.clone()).collect();
    // Side query keeps only the session/since filters from the main query.
    let side_q = Query {
        session_id: q.session_id.clone(),
        since: q.since.clone(),
        ..Default::default()
    };
    let user_turns_by_session = bucket_user_turns_by_session(handle, &side_q, Some(&session_ids))?;

    let result = attribute_hotspots(
        &eligible,
        &AnalyzeHotspotsOptions {
            pricing,
            content_by_session: None,
            user_turns_by_session: Some(&user_turns_by_session),
        },
    );

    // Non-default groupings return early with just their rows.
    let group = group_by.unwrap_or(HotspotsGroupBy::Attribution);
    match group {
        HotspotsGroupBy::Bash => {
            return Ok(HotspotsResult::Bash {
                rows: aggregate_by_bash(&result.attributions),
                refused: None,
                refusal_reason: None,
            });
        }
        HotspotsGroupBy::BashVerb => {
            return Ok(HotspotsResult::BashVerb {
                rows: aggregate_by_bash_verb(&result.attributions, parse_bash_verb),
                refused: None,
                refusal_reason: None,
            });
        }
        HotspotsGroupBy::File => {
            return Ok(HotspotsResult::File {
                rows: aggregate_by_file(&result.attributions),
                refused: None,
                refusal_reason: None,
            });
        }
        HotspotsGroupBy::Subagent => {
            return Ok(HotspotsResult::Subagent {
                rows: aggregate_by_subagent(&result.attributions),
                refused: None,
                refusal_reason: None,
            });
        }
        HotspotsGroupBy::Attribution => {}
    }

    // Default grouping: assemble every aggregation plus the fidelity block.
    let files = aggregate_by_file(&result.attributions);
    let bash_verbs = aggregate_by_bash_verb(&result.attributions, parse_bash_verb);
    let bash = aggregate_by_bash(&result.attributions);
    let subagents = aggregate_by_subagent(&result.attributions);
    let even_split: usize = result
        .session_totals
        .iter()
        .filter(|s| matches!(s.attribution_method, AttributionMethod::EvenSplit))
        .count();
    // Degraded when at least half of the sessions fell back to even-split.
    let degraded = !result.session_totals.is_empty()
        && (even_split as f64 / result.session_totals.len() as f64) >= 0.5;

    let sessions = result
        .session_totals
        .into_iter()
        .map(|s| HotspotsSessionTotal {
            session_id: s.session_id,
            grand_cost: s.grand_cost,
            attributed_cost: s.attributed_cost,
            unattributed_cost: s.unattributed_cost,
            attribution_method: s.attribution_method,
        })
        .collect();

    Ok(HotspotsResult::Attribution(Box::new(
        HotspotsAttributionResult {
            turns_analyzed: eligible.len() as u64,
            grand_total: result.grand_total,
            attributed_total: result.attributed_total,
            unattributed_total: result.unattributed_total,
            attribution_degraded: degraded,
            sessions,
            files,
            bash_verbs,
            bash,
            subagents,
            fidelity:
                HotspotsFidelityBlock {
                analyzed: eligible.len() as u64,
                excluded: excluded.len() as u64,
                summary: summary_value,
                refused: false,
            },
            refused: None,
            refusal_reason: None,
        },
    )))
}

// NOTE(review): generic parameters stripped by extraction in this chunk
// (`Box<…>`, `Vec<…>`, `HashSet<…>`, `Option<…>`, `Result<…>`) — restore
// from the repository before compiling.

/// Builds the refused-shaped result for the requested grouping.
/// `excluded_total` only appears in the attribution variant's fidelity
/// block; the row-shaped variants carry just the refusal flag and reason.
fn refused_for_group(
    group: HotspotsGroupBy,
    refusal: String,
    excluded_total: u64,
    summary_value: serde_json::Value,
) -> HotspotsResult {
    match group {
        HotspotsGroupBy::Bash => HotspotsResult::Bash {
            rows: Vec::new(),
            refused: Some(true),
            refusal_reason: Some(refusal),
        },
        HotspotsGroupBy::BashVerb => HotspotsResult::BashVerb {
            rows: Vec::new(),
            refused: Some(true),
            refusal_reason: Some(refusal),
        },
        HotspotsGroupBy::File => HotspotsResult::File {
            rows: Vec::new(),
            refused: Some(true),
            refusal_reason: Some(refusal),
        },
        HotspotsGroupBy::Subagent => HotspotsResult::Subagent {
            rows: Vec::new(),
            refused: Some(true),
            refusal_reason: Some(refusal),
        },
        HotspotsGroupBy::Attribution => HotspotsResult::Attribution(Box::new(
            HotspotsAttributionResult {
                turns_analyzed: 0,
                grand_total: 0.0,
                attributed_total: 0.0,
                unattributed_total: 0.0,
                attribution_degraded: false,
                sessions: Vec::new(),
                files: Vec::new(),
                bash_verbs: Vec::new(),
                bash: Vec::new(),
                subagents: Vec::new(),
                fidelity: HotspotsFidelityBlock {
                    analyzed: 0,
                    excluded: excluded_total,
                    summary: summary_value,
                    refused: true,
                },
                refused: Some(true),
                refusal_reason: Some(refusal),
            },
        )),
    }
}

/// Thin named adapter so `aggregate_by_bash_verb` can take a plain fn item.
fn parse_bash_verb(command: &str) -> Option {
    parse_bash_command(command)
}

/// Findings mode of `hotspots`: runs the core `detect_patterns` pipeline and
/// the side-channel `tool-output-bloat` / `tool-call-pattern` detectors,
/// keeping only the finding kinds listed in `wanted`.
fn run_hotspots_findings(
    handle: &LedgerHandle,
    turns: &[TurnRecord],
    pricing: &PricingTable,
    wanted: Vec,
    q: &Query,
) -> Result {
    let wanted_set: HashSet = wanted.into_iter().collect();
    let mut findings: Vec = Vec::new();

    // Side query keeps only the session/since filters from the main query.
    let side_q = Query {
        session_id: q.session_id.clone(),
        since: q.since.clone(),
        ..Default::default()
    };

    let user_turns_all: Vec = handle.inner.query_user_turns(&side_q)?;
    // Bucket user turns per session for the pattern detectors.
    let mut user_turns_by_session:
        HashMap> = HashMap::new();
    for ut in &user_turns_all {
        user_turns_by_session
            .entry(ut.session_id.clone())
            .or_default()
            .push(ut.clone());
    }

    let detected = detect_patterns(
        turns,
        &DetectPatternsOptions {
            pricing,
            compactions: None,
            user_turns_by_session: Some(&user_turns_by_session),
            content_by_session: None,
            tool_result_events: None,
        },
    );
    // Keep only the finding kinds the caller asked for.
    for f in findings_from_patterns(&detected) {
        if wanted_set.contains(&f.kind) {
            findings.push(f);
        }
    }

    if wanted_set.contains("tool-output-bloat") {
        // Collect user-level and project-level Claude settings when present;
        // either may be absent.
        let mut settings: Vec = Vec::new();
        if let Some(s) = load_claude_settings(user_claude_settings_path()) {
            settings.push(s);
        }
        let cwd = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
        if let Some(s) = load_claude_settings(project_claude_settings_path(&cwd)) {
            settings.push(s);
        }
        let tool_result_events = handle.inner.query_tool_result_events(&side_q)?;
        // `None` threshold/min_occurrences → detector defaults.
        let bloats = detect_tool_output_bloat(&DetectToolOutputBloatOptions {
            settings: &settings,
            tool_result_events: &tool_result_events,
            user_turns: &user_turns_all,
            turns,
            pricing,
            threshold: None,
            min_occurrences: None,
        });
        for b in bloats {
            findings.push(tool_output_bloat_to_finding(&b));
        }
    }

    // ghost-surface omitted: its TS sibling drives an async pipeline of
    // filesystem mining + synthetic-prompt deduction that goes well beyond
    // the ledger surface. Defer to a follow-up SDK PR.

    if wanted_set.contains("tool-call-pattern") {
        let patterns = detect_tool_call_patterns(turns, &DetectToolCallPatternsOptions { pricing });
        for p in patterns {
            findings.push(tool_call_pattern_to_finding(&p));
        }
    }

    Ok(HotspotsResult::Findings {
        findings,
        summary: fidelity_summary_to_value(&summarize_fidelity(turns)),
    })
}

/// Hand-serializes a `FidelitySummary` into the camelCase JSON shape the TS
/// SDK emits.
fn fidelity_summary_to_value(s: &FidelitySummary) -> serde_json::Value {
    // Mirror the TS shape: { total, byClass, byGranularity, missingCoverage,
    // unknown }.
    // The analyze type doesn't derive Serialize so build it here.
    // NOTE(review): `serde_json::Map<String, Value>` parameters stripped by
    // extraction in this chunk — restore before compiling.
    // Enum keys are round-tripped through serde to get their serialized
    // (renamed) string form; non-string serializations fall back to "".
    let by_class: serde_json::Map = s
        .by_class
        .iter()
        .map(|(k, v)| {
            let key = serde_json::to_value(k)
                .ok()
                .and_then(|x| x.as_str().map(str::to_string))
                .unwrap_or_default();
            (key, serde_json::Value::from(*v))
        })
        .collect();
    let by_granularity: serde_json::Map = s
        .by_granularity
        .iter()
        .map(|(k, v)| {
            let key = serde_json::to_value(k)
                .ok()
                .and_then(|x| x.as_str().map(str::to_string))
                .unwrap_or_default();
            (key, serde_json::Value::from(*v))
        })
        .collect();
    let missing: serde_json::Map = s
        .missing_coverage
        .iter()
        .map(|(k, v)| ((*k).to_string(), serde_json::Value::from(*v)))
        .collect();
    serde_json::json!({
        "total": s.total,
        "byClass": serde_json::Value::Object(by_class),
        "byGranularity": serde_json::Value::Object(by_granularity),
        "missingCoverage": serde_json::Value::Object(missing),
        "unknown": s.unknown,
    })
}

// ---------------------------------------------------------------------------
// Tests
// ---------------------------------------------------------------------------

#[cfg(test)]
mod tests {
    use super::*;
    use relayburn_reader::{ToolCall, Usage};
    use tempfile::TempDir;

    /// Builds a temp-dir ledger with two synthetic turns in session
    /// "sess-a", both reading the same file via the `Read` tool. The
    /// `TempDir` is returned so the ledger outlives the test body.
    fn fixture_handle() -> (TempDir, LedgerHandle) {
        let dir = tempfile::tempdir().unwrap();
        let opts = LedgerOpenOptions::with_home(dir.path());
        let mut handle = Ledger::open(opts).expect("open ledger");

        let turn1 = TurnRecord {
            v: 1,
            source: SourceKind::ClaudeCode,
            session_id: "sess-a".into(),
            session_path: None,
            message_id: "m-1".into(),
            turn_index: 0,
            ts: "2026-04-23T00:00:00.000Z".into(),
            model: "claude-sonnet-4-6".into(),
            project: Some("/tmp/proj".into()),
            project_key: None,
            usage: Usage {
                input: 1000,
                output: 500,
                reasoning: 0,
                cache_read: 0,
                cache_create_5m: 0,
                cache_create_1h: 0,
            },
            tool_calls: vec![ToolCall {
                id: "tu-1".into(),
                name: "Read".into(),
                target:
                    Some("/tmp/proj/foo.rs".into()),
                args_hash: "h1".into(),
                is_error: None,
                edit_pre_hash: None,
                edit_post_hash: None,
                skill_name: None,
                replaced_tools: None,
                collapsed_calls: None,
            }],
            files_touched: None,
            subagent: None,
            stop_reason: None,
            activity: None,
            retries: None,
            has_edits: None,
            fidelity: None,
        };
        // Second turn: same session/tool/target, nonzero cache_read so the
        // summary's token total exercises the cache column.
        let turn2 = TurnRecord {
            v: 1,
            source: SourceKind::ClaudeCode,
            session_id: "sess-a".into(),
            session_path: None,
            message_id: "m-2".into(),
            turn_index: 1,
            ts: "2026-04-23T00:01:00.000Z".into(),
            model: "claude-sonnet-4-6".into(),
            project: Some("/tmp/proj".into()),
            project_key: None,
            usage: Usage {
                input: 800,
                output: 400,
                reasoning: 0,
                cache_read: 200,
                cache_create_5m: 0,
                cache_create_1h: 0,
            },
            tool_calls: vec![ToolCall {
                id: "tu-2".into(),
                name: "Read".into(),
                target: Some("/tmp/proj/foo.rs".into()),
                args_hash: "h1".into(),
                is_error: None,
                edit_pre_hash: None,
                edit_post_hash: None,
                skill_name: None,
                replaced_tools: None,
                collapsed_calls: None,
            }],
            files_touched: None,
            subagent: None,
            stop_reason: None,
            activity: None,
            retries: None,
            has_edits: None,
            fidelity: None,
        };
        handle
            .raw_mut()
            .append_turns(&[turn1, turn2])
            .expect("append turns");
        (dir, handle)
    }

    #[test]
    fn normalize_since_accepts_relative_ranges() {
        // Relative ranges render as a 20-char ISO timestamp ("…Z").
        let v = normalize_since(Some("7d")).unwrap().unwrap();
        assert_eq!(v.len(), 20);
        assert!(v.ends_with('Z'));
    }

    #[test]
    fn normalize_since_passes_iso_through() {
        let iso = "2026-04-01T00:00:00Z";
        assert_eq!(normalize_since(Some(iso)).unwrap().as_deref(), Some(iso));
    }

    #[test]
    fn normalize_since_rejects_garbage() {
        assert!(normalize_since(Some("zzz")).is_err());
    }

    #[test]
    fn normalize_since_returns_none_for_empty() {
        // Both absent and empty-string inputs mean "no since filter".
        assert!(normalize_since(None).unwrap().is_none());
        assert!(normalize_since(Some("")).unwrap().is_none());
    }

    #[test]
    fn summary_aggregates_two_turns() {
        let (_dir, handle) = fixture_handle();
        let s = handle.summary(SummaryOptions::default()).unwrap();
        assert_eq!(s.turn_count, 2);
        // input + output of both turns, plus turn2's cache_read.
        assert_eq!(s.total_tokens, 1000 + 500 + 800 + 400 + 200);
        assert_eq!(s.by_model.len(), 1);
        assert_eq!(s.by_model[0].model, "claude-sonnet-4-6");
        assert_eq!(s.by_tool.len(), 1);
        assert_eq!(s.by_tool[0].tool, "Read");
        assert_eq!(s.by_tool[0].count, 2);
        assert!(s.total_cost > 0.0);
    }

    #[test]
    fn summary_session_filter_narrows_to_session() {
        let (_dir, handle) = fixture_handle();
        let s = handle
            .summary(SummaryOptions {
                session: Some("nope".into()),
                ..SummaryOptions::default()
            })
            .unwrap();
        assert_eq!(s.turn_count, 0);
        assert_eq!(s.total_tokens, 0);
    }

    #[test]
    fn session_cost_returns_note_when_session_missing() {
        let (_dir, handle) = fixture_handle();
        let r = handle.session_cost(SessionCostOptions::default()).unwrap();
        assert!(r.session_id.is_none());
        assert_eq!(r.note.as_deref(), Some("no session id provided"));
        assert_eq!(r.turn_count, 0);
    }

    #[test]
    fn session_cost_aggregates_turns_for_known_session() {
        let (_dir, handle) = fixture_handle();
        let r = handle
            .session_cost(SessionCostOptions {
                session: Some("sess-a".into()),
                ..SessionCostOptions::default()
            })
            .unwrap();
        assert_eq!(r.session_id.as_deref(), Some("sess-a"));
        assert_eq!(r.turn_count, 2);
        assert_eq!(r.models, vec!["claude-sonnet-4-6".to_string()]);
        assert!(r.total_usd > 0.0);
        assert!(r.note.is_none());
    }

    #[test]
    fn session_cost_known_session_with_no_turns_emits_note() {
        let (_dir, handle) = fixture_handle();
        let r = handle
            .session_cost(SessionCostOptions {
                session: Some("ghost".into()),
                ..SessionCostOptions::default()
            })
            .unwrap();
        assert_eq!(r.session_id.as_deref(), Some("ghost"));
        assert_eq!(r.turn_count, 0);
        assert_eq!(
            r.note.as_deref(),
            Some("no turns recorded for this session yet")
        );
    }

    #[test]
    fn overhead_returns_empty_when_no_files_present() {
        let (_dir,
            handle) = fixture_handle();
        let project = tempfile::tempdir().unwrap();
        let r = handle
            .overhead(OverheadOptions {
                project: Some(project.path().to_path_buf()),
                ..OverheadOptions::default()
            })
            .unwrap();
        // Fresh temp dir has no CLAUDE.md/overhead files → empty report.
        assert!(r.files.is_empty());
        assert!(r.per_file.is_empty());
        assert_eq!(r.grand_total, 0.0);
    }

    #[test]
    fn overhead_attributes_when_claude_md_present() {
        let (_dir, handle) = fixture_handle();
        let project = tempfile::tempdir().unwrap();
        let body = format!("## Section\n{}", "x".repeat(800));
        std::fs::write(project.path().join("CLAUDE.md"), &body).unwrap();
        let r = handle
            .overhead(OverheadOptions {
                project: Some(project.path().to_path_buf()),
                ..OverheadOptions::default()
            })
            .unwrap();
        assert_eq!(r.files.len(), 1);
        assert_eq!(r.per_file.len(), 1);
        assert_eq!(r.files[0].kind, OverheadFileKind::ClaudeMd);
    }

    #[test]
    fn overhead_trim_emits_summary_when_claude_md_present() {
        let (_dir, handle) = fixture_handle();
        let project = tempfile::tempdir().unwrap();
        let body = format!(
            "## Big\n{}\n\n## Small\n{}\n",
            "y".repeat(8000),
            "z".repeat(200)
        );
        std::fs::write(project.path().join("CLAUDE.md"), &body).unwrap();
        let r = handle
            .overhead_trim(OverheadTrimOptions {
                project: Some(project.path().to_path_buf()),
                top: Some(1),
                ..OverheadTrimOptions::default()
            })
            .unwrap();
        // The fixture's turns have cache_read=0/200 — well below this
        // CLAUDE.md's token count — so attribution sees no rides and total
        // cost is 0. `build_trim_recommendations` still emits a top-N row
        // per non-preamble section, with projected savings = 0; that's the
        // contract. With `top=1` and two H2 sections in the file, we get
        // a single recommendation.
        assert_eq!(r.summary.files_analyzed, 1);
        assert_eq!(r.recommendations.len(), 1);
        assert_eq!(r.recommendations[0].projected_savings.per_session_usd, 0.0);
        assert!(r.recommendations[0].diff.is_some());
        assert_eq!(r.since, "all time");
    }

    #[test]
    fn hotspots_returns_attribution_shape_by_default() {
        let (_dir, handle) = fixture_handle();
        let r = handle.hotspots(HotspotsOptions::default()).unwrap();
        match r {
            HotspotsResult::Attribution(a) => {
                // Our turns lack `fidelity` (None), so the coverage gate
                // passes — both turns are eligible.
                assert_eq!(a.turns_analyzed, 2);
                assert!(a.grand_total >= 0.0);
                assert_eq!(a.fidelity.analyzed, 2);
                assert_eq!(a.fidelity.excluded, 0);
            }
            other => panic!("expected attribution, got {other:?}"),
        }
    }

    #[test]
    fn hotspots_group_by_file_returns_file_kind() {
        let (_dir, handle) = fixture_handle();
        let r = handle
            .hotspots(HotspotsOptions {
                group_by: Some(HotspotsGroupBy::File),
                ..HotspotsOptions::default()
            })
            .unwrap();
        match r {
            HotspotsResult::File { rows, refused, .. } => {
                assert!(refused.is_none());
                // Two `Read` calls on /tmp/proj/foo.rs collapse into 1 row.
                assert!(rows.len() <= 1);
            }
            other => panic!("expected file, got {other:?}"),
        }
    }

    #[test]
    fn hotspots_with_patterns_returns_findings_kind() {
        let (_dir, handle) = fixture_handle();
        let r = handle
            .hotspots(HotspotsOptions {
                patterns: Some(vec!["retry-loop".into()]),
                ..HotspotsOptions::default()
            })
            .unwrap();
        match r {
            HotspotsResult::Findings { findings, summary } => {
                // No retries in fixture, so findings is empty — but the
                // kind:findings shape and summary block should still ship.
                assert!(findings.is_empty());
                assert!(summary.is_object());
            }
            other => panic!("expected findings, got {other:?}"),
        }
    }

    #[test]
    fn free_function_summary_round_trips_through_ledger_home() {
        let dir = tempfile::tempdir().unwrap();
        // Scope the writing handle so it is closed before the free function
        // reopens the same ledger home.
        {
            let mut handle = Ledger::open(LedgerOpenOptions::with_home(dir.path())).unwrap();
            let t = TurnRecord {
                v: 1,
                source: SourceKind::ClaudeCode,
                session_id: "x".into(),
                session_path: None,
                message_id: "m".into(),
                turn_index: 0,
                ts: "2026-04-23T00:00:00.000Z".into(),
                model: "claude-sonnet-4-6".into(),
                project: None,
                project_key: None,
                usage: Usage {
                    input: 100,
                    output: 50,
                    reasoning: 0,
                    cache_read: 0,
                    cache_create_5m: 0,
                    cache_create_1h: 0,
                },
                tool_calls: Vec::new(),
                files_touched: None,
                subagent: None,
                stop_reason: None,
                activity: None,
                retries: None,
                has_edits: None,
                fidelity: None,
            };
            handle.raw_mut().append_turns(&[t]).unwrap();
        }
        let s = summary(SummaryOptions {
            ledger_home: Some(dir.path().to_path_buf()),
            ..SummaryOptions::default()
        })
        .unwrap();
        assert_eq!(s.turn_count, 1);
        assert_eq!(s.total_tokens, 150);
    }
}