diff --git a/Cargo.toml b/Cargo.toml index 3f5d72a..93ec785 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -39,6 +39,7 @@ axum = "0.7" clap = { version = "4", features = ["derive"] } ignore = "0.4" libloading = "0.8" +notify = "8" regex = "1" serde = { version = "1", features = ["derive"] } serde_json = "1" diff --git a/src/cli.rs b/src/cli.rs index a89a61a..a054472 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -32,12 +32,11 @@ use crate::{ use anyhow::{anyhow, Context, Result}; use clap::{Parser, Subcommand}; use serde_json::{json, Value}; -use std::collections::{BTreeMap, BTreeSet}; +use std::collections::BTreeSet; use std::fs; use std::path::{Path, PathBuf}; use std::process; -use std::thread; -use std::time::{Duration, SystemTime, UNIX_EPOCH}; +use std::time::{SystemTime, UNIX_EPOCH}; use crate::mcp; @@ -66,7 +65,8 @@ struct Args { #[arg(long, value_name = "PATH")] sarif: Option, - /// Watch mode: rescan on a fixed interval and reprint. + /// Watch mode: rescan and reprint when files change. Filesystem-driven + /// (no polling); ignores changes inside target / .git / node_modules / .raysense. #[arg(long)] watch: bool, @@ -74,10 +74,6 @@ struct Args { #[arg(long, value_name = "PORT", num_args = 0..=1, default_missing_value = "7000")] ui: Option, - /// Re-scan interval in seconds (used by `--watch` and `--ui`). - #[arg(long, default_value_t = 2)] - interval: u64, - /// Run as a stdio MCP server. Path is ignored. 
#[arg(long)] mcp: bool, @@ -287,10 +283,10 @@ pub fn run() -> Result<()> { return mcp::run(); } if let Some(port) = args.ui { - return serve_visualization(&args.path, args.config.as_deref(), args.interval, port); + return serve_visualization(&args.path, args.config.as_deref(), port); } if args.watch { - return watch_project(&args.path, args.config.as_deref(), args.interval); + return watch_project(&args.path, args.config.as_deref()); } if args.check { let exit = check_project( @@ -612,25 +608,89 @@ fn sarif_uri(root: &Path, path: &str) -> String { } } -fn watch_project(root: &Path, config_path: Option<&Path>, interval: u64) -> Result<()> { +fn watch_project(root: &Path, config_path: Option<&Path>) -> Result<()> { let mut last_snapshot = String::new(); - loop { + let mut emit = || -> Result<()> { let config = config_for_root(root, config_path)?; let report = scan_path_with_config(root, &config)?; let health = compute_health_with_config(&report, &config); if report.snapshot.snapshot_id != last_snapshot { println!( - "snapshot {} quality_signal={} score={} files={} rules={}", + "snapshot {} score={} files={} rules={}", report.snapshot.snapshot_id, - health.quality_signal, health.score, report.snapshot.file_count, health.rules.len() ); last_snapshot = report.snapshot.snapshot_id; } - thread::sleep(Duration::from_secs(interval.max(1))); + Ok(()) + }; + emit()?; + watch_paths_blocking(root, emit) +} + +/// Watch a project root with `notify`, debounce events that fall in +/// always-ignored directories (target, .git, node_modules, .raysense), +/// and call `on_change` once per debounced burst (~150 ms window). 
+fn watch_paths_blocking<F>(root: &Path, mut on_change: F) -> Result<()> +where + F: FnMut() -> Result<()>, +{ + use notify::{RecursiveMode, Watcher}; + use std::sync::mpsc; + let (tx, rx) = mpsc::channel::<()>(); + let mut watcher = notify::recommended_watcher(move |res: notify::Result<notify::Event>| { + if let Ok(event) = res { + if relevant_event(&event) { + let _ = tx.send(()); + } + } + }) + .context("failed to start filesystem watcher")?; + watcher + .watch(root, RecursiveMode::Recursive) + .with_context(|| format!("failed to watch {}", root.display()))?; + loop { + // wait for at least one event + if rx.recv().is_err() { + break; + } + // drain rapid bursts + let deadline = std::time::Instant::now() + std::time::Duration::from_millis(150); + loop { + let now = std::time::Instant::now(); + if now >= deadline { + break; + } + match rx.recv_timeout(deadline - now) { + Ok(()) => continue, + Err(_) => break, + } + } + on_change()?; } + Ok(()) +} + +fn relevant_event(event: &notify::Event) -> bool { + use notify::EventKind; + if !matches!( + event.kind, + EventKind::Create(_) | EventKind::Modify(_) | EventKind::Remove(_) + ) { + return false; + } + event.paths.iter().any(|p| !is_ignored_event_path(p)) +} + +fn is_ignored_event_path(path: &Path) -> bool { + path.components().any(|c| { + matches!( + c.as_os_str().to_str(), + Some("target" | ".git" | "node_modules" | ".raysense") + ) + }) } /// Run a tokio HTTP server that hosts the live visualization. The server @@ -639,15 +699,9 @@ fn watch_project(root: &Path, config_path: Option<&Path>, interval: u64) -> Resu /// the HTML page without any meta-refresh. Browsers connected to `/events` /// reload the page on each change; other state (filter selections, scroll, /// expanded panels) survives whenever data didn't actually change. 
-fn serve_visualization( - root: &Path, - config_path: Option<&Path>, - interval: u64, - port: u16, -) -> Result<()> { +fn serve_visualization(root: &Path, config_path: Option<&Path>, port: u16) -> Result<()> { let root = root.to_path_buf(); let config_path = config_path.map(Path::to_path_buf); - let interval = interval.max(1); let runtime = tokio::runtime::Builder::new_multi_thread() .enable_all() @@ -672,15 +726,48 @@ fn serve_visualization( tx: broadcast::channel::<()>(16).0, }); + // Bridge filesystem events into a tokio mpsc; the watcher's callback + // runs on a private notify thread (sync), and we drain into the + // async runtime for debouncing + rescan. + let (fs_tx, mut fs_rx) = tokio::sync::mpsc::unbounded_channel::<()>(); + let watcher_root = root.clone(); + let _watcher_keepalive = tokio::task::spawn_blocking(move || { + use notify::{RecursiveMode, Watcher}; + let mut watcher = + match notify::recommended_watcher(move |res: notify::Result| { + if let Ok(event) = res { + if relevant_event(&event) { + let _ = fs_tx.send(()); + } + } + }) { + Ok(w) => w, + Err(err) => { + eprintln!("filesystem watcher init failed: {err}"); + return; + } + }; + if let Err(err) = watcher.watch(&watcher_root, RecursiveMode::Recursive) { + eprintln!("filesystem watcher attach failed: {err}"); + return; + } + // Park here forever; dropping the watcher would stop events. + std::thread::park(); + }); + let scanner_state = state.clone(); let scanner_root = root.clone(); let scanner_config = config_path.clone(); tokio::spawn(async move { - let mut ticker = tokio::time::interval(std::time::Duration::from_secs(interval)); - ticker.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip); - ticker.tick().await; // first tick fires immediately; we already scanned. 
- loop { - ticker.tick().await; + let debounce = std::time::Duration::from_millis(150); + while let Some(()) = fs_rx.recv().await { + // drain rapid bursts + loop { + match tokio::time::timeout(debounce, fs_rx.recv()).await { + Ok(Some(())) => continue, + _ => break, + } + } let scan = match tokio::task::spawn_blocking({ let root = scanner_root.clone(); let cfg = scanner_config.clone(); @@ -743,11 +830,7 @@ fn serve_visualization( let listener = tokio::net::TcpListener::bind(addr) .await .with_context(|| format!("failed to bind {addr}"))?; - println!( - "visualization http://{addr} interval={interval}s — Ctrl+C to stop", - addr = addr, - interval = interval, - ); + println!("visualization http://{addr} (filesystem watcher; Ctrl+C to stop)"); axum::serve(listener, app) .with_graceful_shutdown(async { @@ -933,54 +1016,145 @@ pub(crate) fn visualization_html( ) .unwrap_or_else(|_| "[]".to_string()); - let cells = report - .files + let _ = max_lines; + let author_by_path: std::collections::HashMap<&str, &str> = health + .metrics + .evolution + .file_ownership .iter() - .map(|file| { - let width = ((file.lines as f64 / max_lines as f64) * 100.0).max(8.0); - let path = file.path.to_string_lossy(); - let churn = churn_by_path.get(path.as_ref()).copied().unwrap_or(0); - let age = age_by_path.get(path.as_ref()).copied().unwrap_or(0); - let risk = risk_by_path.get(path.as_ref()).copied().unwrap_or(0); - let instability = instability_by_module - .get(file.module.as_str()) - .copied() - .unwrap_or(0.0); - let directory = directory_for(path.as_ref()); - let is_entry = if entry_point_files.contains(&file.file_id) { 1 } else { 0 }; - format!( - "
{}{} lines{}
", - html_escape(&path), - file.lines, - html_escape(&file.language_name), - churn, - age, - risk, - instability, - html_escape(&directory), - is_entry, - html_escape(&path), - file.lines, - html_escape(&file.language_name) - ) - }) - .collect::>() - .join(""); - let modules = health + .map(|o| (o.path.as_str(), o.top_author.as_str())) + .collect(); + let bus_by_path: std::collections::HashMap<&str, usize> = health .metrics - .dsm - .top_module_edges + .evolution + .file_ownership .iter() - .map(|edge| { - format!( - "{}{}{}", - html_escape(&edge.from_module), - html_escape(&edge.to_module), - edge.edges - ) - }) - .collect::>() - .join(""); + .map(|o| (o.path.as_str(), o.bus_factor)) + .collect(); + let test_gap_paths: std::collections::HashSet<&str> = health + .metrics + .test_gap + .candidates + .iter() + .map(|c| c.path.as_str()) + .collect(); + let cycle_index_by_path: std::collections::HashMap = health + .metrics + .architecture + .cycles + .iter() + .enumerate() + .flat_map(|(idx, files)| files.iter().map(move |f| (f.clone(), idx))) + .collect(); + let files_json = serde_json::to_string( + &report + .files + .iter() + .map(|file| { + let path = file.path.to_string_lossy().into_owned(); + let churn = churn_by_path.get(path.as_str()).copied().unwrap_or(0); + let age = age_by_path.get(path.as_str()).copied().unwrap_or(0); + let risk = risk_by_path.get(path.as_str()).copied().unwrap_or(0); + let instability = instability_by_module + .get(file.module.as_str()) + .copied() + .unwrap_or(0.0); + let directory = directory_for(path.as_str()); + let is_entry = entry_point_files.contains(&file.file_id); + let author = author_by_path + .get(path.as_str()) + .copied() + .unwrap_or("") + .to_string(); + let bus = bus_by_path.get(path.as_str()).copied().unwrap_or(0); + let in_test_gap = test_gap_paths.contains(path.as_str()); + let cycle = cycle_index_by_path.get(path.as_str()).copied(); + serde_json::json!({ + "path": path, + "lines": file.lines, + "language": 
file.language_name, + "churn": churn, + "age": age, + "risk": risk, + "instability": instability, + "directory": directory, + "entry": is_entry, + "author": author, + "bus": bus, + "test_gap": in_test_gap, + "cycle": cycle, + }) + }) + .collect::>(), + ) + .unwrap_or_else(|_| "[]".to_string()); + + let cycles_json = serde_json::to_string(&health.metrics.architecture.cycles) + .unwrap_or_else(|_| "[]".to_string()); + let change_coupling_json = serde_json::to_string(&health.metrics.evolution.change_coupling) + .unwrap_or_else(|_| "[]".to_string()); + let distance_metrics_json = serde_json::to_string( + &health + .metrics + .architecture + .distance_metrics + .iter() + .map(|m| { + serde_json::json!({ + "module": m.module, + "instability": m.instability, + "abstractness": m.abstractness, + "distance": m.distance, + "is_foundation": m.is_foundation, + }) + }) + .collect::>(), + ) + .unwrap_or_else(|_| "[]".to_string()); + let dsm_json = serde_json::to_string(&health.metrics.dsm.top_module_edges) + .unwrap_or_else(|_| "[]".to_string()); + let trend_json = read_trend_samples(&report.snapshot.root) + .map(|s| serde_json::to_string(&s).unwrap_or_else(|_| "[]".to_string())) + .unwrap_or_else(|| "[]".to_string()); + let functions_json = { + use std::collections::HashMap; + // group functions by file with their cyclomatic complexity from the + // health complexity table; each entry is { path, functions: [...] 
} + let mut complexity_by_function: HashMap = + HashMap::new(); + for fc in &health.metrics.complexity.all_functions { + complexity_by_function.insert(fc.function_id, fc); + } + let mut grouped: HashMap> = HashMap::new(); + for func in &report.functions { + let lines = func.end_line.saturating_sub(func.start_line) + 1; + let value = complexity_by_function + .get(&func.function_id) + .map(|fc| fc.value) + .unwrap_or(0); + grouped + .entry(func.file_id) + .or_default() + .push(serde_json::json!({ + "name": func.name, + "lines": lines, + "value": value, + })); + } + let entries: Vec = report + .files + .iter() + .filter_map(|file| { + grouped.get(&file.file_id).map(|fns| { + serde_json::json!({ + "path": file.path.to_string_lossy(), + "functions": fns, + }) + }) + }) + .collect(); + serde_json::to_string(&entries).unwrap_or_else(|_| "[]".to_string()) + }; let complex = health .metrics .complexity @@ -1042,65 +1216,37 @@ pub(crate) fn visualization_html( .collect::>() .join(""); - let mut module_names = BTreeSet::new(); - for module in &health.metrics.architecture.unstable_modules { - if !module.module.is_empty() { - module_names.insert(module.module.clone()); - } - } - for edge in &health.metrics.dsm.top_module_edges { - if !edge.from_module.is_empty() { - module_names.insert(edge.from_module.clone()); - } - if !edge.to_module.is_empty() { - module_names.insert(edge.to_module.clone()); - } - } - let module_names = module_names.into_iter().take(16).collect::>(); - let stability_by_module = health + // Module edges and instability are now surfaced in the left panel as + // text rows; the central viz is a treemap, not a node-link diagram. 
+ let unstable_modules = health .metrics .architecture .unstable_modules .iter() - .map(|module| (module.module.clone(), module.instability)) - .collect::>(); - let module_positions = module_names - .iter() - .enumerate() - .map(|(idx, module)| { - let x = 80 + (idx % 4) * 190; - let y = 70 + (idx / 4) * 70; - (module.clone(), (x, y)) + .take(8) + .map(|m| { + format!( + "{}{:.3}{}{}", + html_escape(&m.module), + m.instability, + m.fan_in, + m.fan_out, + ) }) - .collect::>(); - let module_edges = health + .collect::>() + .join(""); + let module_edges_rows = health .metrics .dsm .top_module_edges .iter() - .filter_map(|edge| { - let (x1, y1) = module_positions.get(&edge.from_module)?; - let (x2, y2) = module_positions.get(&edge.to_module)?; - let width = edge.edges.min(8).max(1); - Some(format!( - "" - )) - }) - .collect::>() - .join(""); - let module_nodes = module_names - .iter() - .map(|module| { - let (x, y) = module_positions[module]; - let instability = stability_by_module.get(module).copied().unwrap_or(0.0); - let radius = 22 + (instability * 18.0).round() as usize; - let label = compact_label(module, 24); + .take(8) + .map(|edge| { format!( - "{}{} instability {:.3}", - html_escape(&label), - html_escape(module), - instability, - text_y = y + radius + 16 + "{}{}{}", + html_escape(&edge.from_module), + html_escape(&edge.to_module), + edge.edges, ) }) .collect::>() @@ -1120,349 +1266,790 @@ pub(crate) fn visualization_html( "hotspots": health.hotspots, })) .unwrap_or_else(|_| "{}".to_string()); + let project_name = report + .snapshot + .root + .canonicalize() + .ok() + .as_deref() + .and_then(|p| p.file_name()) + .or_else(|| report.snapshot.root.file_name()) + .map(|s| s.to_string_lossy().into_owned()) + .filter(|s| !s.is_empty()) + .unwrap_or_else(|| report.snapshot.root.to_string_lossy().into_owned()); + let arch = &health.metrics.architecture; + let evo = &health.metrics.evolution; + let cycles = arch.cycles.len(); + let upward = 
arch.upward_violations.len(); + let max_blast = arch.max_blast_radius; + let attack_pct = arch.attack_surface_ratio * 100.0; + let commits = evo.commits_sampled; + let authors = evo.author_count; + let changed = evo.changed_files; format!( r#" Raysense -
-
{}quality signal
-
{}score
-
{}coverage
-
{}structure
-
{}files
-
{}functions
-
{}rules
-
{:.3}modularity
-
{:.3}redundancy
-
-

Files

-
- - - - +
+
+

raysense {}

+
+ + - - - + + + +
+
+
+
+ - + + + + + + + "#, - health.quality_signal, + html_escape(&project_name), + (health.score as f64 * 1.3).round() as u32, + health.score, health.score, + (health.score as f64 * 1.3).round() as u32, health.coverage_score, health.structural_score, report.files.len(), report.functions.len(), health.rules.len(), - health.root_causes.modularity, - health.root_causes.modularity * 100.0, - health.root_causes.redundancy, - health.root_causes.redundancy * 100.0, - cells, - module_edges, - module_nodes, - modules, + (health.root_causes.modularity * 100.0).round() as u32, + html_escape(&health.grades.modularity), + (health.root_causes.acyclicity * 100.0).round() as u32, + html_escape(&health.grades.acyclicity), + (health.root_causes.depth * 100.0).round() as u32, + html_escape(&health.grades.depth), + (health.root_causes.equality * 100.0).round() as u32, + html_escape(&health.grades.equality), + (health.root_causes.redundancy * 100.0).round() as u32, + html_escape(&health.grades.redundancy), + (health.root_causes.structural_uniformity * 100.0).round() as u32, + html_escape(&health.grades.structural_uniformity), + cycles, + max_blast, + attack_pct, + upward, + commits, + authors, + changed, + unstable_modules, + module_edges_rows, hotspots, rules, complex, gaps, + json_script_escape(&files_json), + json_script_escape(&adjacency_json), json_script_escape(&telemetry), - json_script_escape(&adjacency_json) + json_script_escape(&cycles_json), + json_script_escape(&change_coupling_json), + json_script_escape(&distance_metrics_json), + json_script_escape(&dsm_json), + json_script_escape(&trend_json), + json_script_escape(&functions_json), ) } @@ -1514,23 +2124,17 @@ fn json_script_escape(value: &str) -> String { .replace('&', "\\u0026") } -fn compact_label(value: &str, max_chars: usize) -> String { - if value.chars().count() <= max_chars { - return value.to_string(); - } - let tail = value - .rsplit(['/', '.']) - .find(|part| !part.is_empty()) - .unwrap_or(value); - if 
tail.chars().count() <= max_chars { - tail.to_string() - } else { - let prefix = tail - .chars() - .take(max_chars.saturating_sub(3)) - .collect::(); - format!("{prefix}...") - } +#[derive(serde::Deserialize, serde::Serialize)] +struct TrendPoint { + score: u8, +} + +/// Read `.raysense/trends/history.json` if it exists. The file is only +/// written by `--trend record`; absence is normal and silent. +fn read_trend_samples(root: &Path) -> Option> { + let path = root.join(".raysense/trends/history.json"); + let content = fs::read_to_string(&path).ok()?; + serde_json::from_str::>(&content).ok() } fn list_plugins(root: &Path, config_path: Option<&Path>) -> Result<()> { @@ -1951,10 +2555,9 @@ fn show_trend(root: &Path, config_path: Option<&Path>, json: bool) -> Result<()> println!("{}", serde_json::to_string_pretty(&health.metrics.trend)?); } else if health.metrics.trend.available { println!( - "trend samples={} score_delta={} quality_signal_delta={} rule_delta={}", + "trend samples={} score_delta={} rule_delta={}", health.metrics.trend.samples, health.metrics.trend.score_delta, - health.metrics.trend.quality_signal_delta, health.metrics.trend.rule_delta ); } else { @@ -2008,11 +2611,9 @@ fn print_what_if( println!("{}", serde_json::to_string_pretty(&output)?); } else { println!( - "what_if score {} -> {} quality_signal {} -> {} files {} -> {} rules {} -> {}", + "what_if score {} -> {} / 100 files {} -> {} rules {} -> {}", before_health.score, after_health.score, - before_health.quality_signal, - after_health.quality_signal, before_report.snapshot.file_count, after_report.snapshot.file_count, before_health.rules.len(), @@ -2266,10 +2867,9 @@ fn print_baseline_diff(diff: &BaselineDiff) { } fn print_health(report: &crate::ScanReport, health: &crate::HealthSummary) { - println!("score {}", health.score); - println!("quality_signal {}", health.quality_signal); - println!("coverage_score {}", health.coverage_score); - println!("structural_score {}", 
health.structural_score); + println!("score {} / 100", health.score); + println!("coverage {} / 100", health.coverage_score); + println!("structure {} / 100", health.structural_score); println!("root {}", report.snapshot.root.display()); println!( "facts files={} functions={} calls={} call_edges={} imports={}", @@ -2350,25 +2950,23 @@ fn print_health(report: &crate::ScanReport, health: &crate::HealthSummary) { "dsm modules={} module_edges={}", health.metrics.dsm.module_count, health.metrics.dsm.module_edges ); + let pct = |v: f64| (v * 100.0).round() as u32; println!( - "root_causes modularity={:.3} acyclicity={:.3} depth={:.3} equality={:.3} redundancy={:.3} structural_uniformity={:.3}", - health.root_causes.modularity, - health.root_causes.acyclicity, - health.root_causes.depth, - health.root_causes.equality, - health.root_causes.redundancy, - health.root_causes.structural_uniformity - ); - println!( - "grades overall={} modularity={} acyclicity={} depth={} equality={} redundancy={} structural_uniformity={}", - health.grades.overall, + "dimensions modularity={}/100 ({}) acyclicity={}/100 ({}) depth={}/100 ({}) equality={}/100 ({}) redundancy={}/100 ({}) structural_uniformity={}/100 ({})", + pct(health.root_causes.modularity), health.grades.modularity, + pct(health.root_causes.acyclicity), health.grades.acyclicity, + pct(health.root_causes.depth), health.grades.depth, + pct(health.root_causes.equality), health.grades.equality, + pct(health.root_causes.redundancy), health.grades.redundancy, - health.grades.structural_uniformity + pct(health.root_causes.structural_uniformity), + health.grades.structural_uniformity, ); + println!("overall_grade {}", health.grades.overall); println!( "architecture depth={} max_blast_radius={} max_blast_radius_file={} max_non_foundation_blast_radius={} max_non_foundation_blast_radius_file={} attack_surface_files={} attack_surface_ratio={:.3} upward_violations={} upward_violation_ratio={:.3} average_distance_from_main_sequence={:.3}", 
health.metrics.architecture.module_depth, @@ -2538,14 +3136,17 @@ mod tests { } #[test] - fn visualization_html_includes_color_mode_and_detail_panel() { + fn visualization_html_includes_treemap_and_panels() { let report = crate::scan_path(env!("CARGO_MANIFEST_DIR")).unwrap(); let health = crate::compute_health(&report); let html = visualization_html(&report, &health); assert!(html.contains("id=\"color-mode\"")); - assert!(html.contains("data-churn")); - assert!(html.contains("id=\"file-detail\"")); + assert!(html.contains("id=\"treemap\"")); + assert!(html.contains("id=\"raysense-files\"")); + assert!(html.contains("id=\"raysense-adjacency\"")); assert!(html.contains("id=\"raysense-telemetry\"")); + assert!(html.contains("\"churn\""), "files JSON should carry churn"); + assert!(html.contains("class=\"app\"")); } #[test]