From 0b6ee41bcba929f0a05446ccc79a36fa2790355c Mon Sep 17 00:00:00 2001 From: hwisu Date: Fri, 6 Mar 2026 15:12:48 +0900 Subject: [PATCH 01/30] refactor effect boundaries for ui and runtime entrypoints --- crates/server/src/app_config.rs | 266 +++++++ crates/server/src/main.rs | 139 +--- desktop/src-tauri/src/app/mod.rs | 1 + desktop/src-tauri/src/app/session_query.rs | 117 ++++ desktop/src-tauri/src/main.rs | 89 +-- packages/ui/package-lock.json | 55 ++ packages/ui/package.json | 3 +- packages/ui/src/api-internal/auth-services.ts | 198 ++++++ packages/ui/src/api-internal/errors.ts | 66 ++ .../api-internal/parse-preview-services.ts | 105 +++ packages/ui/src/api-internal/requests.ts | 70 ++ packages/ui/src/api-internal/runtime.ts | 220 ++++++ .../ui/src/api-internal/session-services.ts | 274 ++++++++ packages/ui/src/api.ts | 654 +++--------------- .../src/components/SessionDetailPage.svelte | 154 +++-- .../ui/src/components/SessionListPage.svelte | 348 ++++------ packages/ui/src/index.ts | 12 + .../src/models/session-detail-model.test.ts | 116 ++++ .../ui/src/models/session-detail-model.ts | 202 ++++++ .../ui/src/models/session-list-model.test.ts | 165 +++++ packages/ui/src/models/session-list-model.ts | 322 +++++++++ .../src/models/source-preview-model.test.ts | 98 +++ .../ui/src/models/source-preview-model.ts | 244 +++++++ packages/ui/src/source-route.test.ts | 49 ++ packages/ui/src/source-route.ts | 214 ++++++ web/package-lock.json | 1 + .../src/[provider]/[...segments]/+page.svelte | 414 ++--------- 27 files changed, 3184 insertions(+), 1412 deletions(-) create mode 100644 crates/server/src/app_config.rs create mode 100644 desktop/src-tauri/src/app/mod.rs create mode 100644 desktop/src-tauri/src/app/session_query.rs create mode 100644 packages/ui/src/api-internal/auth-services.ts create mode 100644 packages/ui/src/api-internal/errors.ts create mode 100644 packages/ui/src/api-internal/parse-preview-services.ts create mode 100644 
packages/ui/src/api-internal/requests.ts create mode 100644 packages/ui/src/api-internal/runtime.ts create mode 100644 packages/ui/src/api-internal/session-services.ts create mode 100644 packages/ui/src/models/session-detail-model.test.ts create mode 100644 packages/ui/src/models/session-detail-model.ts create mode 100644 packages/ui/src/models/session-list-model.test.ts create mode 100644 packages/ui/src/models/session-list-model.ts create mode 100644 packages/ui/src/models/source-preview-model.test.ts create mode 100644 packages/ui/src/models/source-preview-model.ts create mode 100644 packages/ui/src/source-route.test.ts create mode 100644 packages/ui/src/source-route.ts diff --git a/crates/server/src/app_config.rs b/crates/server/src/app_config.rs new file mode 100644 index 00000000..f5507c31 --- /dev/null +++ b/crates/server/src/app_config.rs @@ -0,0 +1,266 @@ +use std::path::PathBuf; + +use opensession_api::crypto::CredentialKeyring; +use opensession_api::oauth::{self, OAuthProviderConfig}; + +#[derive(Clone)] +pub struct AppConfig { + pub base_url: String, + pub allowed_origins: Vec, + pub oauth_use_request_host: bool, + pub jwt_secret: String, + pub admin_key: String, + pub oauth_providers: Vec, + pub public_feed_enabled: bool, + pub local_review_root: Option, + pub credential_keyring: Option, +} + +pub struct ServerBootstrap { + pub data_dir: PathBuf, + pub web_dir: PathBuf, + pub port: String, + pub config: AppConfig, +} + +pub fn load_server_bootstrap() -> ServerBootstrap { + let data_dir = std::env::var("OPENSESSION_DATA_DIR") + .map(PathBuf::from) + .unwrap_or_else(|_| PathBuf::from("data")); + let web_dir = std::env::var("OPENSESSION_WEB_DIR") + .map(PathBuf::from) + .unwrap_or_else(|_| PathBuf::from("web/build")); + let port = std::env::var("PORT").unwrap_or_else(|_| "3000".to_string()); + + let base_url_env = env_trimmed("BASE_URL").or_else(|| env_trimmed("OPENSESSION_BASE_URL")); + let base_url = base_url_env + .clone() + .unwrap_or_else(|| 
"http://localhost:3000".to_string()); + let public_feed_enabled_raw = + std::env::var(opensession_api::deploy::ENV_PUBLIC_FEED_ENABLED).ok(); + + ServerBootstrap { + data_dir, + web_dir, + port, + config: AppConfig { + base_url: base_url.clone(), + allowed_origins: load_allowed_origins(&base_url), + oauth_use_request_host: base_url_env.is_none(), + jwt_secret: env_trimmed("JWT_SECRET").unwrap_or_default(), + admin_key: env_trimmed("OPENSESSION_ADMIN_KEY").unwrap_or_default(), + oauth_providers: load_oauth_providers(), + public_feed_enabled: opensession_api::deploy::parse_bool_flag( + public_feed_enabled_raw.as_deref(), + true, + ), + local_review_root: std::env::var("OPENSESSION_LOCAL_REVIEW_ROOT") + .ok() + .map(PathBuf::from), + credential_keyring: load_credential_keyring(), + }, + } +} + +fn origin_from_base_url(raw: &str) -> Option { + let url = reqwest::Url::parse(raw).ok()?; + let host = url.host_str()?; + let mut origin = format!("{}://{host}", url.scheme()); + if let Some(port) = url.port() { + origin.push(':'); + origin.push_str(&port.to_string()); + } + Some(origin) +} + +fn load_allowed_origins(base_url: &str) -> Vec { + let configured = std::env::var("OPENSESSION_ALLOWED_ORIGINS") + .ok() + .map(|value| { + value + .split(',') + .map(str::trim) + .filter(|item| !item.is_empty()) + .map(ToOwned::to_owned) + .collect::>() + }) + .unwrap_or_default(); + if !configured.is_empty() { + return configured; + } + origin_from_base_url(base_url).into_iter().collect() +} + +fn load_oauth_providers() -> Vec { + [try_load_github(), try_load_gitlab()] + .into_iter() + .flatten() + .collect() +} + +fn env_trimmed(name: &str) -> Option { + std::env::var(name) + .ok() + .and_then(|value| oauth::normalize_oauth_config_value(&value)) +} + +fn try_load_github() -> Option { + let id = env_trimmed("GITHUB_CLIENT_ID")?; + let secret = env_trimmed("GITHUB_CLIENT_SECRET")?; + tracing::info!("OAuth provider enabled: GitHub"); + Some(oauth::github_preset(id, secret)) +} + +fn 
try_load_gitlab() -> Option { + let url = env_trimmed("GITLAB_URL")?; + let id = env_trimmed("GITLAB_CLIENT_ID")?; + let secret = env_trimmed("GITLAB_CLIENT_SECRET")?; + let ext_url = env_trimmed("GITLAB_EXTERNAL_URL"); + tracing::info!("OAuth provider enabled: GitLab ({})", url); + Some(oauth::gitlab_preset(url, ext_url, id, secret)) +} + +fn load_credential_keyring() -> Option { + let active = env_trimmed("OPENSESSION_CREDENTIAL_ACTIVE_KID")?; + let keyset = env_trimmed("OPENSESSION_CREDENTIAL_KEYS")?; + match CredentialKeyring::from_csv(&active, &keyset) { + Ok(keyring) => Some(keyring), + Err(err) => { + tracing::error!("invalid credential encryption config: {}", err.message()); + None + } + } +} + +#[cfg(test)] +mod tests { + use super::load_server_bootstrap; + use std::sync::{LazyLock, Mutex, MutexGuard}; + + static TEST_ENV_LOCK: LazyLock> = LazyLock::new(|| Mutex::new(())); + + struct EnvVarGuard { + key: &'static str, + previous: Option, + } + + impl EnvVarGuard { + fn set(key: &'static str, value: &str) -> Self { + let previous = std::env::var(key).ok(); + std::env::set_var(key, value); + Self { key, previous } + } + + fn clear(key: &'static str) -> Self { + let previous = std::env::var(key).ok(); + std::env::remove_var(key); + Self { key, previous } + } + } + + impl Drop for EnvVarGuard { + fn drop(&mut self) { + if let Some(previous) = self.previous.take() { + std::env::set_var(self.key, previous); + } else { + std::env::remove_var(self.key); + } + } + } + + fn lock_env() -> MutexGuard<'static, ()> { + TEST_ENV_LOCK.lock().expect("test env lock") + } + + #[test] + fn bootstrap_uses_explicit_base_url_and_allowed_origins() { + let _lock = lock_env(); + let _guards = [ + EnvVarGuard::set("BASE_URL", "https://api.example.test"), + EnvVarGuard::set("OPENSESSION_BASE_URL", "https://ignored.example.test"), + EnvVarGuard::set( + "OPENSESSION_ALLOWED_ORIGINS", + " https://app.example.test , https://ops.example.test ", + ), + 
EnvVarGuard::clear("OPENSESSION_DATA_DIR"), + EnvVarGuard::clear("OPENSESSION_WEB_DIR"), + EnvVarGuard::clear("PORT"), + EnvVarGuard::clear("JWT_SECRET"), + EnvVarGuard::clear("OPENSESSION_ADMIN_KEY"), + EnvVarGuard::clear(opensession_api::deploy::ENV_PUBLIC_FEED_ENABLED), + EnvVarGuard::clear("OPENSESSION_LOCAL_REVIEW_ROOT"), + EnvVarGuard::clear("OPENSESSION_CREDENTIAL_ACTIVE_KID"), + EnvVarGuard::clear("OPENSESSION_CREDENTIAL_KEYS"), + EnvVarGuard::clear("GITHUB_CLIENT_ID"), + EnvVarGuard::clear("GITHUB_CLIENT_SECRET"), + EnvVarGuard::clear("GITLAB_URL"), + EnvVarGuard::clear("GITLAB_CLIENT_ID"), + EnvVarGuard::clear("GITLAB_CLIENT_SECRET"), + EnvVarGuard::clear("GITLAB_EXTERNAL_URL"), + ]; + + let bootstrap = load_server_bootstrap(); + + assert_eq!(bootstrap.config.base_url, "https://api.example.test"); + assert_eq!( + bootstrap.config.allowed_origins, + vec![ + "https://app.example.test".to_string(), + "https://ops.example.test".to_string() + ] + ); + assert!(!bootstrap.config.oauth_use_request_host); + assert_eq!(bootstrap.data_dir, std::path::PathBuf::from("data")); + assert_eq!(bootstrap.web_dir, std::path::PathBuf::from("web/build")); + assert_eq!(bootstrap.port, "3000"); + } + + #[test] + fn bootstrap_derives_origin_and_request_host_mode_when_base_url_is_missing() { + let _lock = lock_env(); + let _guards = [ + EnvVarGuard::clear("BASE_URL"), + EnvVarGuard::clear("OPENSESSION_BASE_URL"), + EnvVarGuard::clear("OPENSESSION_ALLOWED_ORIGINS"), + EnvVarGuard::clear("OPENSESSION_CREDENTIAL_ACTIVE_KID"), + EnvVarGuard::clear("OPENSESSION_CREDENTIAL_KEYS"), + EnvVarGuard::clear("GITHUB_CLIENT_ID"), + EnvVarGuard::clear("GITHUB_CLIENT_SECRET"), + EnvVarGuard::clear("GITLAB_URL"), + EnvVarGuard::clear("GITLAB_CLIENT_ID"), + EnvVarGuard::clear("GITLAB_CLIENT_SECRET"), + EnvVarGuard::clear("GITLAB_EXTERNAL_URL"), + ]; + + let bootstrap = load_server_bootstrap(); + + assert_eq!(bootstrap.config.base_url, "http://localhost:3000"); + assert_eq!( + 
bootstrap.config.allowed_origins, + vec!["http://localhost:3000".to_string()] + ); + assert!(bootstrap.config.oauth_use_request_host); + } + + #[test] + fn bootstrap_ignores_invalid_credential_keyring_config() { + let _lock = lock_env(); + let _guards = [ + EnvVarGuard::clear("BASE_URL"), + EnvVarGuard::clear("OPENSESSION_BASE_URL"), + EnvVarGuard::clear("OPENSESSION_ALLOWED_ORIGINS"), + EnvVarGuard::set("OPENSESSION_CREDENTIAL_ACTIVE_KID", "kid-1"), + EnvVarGuard::set("OPENSESSION_CREDENTIAL_KEYS", "not-a-valid-keyset"), + EnvVarGuard::clear("GITHUB_CLIENT_ID"), + EnvVarGuard::clear("GITHUB_CLIENT_SECRET"), + EnvVarGuard::clear("GITLAB_URL"), + EnvVarGuard::clear("GITLAB_CLIENT_ID"), + EnvVarGuard::clear("GITLAB_CLIENT_SECRET"), + EnvVarGuard::clear("GITLAB_EXTERNAL_URL"), + ]; + + let bootstrap = load_server_bootstrap(); + + assert!(bootstrap.config.credential_keyring.is_none()); + } +} diff --git a/crates/server/src/main.rs b/crates/server/src/main.rs index ba78e4ce..d17e8313 100644 --- a/crates/server/src/main.rs +++ b/crates/server/src/main.rs @@ -1,3 +1,4 @@ +mod app_config; mod error; mod routes; mod storage; @@ -11,15 +12,15 @@ use axum::{ routing::{delete, get, post, put}, Router, }; -use std::path::PathBuf; use tower_http::cors::CorsLayer; use tower_http::services::{ServeDir, ServeFile}; use tower_http::trace::TraceLayer; -use opensession_api::crypto::CredentialKeyring; -use opensession_api::oauth::{self, OAuthProviderConfig}; +use app_config::load_server_bootstrap; use storage::Db; +pub use app_config::AppConfig; + /// Application state shared across all handlers. #[derive(Clone)] pub struct AppState { @@ -27,49 +28,6 @@ pub struct AppState { pub config: AppConfig, } -/// Server configuration loaded from environment variables. 
-#[derive(Clone)] -pub struct AppConfig { - pub base_url: String, - pub allowed_origins: Vec, - pub oauth_use_request_host: bool, - pub jwt_secret: String, - pub admin_key: String, - pub oauth_providers: Vec, - pub public_feed_enabled: bool, - pub local_review_root: Option, - pub credential_keyring: Option, -} - -fn origin_from_base_url(raw: &str) -> Option { - let url = reqwest::Url::parse(raw).ok()?; - let host = url.host_str()?; - let mut origin = format!("{}://{host}", url.scheme()); - if let Some(port) = url.port() { - origin.push(':'); - origin.push_str(&port.to_string()); - } - Some(origin) -} - -fn load_allowed_origins(base_url: &str) -> Vec { - let configured = std::env::var("OPENSESSION_ALLOWED_ORIGINS") - .ok() - .map(|value| { - value - .split(',') - .map(str::trim) - .filter(|item| !item.is_empty()) - .map(ToOwned::to_owned) - .collect::>() - }) - .unwrap_or_default(); - if !configured.is_empty() { - return configured; - } - origin_from_base_url(base_url).into_iter().collect() -} - fn build_cors_layer(allowed_origins: &[String]) -> CorsLayer { let csrf_header = HeaderName::from_static("x-csrf-token"); let origin_values: Vec = allowed_origins @@ -105,36 +63,6 @@ impl FromRef for AppConfig { } } -/// Load OAuth providers from environment variables. 
-fn load_oauth_providers() -> Vec { - [try_load_github(), try_load_gitlab()] - .into_iter() - .flatten() - .collect() -} - -fn env_trimmed(name: &str) -> Option { - std::env::var(name) - .ok() - .and_then(|v| oauth::normalize_oauth_config_value(&v)) -} - -fn try_load_github() -> Option { - let id = env_trimmed("GITHUB_CLIENT_ID")?; - let secret = env_trimmed("GITHUB_CLIENT_SECRET")?; - tracing::info!("OAuth provider enabled: GitHub"); - Some(oauth::github_preset(id, secret)) -} - -fn try_load_gitlab() -> Option { - let url = env_trimmed("GITLAB_URL")?; - let id = env_trimmed("GITLAB_CLIENT_ID")?; - let secret = env_trimmed("GITLAB_CLIENT_SECRET")?; - let ext_url = env_trimmed("GITLAB_EXTERNAL_URL"); - tracing::info!("OAuth provider enabled: GitLab ({})", url); - Some(oauth::gitlab_preset(url, ext_url, id, secret)) -} - #[tokio::main] async fn main() -> anyhow::Result<()> { // Initialize tracing @@ -145,10 +73,11 @@ async fn main() -> anyhow::Result<()> { ) .init(); - // Data directory - let data_dir = std::env::var("OPENSESSION_DATA_DIR") - .map(PathBuf::from) - .unwrap_or_else(|_| PathBuf::from("data")); + let bootstrap = load_server_bootstrap(); + let data_dir = bootstrap.data_dir; + let web_dir = bootstrap.web_dir; + let port = bootstrap.port; + let config = bootstrap.config; tracing::info!("data directory: {}", data_dir.display()); @@ -156,45 +85,20 @@ async fn main() -> anyhow::Result<()> { let db = storage::init_db(&data_dir)?; tracing::info!("database initialized"); - let base_url_env = env_trimmed("BASE_URL").or_else(|| env_trimmed("OPENSESSION_BASE_URL")); - let base_url = base_url_env - .clone() - .unwrap_or_else(|| "http://localhost:3000".into()); - - let jwt_secret = env_trimmed("JWT_SECRET").unwrap_or_default(); - if jwt_secret.is_empty() { + if config.jwt_secret.is_empty() { tracing::warn!("JWT_SECRET not set — JWT auth and OAuth will be disabled"); } - let admin_key = env_trimmed("OPENSESSION_ADMIN_KEY").unwrap_or_default(); - if admin_key.is_empty() 
{ + if config.admin_key.is_empty() { tracing::warn!("OPENSESSION_ADMIN_KEY not set — /api/admin routes will return 401"); } - - let oauth_providers = load_oauth_providers(); - let public_feed_enabled_raw = - std::env::var(opensession_api::deploy::ENV_PUBLIC_FEED_ENABLED).ok(); - let public_feed_enabled = - opensession_api::deploy::parse_bool_flag(public_feed_enabled_raw.as_deref(), true); - if !public_feed_enabled { + if !config.public_feed_enabled { tracing::info!( "public feed is disabled ({}=false)", opensession_api::deploy::ENV_PUBLIC_FEED_ENABLED ); } - let config = AppConfig { - base_url: base_url.clone(), - allowed_origins: load_allowed_origins(&base_url), - oauth_use_request_host: base_url_env.is_none(), - jwt_secret, - admin_key, - oauth_providers, - public_feed_enabled, - local_review_root: std::env::var("OPENSESSION_LOCAL_REVIEW_ROOT") - .ok() - .map(PathBuf::from), - credential_keyring: load_credential_keyring(), - }; + let base_url = config.base_url.clone(); let state = AppState { db, config }; @@ -253,10 +157,6 @@ async fn main() -> anyhow::Result<()> { .route("/docs", get(routes::docs::handle)) .route("/llms.txt", get(routes::docs::llms_txt)); - // Serve static files from web build if present - let web_dir = std::env::var("OPENSESSION_WEB_DIR") - .map(PathBuf::from) - .unwrap_or_else(|_| PathBuf::from("web/build")); if web_dir.exists() { tracing::info!("serving static files from {}", web_dir.display()); let index_html = web_dir.join("index.html"); @@ -270,21 +170,8 @@ async fn main() -> anyhow::Result<()> { tracing::info!("starting server at {base_url}"); - let port = std::env::var("PORT").unwrap_or_else(|_| "3000".into()); let listener = tokio::net::TcpListener::bind(format!("0.0.0.0:{port}")).await?; axum::serve(listener, app).await?; Ok(()) } - -fn load_credential_keyring() -> Option { - let active = env_trimmed("OPENSESSION_CREDENTIAL_ACTIVE_KID")?; - let keyset = env_trimmed("OPENSESSION_CREDENTIAL_KEYS")?; - match 
CredentialKeyring::from_csv(&active, &keyset) { - Ok(keyring) => Some(keyring), - Err(err) => { - tracing::error!("invalid credential encryption config: {}", err.message()); - None - } - } -} diff --git a/desktop/src-tauri/src/app/mod.rs b/desktop/src-tauri/src/app/mod.rs new file mode 100644 index 00000000..2621dc6d --- /dev/null +++ b/desktop/src-tauri/src/app/mod.rs @@ -0,0 +1 @@ +pub(crate) mod session_query; diff --git a/desktop/src-tauri/src/app/session_query.rs b/desktop/src-tauri/src/app/session_query.rs new file mode 100644 index 00000000..12dc8592 --- /dev/null +++ b/desktop/src-tauri/src/app/session_query.rs @@ -0,0 +1,117 @@ +use opensession_api::DesktopSessionListQuery; +use opensession_local_db::{LocalSessionFilter, LocalSortOrder, LocalTimeRange}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(crate) enum SearchMode { + Keyword, + Vector, +} + +fn normalize_non_empty(value: Option) -> Option { + value + .map(|raw| raw.trim().to_string()) + .and_then(|trimmed| (!trimmed.is_empty()).then_some(trimmed)) +} + +fn parse_positive_u32(raw: Option, fallback: u32, max: u32) -> u32 { + let parsed = raw + .and_then(|value| value.parse::().ok()) + .filter(|value| *value > 0) + .unwrap_or(fallback); + parsed.min(max).max(1) +} + +fn map_sort_order(sort: Option<&str>) -> LocalSortOrder { + match sort.unwrap_or_default() { + "popular" => LocalSortOrder::Popular, + "longest" => LocalSortOrder::Longest, + _ => LocalSortOrder::Recent, + } +} + +fn map_time_range(time_range: Option<&str>) -> LocalTimeRange { + match time_range.unwrap_or_default() { + "24h" => LocalTimeRange::Hours24, + "7d" => LocalTimeRange::Days7, + "30d" => LocalTimeRange::Days30, + _ => LocalTimeRange::All, + } +} + +pub(crate) fn split_search_mode(raw: Option) -> (Option, SearchMode) { + let normalized = normalize_non_empty(raw); + let Some(value) = normalized else { + return (None, SearchMode::Keyword); + }; + let lower = value.to_ascii_lowercase(); + for prefix in ["vector:", "vec:"] { + 
if lower.starts_with(prefix) { + let query = value[prefix.len()..].trim().to_string(); + return ((!query.is_empty()).then_some(query), SearchMode::Vector); + } + } + (Some(value), SearchMode::Keyword) +} + +pub(crate) fn build_local_filter_with_mode( + query: DesktopSessionListQuery, +) -> (LocalSessionFilter, u32, u32, SearchMode) { + let page = parse_positive_u32(query.page, 1, 10_000); + let per_page = parse_positive_u32(query.per_page, 20, 200); + let offset = (page.saturating_sub(1)).saturating_mul(per_page); + let (search_query, search_mode) = split_search_mode(query.search); + + let filter = LocalSessionFilter { + search: search_query, + tool: normalize_non_empty(query.tool), + git_repo_name: normalize_non_empty(query.git_repo_name), + exclude_low_signal: true, + sort: map_sort_order(query.sort.as_deref()), + time_range: map_time_range(query.time_range.as_deref()), + limit: Some(per_page), + offset: Some(offset), + ..Default::default() + }; + + (filter, page, per_page, search_mode) +} + +#[cfg(test)] +mod tests { + use super::{build_local_filter_with_mode, split_search_mode, SearchMode}; + use opensession_api::DesktopSessionListQuery; + use opensession_local_db::{LocalSortOrder, LocalTimeRange}; + + #[test] + fn query_mapping_trims_inputs_and_clamps_large_pages() { + let (filter, page, per_page, mode) = + build_local_filter_with_mode(DesktopSessionListQuery { + page: Some("0".to_string()), + per_page: Some("999".to_string()), + search: Some(" fix auth ".to_string()), + tool: Some(" codex ".to_string()), + git_repo_name: Some(" org/repo ".to_string()), + sort: Some("longest".to_string()), + time_range: Some("30d".to_string()), + force_refresh: None, + }); + + assert_eq!(page, 1); + assert_eq!(per_page, 200); + assert_eq!(mode, SearchMode::Keyword); + assert_eq!(filter.search.as_deref(), Some("fix auth")); + assert_eq!(filter.tool.as_deref(), Some("codex")); + assert_eq!(filter.git_repo_name.as_deref(), Some("org/repo")); + assert_eq!(filter.sort, 
LocalSortOrder::Longest); + assert_eq!(filter.time_range, LocalTimeRange::Days30); + assert_eq!(filter.offset, Some(0)); + } + + #[test] + fn vector_prefix_without_query_keeps_vector_mode_but_clears_search_text() { + let (query, mode) = split_search_mode(Some("vector: ".to_string())); + + assert_eq!(query, None); + assert_eq!(mode, SearchMode::Vector); + } +} diff --git a/desktop/src-tauri/src/main.rs b/desktop/src-tauri/src/main.rs index 1a23d760..6fd28f74 100644 --- a/desktop/src-tauri/src/main.rs +++ b/desktop/src-tauri/src/main.rs @@ -1,5 +1,8 @@ #![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] +mod app; + +use app::session_query::{build_local_filter_with_mode, split_search_mode, SearchMode}; use base64::{engine::general_purpose::STANDARD as BASE64_STANDARD, Engine as _}; use opensession_api::{ oauth::{AuthProvidersResponse, OAuthProviderInfo}, @@ -76,12 +79,6 @@ const CHANGE_READER_MAX_EVENTS: usize = 180; const CHANGE_READER_MAX_LINE_CHARS: usize = 220; const FORCE_REFRESH_MAX_DISCOVERY_PATHS: usize = 240; -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -enum SearchMode { - Keyword, - Vector, -} - #[derive(Debug, Clone)] struct VectorInstallRuntimeState { state: DesktopVectorInstallState, @@ -193,69 +190,6 @@ fn normalize_non_empty(value: Option) -> Option { .and_then(|trimmed| (!trimmed.is_empty()).then_some(trimmed)) } -fn split_search_mode(raw: Option) -> (Option, SearchMode) { - let normalized = normalize_non_empty(raw); - let Some(value) = normalized else { - return (None, SearchMode::Keyword); - }; - let lower = value.to_ascii_lowercase(); - for prefix in ["vector:", "vec:"] { - if lower.starts_with(prefix) { - let query = value[prefix.len()..].trim().to_string(); - return ((!query.is_empty()).then_some(query), SearchMode::Vector); - } - } - (Some(value), SearchMode::Keyword) -} - -fn parse_positive_u32(raw: Option, fallback: u32, max: u32) -> u32 { - let parsed = raw - .and_then(|value| value.parse::().ok()) - .filter(|value| *value > 
0) - .unwrap_or(fallback); - parsed.min(max).max(1) -} - -fn map_sort_order(sort: Option<&str>) -> opensession_local_db::LocalSortOrder { - match sort.unwrap_or_default() { - "popular" => opensession_local_db::LocalSortOrder::Popular, - "longest" => opensession_local_db::LocalSortOrder::Longest, - _ => opensession_local_db::LocalSortOrder::Recent, - } -} - -fn map_time_range(time_range: Option<&str>) -> opensession_local_db::LocalTimeRange { - match time_range.unwrap_or_default() { - "24h" => opensession_local_db::LocalTimeRange::Hours24, - "7d" => opensession_local_db::LocalTimeRange::Days7, - "30d" => opensession_local_db::LocalTimeRange::Days30, - _ => opensession_local_db::LocalTimeRange::All, - } -} - -fn build_local_filter_with_mode( - query: DesktopSessionListQuery, -) -> (LocalSessionFilter, u32, u32, SearchMode) { - let page = parse_positive_u32(query.page, 1, 10_000); - let per_page = parse_positive_u32(query.per_page, 20, 200); - let offset = (page.saturating_sub(1)).saturating_mul(per_page); - let (search_query, search_mode) = split_search_mode(query.search); - - let filter = LocalSessionFilter { - search: search_query, - tool: normalize_non_empty(query.tool), - git_repo_name: normalize_non_empty(query.git_repo_name), - exclude_low_signal: true, - sort: map_sort_order(query.sort.as_deref()), - time_range: map_time_range(query.time_range.as_deref()), - limit: Some(per_page), - offset: Some(offset), - ..Default::default() - }; - - (filter, page, per_page, search_mode) -} - fn force_refresh_discovery_tools() -> &'static [&'static str] { // Cursor workspace DBs are high-volume and often metadata-only. Exclude them from the // synchronous force-refresh path so recent sessions show up immediately. 
@@ -5497,12 +5431,12 @@ mod tests { build_vector_chunks_for_session, canonicalize_summaries, cosine_similarity, desktop_ask_session_changes, desktop_change_reader_tts, desktop_get_contract_version, desktop_get_runtime_settings, desktop_get_session_detail, desktop_get_session_raw, - desktop_list_sessions, desktop_read_session_changes, - desktop_share_session_quick, desktop_summary_batch_run, desktop_summary_batch_status, - desktop_update_runtime_settings, extract_vector_lines, force_refresh_discovery_tools, - map_link_type, normalize_launch_route, normalize_session_body_to_hail_jsonl, - parse_cli_quick_share_response, session_summary_from_local_row, split_search_mode, - validate_pin_alias, validate_vector_preflight_ready, DesktopSessionListQuery, SearchMode, + desktop_list_sessions, desktop_read_session_changes, desktop_share_session_quick, + desktop_summary_batch_run, desktop_summary_batch_status, desktop_update_runtime_settings, + extract_vector_lines, force_refresh_discovery_tools, map_link_type, normalize_launch_route, + normalize_session_body_to_hail_jsonl, parse_cli_quick_share_response, + session_summary_from_local_row, split_search_mode, validate_pin_alias, + validate_vector_preflight_ready, DesktopSessionListQuery, SearchMode, }; use opensession_api::{ DesktopChangeQuestionRequest, DesktopChangeReadRequest, DesktopChangeReaderScope, @@ -6675,7 +6609,10 @@ mod tests { let lifecycle_status = super::desktop_lifecycle_cleanup_status_from_db(&db) .expect("read lifecycle cleanup status"); - assert_eq!(lifecycle_status.state, DesktopLifecycleCleanupState::Complete); + assert_eq!( + lifecycle_status.state, + DesktopLifecycleCleanupState::Complete + ); assert_eq!(lifecycle_status.deleted_sessions, 1); assert_eq!(lifecycle_status.deleted_summaries, 1); assert!( diff --git a/packages/ui/package-lock.json b/packages/ui/package-lock.json index 9761fa90..167b1b53 100644 --- a/packages/ui/package-lock.json +++ b/packages/ui/package-lock.json @@ -8,6 +8,7 @@ "name": 
"@opensession/ui", "version": "0.1.0", "dependencies": { + "effect": "3.19.19", "highlight.js": "^11.11.1", "isomorphic-dompurify": "^2.28.0", "marked": "^17.0.1" @@ -882,6 +883,12 @@ "dev": true, "license": "BSD-2-Clause" }, + "node_modules/@standard-schema/spec": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.1.0.tgz", + "integrity": "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==", + "license": "MIT" + }, "node_modules/@sveltejs/acorn-typescript": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/@sveltejs/acorn-typescript/-/acorn-typescript-1.0.8.tgz", @@ -1047,6 +1054,16 @@ "@types/trusted-types": "^2.0.7" } }, + "node_modules/effect": { + "version": "3.19.19", + "resolved": "https://registry.npmjs.org/effect/-/effect-3.19.19.tgz", + "integrity": "sha512-Yc8U/SVXo2dHnaP7zNBlAo83h/nzSJpi7vph6Hzyl4ulgMBIgPmz3UzOjb9sBgpFE00gC0iETR244sfXDNLHRg==", + "license": "MIT", + "dependencies": { + "@standard-schema/spec": "^1.0.0", + "fast-check": "^3.23.1" + } + }, "node_modules/entities": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz", @@ -1118,6 +1135,28 @@ "@jridgewell/sourcemap-codec": "^1.4.15" } }, + "node_modules/fast-check": { + "version": "3.23.2", + "resolved": "https://registry.npmjs.org/fast-check/-/fast-check-3.23.2.tgz", + "integrity": "sha512-h5+1OzzfCC3Ef7VbtKdcv7zsstUQwUDlYpUTvjeUsJAssPgLn7QzbboPtL5ro04Mq0rPOsMzl7q5hIbRs2wD1A==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/dubzzz" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fast-check" + } + ], + "license": "MIT", + "dependencies": { + "pure-rand": "^6.1.0" + }, + "engines": { + "node": ">=8.0.0" + } + }, "node_modules/fsevents": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", @@ -1333,6 +1372,22 @@ "node": ">=6" } }, + 
"node_modules/pure-rand": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.1.0.tgz", + "integrity": "sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/dubzzz" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fast-check" + } + ], + "license": "MIT" + }, "node_modules/require-from-string": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", diff --git a/packages/ui/package.json b/packages/ui/package.json index 164e5ca3..c2dea303 100644 --- a/packages/ui/package.json +++ b/packages/ui/package.json @@ -29,6 +29,7 @@ "svelte": "^5.0.0" }, "dependencies": { + "effect": "3.19.19", "highlight.js": "^11.11.1", "isomorphic-dompurify": "^2.28.0", "marked": "^17.0.1" @@ -37,7 +38,7 @@ "lint": "biome check src/", "lint:fix": "biome check --write src/", "format": "biome format --write src/", - "test": "tsx --test src/**/*.test.ts" + "test": "tsx --test src/*.test.ts src/**/*.test.ts" }, "devDependencies": { "@biomejs/biome": "2.3.15", diff --git a/packages/ui/src/api-internal/auth-services.ts b/packages/ui/src/api-internal/auth-services.ts new file mode 100644 index 00000000..62eb79b8 --- /dev/null +++ b/packages/ui/src/api-internal/auth-services.ts @@ -0,0 +1,198 @@ +import { Effect } from 'effect'; +import type { + AuthProvidersResponse, + AuthTokenResponse, + CapabilitiesResponse, + GitCredentialSummary, + IssueApiKeyResponse, + ListGitCredentialsResponse, + UserSettings, +} from '../types'; +import { requestEffect } from './requests'; +import { + getBaseUrl, + getCsrfToken, + getOAuthUrl, + isAuthenticated, + isDesktopLocalRuntime, + type RuntimeEnv, + RuntimeEnvTag, +} from './runtime'; +import { ApiError } from './errors'; +import { getApiCapabilitiesEffect, getAuthProvidersEffect } from './session-services'; + +export 
function isAuthenticatedEffect(): Effect.Effect { + return Effect.gen(function* () { + const runtime = yield* RuntimeEnvTag; + return isAuthenticated(runtime); + }); +} + +export function verifyAuthEffect(): Effect.Effect { + return Effect.gen(function* () { + try { + yield* requestEffect('/api/auth/verify', { method: 'POST' }); + return true; + } catch (error) { + if (error instanceof ApiError && (error.status === 401 || error.status === 403)) { + const refreshed = yield* tryRefreshTokenEffect(); + if (!refreshed) return false; + try { + yield* requestEffect('/api/auth/verify', { method: 'POST' }); + return true; + } catch { + return false; + } + } + return false; + } + }); +} + +export function getSettingsEffect(): Effect.Effect { + return requestEffect('/api/auth/me'); +} + +export function issueApiKeyEffect(): Effect.Effect { + return requestEffect('/api/auth/api-keys/issue', { + method: 'POST', + }); +} + +export function listGitCredentialsEffect(): Effect.Effect< + GitCredentialSummary[], + unknown, + RuntimeEnv +> { + return Effect.gen(function* () { + const response = yield* requestEffect('/api/auth/git-credentials'); + return response.credentials ?? []; + }); +} + +export function createGitCredentialEffect(params: { + label: string; + host: string; + path_prefix?: string | null; + header_name: string; + header_value: string; +}): Effect.Effect { + return requestEffect('/api/auth/git-credentials', { + method: 'POST', + body: JSON.stringify({ + label: params.label, + host: params.host, + path_prefix: params.path_prefix ?? 
null, + header_name: params.header_name, + header_value: params.header_value, + }), + }); +} + +export function deleteGitCredentialEffect(id: string): Effect.Effect { + return requestEffect(`/api/auth/git-credentials/${encodeURIComponent(id)}`, { + method: 'DELETE', + }); +} + +export function authRegisterEffect( + email: string, + password: string, + nickname: string, +): Effect.Effect { + return requestEffect('/api/auth/register', { + method: 'POST', + body: JSON.stringify({ email, password, nickname }), + includeAuthHeader: false, + }); +} + +export function authLoginEffect( + email: string, + password: string, +): Effect.Effect { + return requestEffect('/api/auth/login', { + method: 'POST', + body: JSON.stringify({ email, password }), + includeAuthHeader: false, + }); +} + +function tryRefreshTokenEffect(): Effect.Effect { + return Effect.gen(function* () { + const runtime = yield* RuntimeEnvTag; + if (isDesktopLocalRuntime(runtime)) return false; + try { + const url = `${getBaseUrl(runtime)}/api/auth/refresh`; + const headers: Record = { 'Content-Type': 'application/json' }; + const csrf = getCsrfToken(runtime); + if (csrf) headers['X-CSRF-Token'] = csrf; + const response = yield* Effect.tryPromise(() => + runtime.fetchImpl(url, { + method: 'POST', + headers, + credentials: 'include', + }), + ); + if (!response.ok) return false; + yield* Effect.tryPromise(() => response.json()); + return true; + } catch { + return false; + } + }); +} + +export function authLogoutEffect(): Effect.Effect { + return Effect.catchAll( + requestEffect('/api/auth/logout', { + method: 'POST', + }), + () => Effect.void, + ); +} + +export function getAuthProvidersSafeEffect(): Effect.Effect { + return Effect.catchAll(getAuthProvidersEffect(), () => + Effect.succeed({ email_password: false, oauth: [] }), + ); +} + +export function getApiCapabilitiesSafeEffect(): Effect.Effect< + CapabilitiesResponse, + never, + RuntimeEnv +> { + return Effect.catchAll(getApiCapabilitiesEffect(), () => + 
Effect.succeed({ + auth_enabled: false, + parse_preview_enabled: false, + register_targets: [], + share_modes: [], + }), + ); +} + +export function getOAuthUrlEffect(provider: string): Effect.Effect { + return Effect.gen(function* () { + const runtime = yield* RuntimeEnvTag; + return getOAuthUrl(runtime, provider); + }); +} + +export function handleAuthCallbackEffect(): Effect.Effect { + return Effect.gen(function* () { + const runtime = yield* RuntimeEnvTag; + if (!runtime.hasWindow()) return false; + const location = runtime.getLocation(); + if (location.hash) { + runtime.replaceHistoryUrl(location.pathname); + } + try { + yield* requestEffect('/api/auth/verify', { method: 'POST' }); + return true; + } catch { + return false; + } + }); +} diff --git a/packages/ui/src/api-internal/errors.ts b/packages/ui/src/api-internal/errors.ts new file mode 100644 index 00000000..f1dd6d36 --- /dev/null +++ b/packages/ui/src/api-internal/errors.ts @@ -0,0 +1,66 @@ +import type { ParsePreviewErrorResponse } from '../types'; +import { SessionReadCoreError } from '../session-read-core'; + +function parseBodyErrorShape(body: string): { + code?: string; + message?: string; + details?: Record | null; +} | null { + try { + return JSON.parse(body) as { + code?: string; + message?: string; + details?: Record | null; + }; + } catch { + return null; + } +} + +export class ApiError extends Error { + constructor( + public status: number, + public body: string, + public code: string = 'unknown', + public details: Record | null = null, + ) { + let message = + body.trimStart().startsWith('<') ? 
`Server returned ${status}` : body.slice(0, 200); + let resolvedCode = code; + let resolvedDetails = details; + if (!body.trimStart().startsWith('<')) { + const parsed = parseBodyErrorShape(body); + if (parsed) { + if (typeof parsed.message === 'string' && parsed.message.trim()) { + message = parsed.message.trim(); + } + if (typeof parsed.code === 'string' && parsed.code.trim()) { + resolvedCode = parsed.code.trim(); + } + if (parsed.details && typeof parsed.details === 'object') { + resolvedDetails = parsed.details; + } + } + } + super(message); + this.code = resolvedCode; + this.details = resolvedDetails; + } +} + +export class PreviewApiError extends Error { + constructor( + public status: number, + public payload: ParsePreviewErrorResponse, + ) { + super(payload.message); + } +} + +export function normalizeSessionAdapterError(error: unknown): ApiError { + if (error instanceof ApiError) return error; + if (error instanceof SessionReadCoreError) { + return new ApiError(error.status, error.body, error.code, error.details); + } + return new ApiError(500, '{"message":"Session adapter request failed"}'); +} diff --git a/packages/ui/src/api-internal/parse-preview-services.ts b/packages/ui/src/api-internal/parse-preview-services.ts new file mode 100644 index 00000000..04cce783 --- /dev/null +++ b/packages/ui/src/api-internal/parse-preview-services.ts @@ -0,0 +1,105 @@ +import { Effect } from 'effect'; +import type { + ParsePreviewErrorResponse, + ParsePreviewRequest, + ParsePreviewResponse, + ParseSource, +} from '../types'; +import { ApiError, PreviewApiError } from './errors'; +import { requestEffect } from './requests'; +import type { RuntimeEnv } from './runtime'; + +function postParsePreviewEffect( + request: ParsePreviewRequest, +): Effect.Effect { + return Effect.gen(function* () { + try { + return yield* requestEffect('/api/parse/preview', { + method: 'POST', + body: JSON.stringify(request), + }); + } catch (error) { + if (error instanceof ApiError) { + let 
parsed: ParsePreviewErrorResponse | null = null; + try { + parsed = JSON.parse(error.body) as ParsePreviewErrorResponse; + } catch { + parsed = null; + } + if (parsed && typeof parsed.code === 'string' && typeof parsed.message === 'string') { + throw new PreviewApiError(error.status, parsed); + } + } + throw error; + } + }); +} + +export function previewSessionFromGithubSourceEffect(params: { + owner: string; + repo: string; + ref: string; + path: string; + parser_hint?: string; +}): Effect.Effect { + const source: ParseSource = { + kind: 'github', + owner: params.owner, + repo: params.repo, + ref: params.ref, + path: params.path, + }; + return postParsePreviewEffect({ + source, + parser_hint: params.parser_hint ?? null, + }); +} + +export function previewSessionFromGitSourceEffect(params: { + remote: string; + ref: string; + path: string; + parser_hint?: string; +}): Effect.Effect { + const source: ParseSource = { + kind: 'git', + remote: params.remote, + ref: params.ref, + path: params.path, + }; + return postParsePreviewEffect({ + source, + parser_hint: params.parser_hint ?? null, + }); +} + +export function previewSessionFromInlineSourceEffect(params: { + filename: string; + content_base64: string; + parser_hint?: string; +}): Effect.Effect { + const source: ParseSource = { + kind: 'inline', + filename: params.filename, + content_base64: params.content_base64, + }; + return postParsePreviewEffect({ + source, + parser_hint: params.parser_hint ?? null, + }); +} + +export function getParsePreviewError(error: unknown): ParsePreviewErrorResponse | null { + if (error instanceof PreviewApiError) return error.payload; + if (error instanceof ApiError) { + try { + const parsed = JSON.parse(error.body) as ParsePreviewErrorResponse; + if (typeof parsed.code === 'string' && typeof parsed.message === 'string') { + return parsed; + } + } catch { + // Ignore non-JSON errors. 
+ } + } + return null; +} diff --git a/packages/ui/src/api-internal/requests.ts b/packages/ui/src/api-internal/requests.ts new file mode 100644 index 00000000..41f733bf --- /dev/null +++ b/packages/ui/src/api-internal/requests.ts @@ -0,0 +1,70 @@ +import { Effect } from 'effect'; +import { ApiError } from './errors'; +import { + assertDesktopHttpApiAvailable, + getAuthHeader, + getBaseUrl, + getCsrfToken, + type RuntimeEnv, + RuntimeEnvTag, +} from './runtime'; + +type RequestEffectOptions = RequestInit & { + includeAuthHeader?: boolean; +}; + +export function requestEffect( + path: string, + options: RequestEffectOptions = {}, +): Effect.Effect { + return Effect.gen(function* () { + const runtime = yield* RuntimeEnvTag; + assertDesktopHttpApiAvailable(runtime, path); + const url = `${getBaseUrl(runtime)}${path}`; + const method = (options.method ?? 'GET').toUpperCase(); + const needsCsrf = method !== 'GET' && method !== 'HEAD' && method !== 'OPTIONS'; + const includeAuthHeader = options.includeAuthHeader !== false; + const headers: Record = { + 'Content-Type': 'application/json', + ...(options.headers as Record), + }; + + if (includeAuthHeader) { + const auth = getAuthHeader(runtime); + if (auth) headers.Authorization = auth; + } + if (needsCsrf) { + const csrf = getCsrfToken(runtime); + if (csrf) headers['X-CSRF-Token'] = csrf; + } + + const response = yield* Effect.tryPromise(() => + runtime.fetchImpl(url, { + ...options, + headers, + credentials: 'include', + }), + ); + + if (!response.ok) { + const body = yield* Effect.tryPromise(() => response.text()); + return yield* Effect.fail(new ApiError(response.status, body)); + } + + if (response.status === 204) { + return undefined as T; + } + + const contentType = response.headers.get('content-type') || ''; + if (!contentType.includes('application/json')) { + return undefined as T; + } + + const text = yield* Effect.tryPromise(() => response.text()); + if (!text.trim()) { + return undefined as T; + } + + return 
JSON.parse(text) as T; + }); +} diff --git a/packages/ui/src/api-internal/runtime.ts b/packages/ui/src/api-internal/runtime.ts new file mode 100644 index 00000000..1a1a85bd --- /dev/null +++ b/packages/ui/src/api-internal/runtime.ts @@ -0,0 +1,220 @@ +import { Cause, Context, Effect, Exit, Layer } from 'effect'; +import { + createDesktopSessionReadAdapter, + createUnavailableDesktopSessionReadAdapter, + createWebSessionReadAdapter, + type DesktopInvoke, + type SessionReadAdapter, +} from '../session-adapter'; +import { ApiError } from './errors'; + +declare global { + interface Window { + __OPENSESSION_API_URL__?: string; + __TAURI_INTERNALS__?: unknown; + __TAURI__?: { + core?: { + invoke?: DesktopInvoke; + }; + }; + } +} + +export interface RuntimeLocation { + origin: string; + protocol: string; + pathname: string; + hash: string; + search: string; +} + +export interface RuntimeEnv { + fetchImpl: typeof fetch; + now: () => number; + getStorageItem: (key: string) => string | null; + setStorageItem: (key: string, value: string) => void; + removeStorageItem: (key: string) => void; + getDocumentCookie: () => string; + getLocation: () => RuntimeLocation; + replaceHistoryUrl: (url: string) => void; + getApiUrlOverride: () => string | null; + getDesktopInvoke: () => DesktopInvoke | null; + isTauriRuntime: () => boolean; + hasWindow: () => boolean; +} + +export const RuntimeEnvTag = Context.GenericTag('opensession/ui/runtime-env'); + +function isHttpLikeOrigin(origin: string): boolean { + return origin.startsWith('http://') || origin.startsWith('https://'); +} + +export function createBrowserRuntimeEnv(): RuntimeEnv { + return { + fetchImpl: fetch, + now: () => Date.now(), + getStorageItem(key) { + if (typeof localStorage === 'undefined') return null; + try { + return localStorage.getItem(key); + } catch { + return null; + } + }, + setStorageItem(key, value) { + if (typeof localStorage === 'undefined') return; + localStorage.setItem(key, value); + }, + 
removeStorageItem(key) { + if (typeof localStorage === 'undefined') return; + localStorage.removeItem(key); + }, + getDocumentCookie() { + if (typeof document === 'undefined') return ''; + return document.cookie ?? ''; + }, + getLocation() { + if (typeof window === 'undefined') { + return { + origin: '', + protocol: '', + pathname: '', + hash: '', + search: '', + }; + } + return { + origin: window.location.origin, + protocol: window.location.protocol, + pathname: window.location.pathname, + hash: window.location.hash, + search: window.location.search, + }; + }, + replaceHistoryUrl(url) { + if (typeof window === 'undefined') return; + window.history?.replaceState?.(null, '', url); + }, + getApiUrlOverride() { + if (typeof window === 'undefined') return null; + return window.__OPENSESSION_API_URL__?.trim() || null; + }, + getDesktopInvoke() { + if (typeof window === 'undefined') return null; + const invoke = window.__TAURI__?.core?.invoke; + return typeof invoke === 'function' ? invoke : null; + }, + isTauriRuntime() { + if (typeof window === 'undefined') return false; + if ('__TAURI_INTERNALS__' in window) return true; + return window.location.protocol === 'tauri:'; + }, + hasWindow() { + return typeof window !== 'undefined'; + }, + }; +} + +export function browserRuntimeLayer(): Layer.Layer { + return Layer.succeed(RuntimeEnvTag, createBrowserRuntimeEnv()); +} + +export function runUiEffect(effect: Effect.Effect): Promise { + return Effect.runPromiseExit(Effect.provide(effect, browserRuntimeLayer())).then((exit) => { + if (Exit.isSuccess(exit)) return exit.value; + throw Cause.squash(exit.cause); + }); +} + +export function readBrowserRuntime(): RuntimeEnv { + return createBrowserRuntimeEnv(); +} + +export function hasDesktopApiOverride(runtime: RuntimeEnv): boolean { + const runtimeOverride = runtime.getApiUrlOverride(); + if (runtimeOverride) return true; + return Boolean(runtime.getStorageItem('opensession_api_url')?.trim()); +} + +export function 
isDesktopLocalRuntime(runtime: RuntimeEnv): boolean { + return runtime.isTauriRuntime() && !hasDesktopApiOverride(runtime); +} + +export function getBaseUrl(runtime: RuntimeEnv): string { + const runtimeOverride = runtime.getApiUrlOverride(); + if (runtimeOverride) return runtimeOverride; + + const stored = runtime.getStorageItem('opensession_api_url'); + if (stored) return stored; + + const location = runtime.getLocation(); + if (isHttpLikeOrigin(location.origin)) return location.origin; + if (runtime.isTauriRuntime()) return ''; + if (!location.origin || location.origin === 'null') return ''; + return location.origin; +} + +export function getCookieValue(runtime: RuntimeEnv, name: string): string | null { + const encodedName = `${name}=`; + const parts = runtime.getDocumentCookie().split(';'); + for (const raw of parts) { + const trimmed = raw.trim(); + if (trimmed.startsWith(encodedName)) { + return trimmed.slice(encodedName.length); + } + } + return null; +} + +export function getCsrfToken(runtime: RuntimeEnv): string | null { + return getCookieValue(runtime, 'opensession_csrf_token'); +} + +export function getAuthHeader(runtime: RuntimeEnv): string | null { + const apiKey = runtime.getStorageItem('opensession_api_key'); + return apiKey ? `Bearer ${apiKey}` : null; +} + +export function setBaseUrl(runtime: RuntimeEnv, url: string) { + runtime.setStorageItem('opensession_api_url', url); +} + +export function isAuthenticated(runtime: RuntimeEnv): boolean { + return getCsrfToken(runtime) != null; +} + +export function desktopHttpApiUnavailable(path: string): ApiError { + return new ApiError( + 501, + JSON.stringify({ + code: 'desktop_http_api_unavailable', + message: + 'HTTP API is unavailable in desktop local runtime. 
Set OPENSESSION_API_URL to call a remote server.', + details: { path }, + }), + ); +} + +export function assertDesktopHttpApiAvailable(runtime: RuntimeEnv, path: string) { + if (isDesktopLocalRuntime(runtime)) { + throw desktopHttpApiUnavailable(path); + } +} + +export function createRuntimeSessionReadAdapter(runtime: RuntimeEnv): SessionReadAdapter { + const invoke = runtime.getDesktopInvoke(); + if (isDesktopLocalRuntime(runtime)) { + if (invoke) return createDesktopSessionReadAdapter(invoke); + return createUnavailableDesktopSessionReadAdapter(); + } + return createWebSessionReadAdapter({ + baseUrl: getBaseUrl(runtime), + fetchImpl: runtime.fetchImpl, + getAuthHeader: async () => getAuthHeader(runtime), + }); +} + +export function getOAuthUrl(runtime: RuntimeEnv, provider: string): string { + if (isDesktopLocalRuntime(runtime)) return '#'; + return `${getBaseUrl(runtime)}/api/auth/oauth/${encodeURIComponent(provider)}`; +} diff --git a/packages/ui/src/api-internal/session-services.ts b/packages/ui/src/api-internal/session-services.ts new file mode 100644 index 00000000..d2069918 --- /dev/null +++ b/packages/ui/src/api-internal/session-services.ts @@ -0,0 +1,274 @@ +import { Effect } from 'effect'; +import { createSessionReadCore } from '../session-read-core'; +import type { + CapabilitiesResponse, + DesktopChangeQuestionResponse, + DesktopChangeReaderScope, + DesktopChangeReadResponse, + DesktopChangeReaderTtsResponse, + DesktopHandoffBuildResponse, + DesktopLifecycleCleanupStatusResponse, + DesktopQuickShareResponse, + DesktopRuntimeSettingsResponse, + DesktopRuntimeSettingsUpdateRequest, + DesktopSummaryBatchStatusResponse, + DesktopSessionSummaryResponse, + DesktopSummaryProviderDetectResponse, + DesktopVectorIndexStatusResponse, + DesktopVectorInstallStatusResponse, + DesktopVectorPreflightResponse, + DesktopVectorSearchResponse, + Session, + SessionDetail, + SessionListResponse, + AuthProvidersResponse, +} from '../types'; +import type { SessionListParams } 
from '../session-adapter'; +import { normalizeSessionAdapterError } from './errors'; +import { + createRuntimeSessionReadAdapter, + type RuntimeEnv, + RuntimeEnvTag, +} from './runtime'; + +function withSessionReadCore( + handler: (core: ReturnType) => Promise, +): Effect.Effect, RuntimeEnv> { + return Effect.gen(function* () { + const runtime = yield* RuntimeEnvTag; + const core = createSessionReadCore(createRuntimeSessionReadAdapter(runtime)); + return yield* Effect.tryPromise({ + try: () => handler(core), + catch: normalizeSessionAdapterError, + }); + }); +} + +export function listSessionsEffect( + params?: SessionListParams, +): Effect.Effect, RuntimeEnv> { + return withSessionReadCore((core) => core.listSessions(params)); +} + +export function listSessionReposEffect(): Effect.Effect< + string[], + ReturnType, + RuntimeEnv +> { + return withSessionReadCore((core) => core.listRepos()); +} + +export function getSessionEffect( + id: string, +): Effect.Effect, RuntimeEnv> { + return withSessionReadCore((core) => core.getSession(id)); +} + +export function getSessionDetailEffect( + id: string, +): Effect.Effect, RuntimeEnv> { + return withSessionReadCore((core) => core.getSessionDetail(id)); +} + +export function getSessionSemanticSummaryEffect( + sessionId: string, +): Effect.Effect< + DesktopSessionSummaryResponse, + ReturnType, + RuntimeEnv +> { + return withSessionReadCore((core) => core.getSessionSummary(sessionId)); +} + +export function regenerateSessionSemanticSummaryEffect( + sessionId: string, +): Effect.Effect< + DesktopSessionSummaryResponse, + ReturnType, + RuntimeEnv +> { + return withSessionReadCore((core) => core.regenerateSessionSummary(sessionId)); +} + +export function buildSessionHandoffEffect( + sessionId: string, + pinLatest: boolean, +): Effect.Effect< + DesktopHandoffBuildResponse, + ReturnType, + RuntimeEnv +> { + return withSessionReadCore((core) => core.buildHandoff(sessionId, pinLatest)); +} + +export function quickShareSessionEffect( + 
sessionId: string, + remote: string | null, +): Effect.Effect< + DesktopQuickShareResponse, + ReturnType, + RuntimeEnv +> { + return withSessionReadCore((core) => core.quickShareSession(sessionId, remote)); +} + +export function readSessionChangesEffect( + sessionId: string, + scope?: DesktopChangeReaderScope | null, +): Effect.Effect< + DesktopChangeReadResponse, + ReturnType, + RuntimeEnv +> { + return withSessionReadCore((core) => core.readSessionChanges(sessionId, scope)); +} + +export function askSessionChangesEffect( + sessionId: string, + question: string, + scope?: DesktopChangeReaderScope | null, +): Effect.Effect< + DesktopChangeQuestionResponse, + ReturnType, + RuntimeEnv +> { + return withSessionReadCore((core) => core.askSessionChanges(sessionId, question, scope)); +} + +export function changeReaderTextToSpeechEffect( + text: string, + sessionId?: string | null, + scope?: DesktopChangeReaderScope | null, +): Effect.Effect< + DesktopChangeReaderTtsResponse, + ReturnType, + RuntimeEnv +> { + return withSessionReadCore((core) => core.changeReaderTts(text, sessionId, scope)); +} + +export function getRuntimeSettingsEffect(): Effect.Effect< + DesktopRuntimeSettingsResponse, + ReturnType, + RuntimeEnv +> { + return withSessionReadCore((core) => core.getRuntimeSettings()); +} + +export function updateRuntimeSettingsEffect( + request: DesktopRuntimeSettingsUpdateRequest, +): Effect.Effect< + DesktopRuntimeSettingsResponse, + ReturnType, + RuntimeEnv +> { + return withSessionReadCore((core) => core.updateRuntimeSettings(request)); +} + +export function getLifecycleCleanupStatusEffect(): Effect.Effect< + DesktopLifecycleCleanupStatusResponse, + ReturnType, + RuntimeEnv +> { + return withSessionReadCore((core) => core.lifecycleCleanupStatus()); +} + +export function runSummaryBatchEffect(): Effect.Effect< + DesktopSummaryBatchStatusResponse, + ReturnType, + RuntimeEnv +> { + return withSessionReadCore((core) => core.summaryBatchRun()); +} + +export function 
getSummaryBatchStatusEffect(): Effect.Effect< + DesktopSummaryBatchStatusResponse, + ReturnType, + RuntimeEnv +> { + return withSessionReadCore((core) => core.summaryBatchStatus()); +} + +export function detectSummaryProviderEffect(): Effect.Effect< + DesktopSummaryProviderDetectResponse, + ReturnType, + RuntimeEnv +> { + return withSessionReadCore((core) => core.detectSummaryProvider()); +} + +export function vectorPreflightEffect(): Effect.Effect< + DesktopVectorPreflightResponse, + ReturnType, + RuntimeEnv +> { + return withSessionReadCore((core) => core.vectorPreflight()); +} + +export function vectorInstallModelEffect( + model: string, +): Effect.Effect< + DesktopVectorInstallStatusResponse, + ReturnType, + RuntimeEnv +> { + return withSessionReadCore((core) => core.vectorInstallModel(model)); +} + +export function vectorIndexRebuildEffect(): Effect.Effect< + DesktopVectorIndexStatusResponse, + ReturnType, + RuntimeEnv +> { + return withSessionReadCore((core) => core.vectorIndexRebuild()); +} + +export function vectorIndexStatusEffect(): Effect.Effect< + DesktopVectorIndexStatusResponse, + ReturnType, + RuntimeEnv +> { + return withSessionReadCore((core) => core.vectorIndexStatus()); +} + +export function searchSessionsVectorEffect( + query: string, + cursor?: string | null, + limit?: number, +): Effect.Effect< + DesktopVectorSearchResponse, + ReturnType, + RuntimeEnv +> { + return withSessionReadCore((core) => core.searchSessionsVector(query, cursor, limit)); +} + +export function getAuthProvidersEffect(): Effect.Effect< + AuthProvidersResponse, + ReturnType, + RuntimeEnv +> { + return Effect.gen(function* () { + const runtime = yield* RuntimeEnvTag; + const adapter = createRuntimeSessionReadAdapter(runtime); + return yield* Effect.tryPromise({ + try: () => adapter.getAuthProviders(), + catch: normalizeSessionAdapterError, + }); + }); +} + +export function getApiCapabilitiesEffect(): Effect.Effect< + CapabilitiesResponse, + ReturnType, + RuntimeEnv +> { + 
return Effect.gen(function* () { + const runtime = yield* RuntimeEnvTag; + const adapter = createRuntimeSessionReadAdapter(runtime); + return yield* Effect.tryPromise({ + try: () => adapter.getCapabilities(), + catch: normalizeSessionAdapterError, + }); + }); +} diff --git a/packages/ui/src/api.ts b/packages/ui/src/api.ts index 9dcd5906..4a0053dd 100644 --- a/packages/ui/src/api.ts +++ b/packages/ui/src/api.ts @@ -1,11 +1,3 @@ -import { - createDesktopSessionReadAdapter, - createUnavailableDesktopSessionReadAdapter, - createWebSessionReadAdapter, - type DesktopInvoke, - type SessionListParams, -} from './session-adapter'; -import { createSessionReadCore, SessionReadCoreError } from './session-read-core'; import type { AuthProvidersResponse, AuthTokenResponse, @@ -28,362 +20,136 @@ import type { DesktopVectorSearchResponse, GitCredentialSummary, IssueApiKeyResponse, - ListGitCredentialsResponse, LocalReviewBundle, ParsePreviewErrorResponse, - ParsePreviewRequest, ParsePreviewResponse, - ParseSource, Session, SessionDetail, SessionListResponse, SessionRepoListResponse, UserSettings, } from './types'; +import type { SessionListParams } from './session-adapter'; +import { + ApiError, + PreviewApiError, +} from './api-internal/errors'; +import { + authLoginEffect, + authLogoutEffect, + authRegisterEffect, + createGitCredentialEffect, + deleteGitCredentialEffect, + getApiCapabilitiesSafeEffect, + getAuthProvidersSafeEffect, + getSettingsEffect, + handleAuthCallbackEffect, + issueApiKeyEffect, + isAuthenticatedEffect, + listGitCredentialsEffect, + verifyAuthEffect, +} from './api-internal/auth-services'; +import { + getParsePreviewError, + previewSessionFromGithubSourceEffect, + previewSessionFromGitSourceEffect, + previewSessionFromInlineSourceEffect, +} from './api-internal/parse-preview-services'; +import { requestEffect } from './api-internal/requests'; +import { + askSessionChangesEffect, + buildSessionHandoffEffect, + changeReaderTextToSpeechEffect, + 
detectSummaryProviderEffect, + getLifecycleCleanupStatusEffect, + getRuntimeSettingsEffect, + getSessionDetailEffect, + getSessionEffect, + getSessionSemanticSummaryEffect, + getSummaryBatchStatusEffect, + listSessionReposEffect, + listSessionsEffect, + quickShareSessionEffect, + readSessionChangesEffect, + regenerateSessionSemanticSummaryEffect, + runSummaryBatchEffect, + searchSessionsVectorEffect, + updateRuntimeSettingsEffect, + vectorIndexRebuildEffect, + vectorIndexStatusEffect, + vectorInstallModelEffect, + vectorPreflightEffect, +} from './api-internal/session-services'; +import { + getOAuthUrl as getOAuthUrlFromRuntime, + isAuthenticated as isAuthenticatedInRuntime, + readBrowserRuntime, + runUiEffect, + setBaseUrl as setBaseUrlInRuntime, +} from './api-internal/runtime'; -declare global { - interface Window { - __OPENSESSION_API_URL__?: string; - __TAURI_INTERNALS__?: unknown; - } -} - -function isHttpLikeOrigin(origin: string): boolean { - return origin.startsWith('http://') || origin.startsWith('https://'); -} - -function isTauriRuntime(): boolean { - if (typeof window === 'undefined') return false; - if ('__TAURI_INTERNALS__' in window) return true; - return window.location.protocol === 'tauri:'; -} - -function getDesktopInvoke(): DesktopInvoke | null { - if (!isTauriRuntime()) return null; - const tauri = (window as unknown as { __TAURI__?: { core?: { invoke?: DesktopInvoke } } }) - .__TAURI__; - const invoke = tauri?.core?.invoke; - return typeof invoke === 'function' ? 
invoke : null; -} - -function hasDesktopApiOverride(): boolean { - if (typeof window === 'undefined') return false; - const runtimeOverride = window.__OPENSESSION_API_URL__?.trim(); - if (runtimeOverride) return true; - const stored = localStorage.getItem('opensession_api_url')?.trim(); - return Boolean(stored); -} - -function isDesktopLocalRuntime(): boolean { - return isTauriRuntime() && !hasDesktopApiOverride(); -} - -function getBaseUrl(): string { - if (typeof window !== 'undefined') { - const runtimeOverride = window.__OPENSESSION_API_URL__?.trim(); - if (runtimeOverride) return runtimeOverride; - - const stored = localStorage.getItem('opensession_api_url'); - if (stored) return stored; - - const origin = window.location.origin; - if (isHttpLikeOrigin(origin)) return origin; - - if (isTauriRuntime()) { - return ''; - } - - if (origin === 'null' || !origin) return ''; - return origin; - } - return ''; -} - -function getApiKey(): string | null { - if (typeof window === 'undefined') return null; - return localStorage.getItem('opensession_api_key'); -} - -function getCookie(name: string): string | null { - if (typeof window === 'undefined') return null; - const encodedName = `${name}=`; - const parts = document.cookie.split(';'); - for (const raw of parts) { - const trimmed = raw.trim(); - if (trimmed.startsWith(encodedName)) { - return trimmed.slice(encodedName.length); - } - } - return null; -} - -function getCsrfToken(): string | null { - return getCookie('opensession_csrf_token'); -} +export { ApiError, PreviewApiError, getParsePreviewError }; export function setBaseUrl(url: string) { - localStorage.setItem('opensession_api_url', url); + setBaseUrlInRuntime(readBrowserRuntime(), url); } export function isAuthenticated(): boolean { - if (typeof window === 'undefined') return false; - return getCsrfToken() != null; + return isAuthenticatedInRuntime(readBrowserRuntime()); } export async function verifyAuth(): Promise { - try { - await request('/api/auth/verify', 
{ method: 'POST' }); - return true; - } catch (error) { - if (error instanceof ApiError && (error.status === 401 || error.status === 403)) { - const refreshed = await tryRefreshToken(); - if (!refreshed) return false; - try { - await request('/api/auth/verify', { method: 'POST' }); - return true; - } catch { - return false; - } - } - return false; - } -} - -async function getAuthHeader(): Promise { - const apiKey = getApiKey(); - if (apiKey) return `Bearer ${apiKey}`; - return null; -} - -function getSessionReadAdapter() { - const invoke = getDesktopInvoke(); - if (isDesktopLocalRuntime()) { - if (invoke) return createDesktopSessionReadAdapter(invoke); - return createUnavailableDesktopSessionReadAdapter(); - } - return createWebSessionReadAdapter({ - baseUrl: getBaseUrl(), - fetchImpl: fetch, - getAuthHeader, - }); -} - -function desktopHttpApiUnavailable(path: string): ApiError { - return new ApiError( - 501, - JSON.stringify({ - code: 'desktop_http_api_unavailable', - message: - 'HTTP API is unavailable in desktop local runtime. 
Set OPENSESSION_API_URL to call a remote server.', - details: { path }, - }), - ); -} - -function assertDesktopHttpApiAvailable(path: string): void { - if (isDesktopLocalRuntime()) { - throw desktopHttpApiUnavailable(path); - } -} - -function getSessionReadCore() { - return createSessionReadCore(getSessionReadAdapter()); -} - -function parseBodyErrorShape(body: string): { - code?: string; - message?: string; - details?: Record | null; -} | null { - try { - return JSON.parse(body) as { - code?: string; - message?: string; - details?: Record | null; - }; - } catch { - return null; - } -} - -function normalizeSessionAdapterError(error: unknown): ApiError { - if (error instanceof ApiError) return error; - if (error instanceof SessionReadCoreError) { - return new ApiError(error.status, error.body, error.code, error.details); - } - return new ApiError(500, '{"message":"Session adapter request failed"}'); -} - -export class ApiError extends Error { - constructor( - public status: number, - public body: string, - public code: string = 'unknown', - public details: Record | null = null, - ) { - let msg = body.trimStart().startsWith('<') ? 
`Server returned ${status}` : body.slice(0, 200); - let resolvedCode = code; - let resolvedDetails = details; - if (!body.trimStart().startsWith('<')) { - const parsed = parseBodyErrorShape(body); - if (parsed) { - if (typeof parsed.message === 'string' && parsed.message.trim()) { - msg = parsed.message.trim(); - } - if (typeof parsed.code === 'string' && parsed.code.trim()) { - resolvedCode = parsed.code.trim(); - } - if (parsed.details && typeof parsed.details === 'object') { - resolvedDetails = parsed.details; - } - } - } - super(msg); - this.code = resolvedCode; - this.details = resolvedDetails; - } -} - -export class PreviewApiError extends Error { - constructor( - public status: number, - public payload: ParsePreviewErrorResponse, - ) { - super(payload.message); - } -} - -async function request(path: string, options: RequestInit = {}): Promise { - assertDesktopHttpApiAvailable(path); - const url = `${getBaseUrl()}${path}`; - const method = (options.method ?? 'GET').toUpperCase(); - const needsCsrf = method !== 'GET' && method !== 'HEAD' && method !== 'OPTIONS'; - const headers: Record = { - 'Content-Type': 'application/json', - ...(options.headers as Record), - }; - - const auth = await getAuthHeader(); - if (auth) { - headers.Authorization = auth; - } - if (needsCsrf) { - const csrf = getCsrfToken(); - if (csrf) headers['X-CSRF-Token'] = csrf; - } - - const res = await fetch(url, { - ...options, - headers, - credentials: 'include', - }); - - if (!res.ok) { - const body = await res.text(); - throw new ApiError(res.status, body); - } - - if (res.status === 204) { - return undefined as T; - } - - const contentType = res.headers.get('content-type') || ''; - if (!contentType.includes('application/json')) { - return undefined as T; - } - - const text = await res.text(); - if (!text.trim()) { - return undefined as T; - } - - return JSON.parse(text) as T; + return runUiEffect(verifyAuthEffect()); } export async function listSessions(params?: SessionListParams): 
Promise { - try { - return await getSessionReadCore().listSessions(params); - } catch (error) { - throw normalizeSessionAdapterError(error); - } + return runUiEffect(listSessionsEffect(params)); } export async function listSessionRepos(): Promise { - try { - const repos = await getSessionReadCore().listRepos(); - return { repos }; - } catch (error) { - throw normalizeSessionAdapterError(error); - } + const repos = await runUiEffect(listSessionReposEffect()); + return { repos }; } export async function getSession(id: string): Promise { - try { - return await getSessionReadCore().getSession(id); - } catch (error) { - throw normalizeSessionAdapterError(error); - } + return runUiEffect(getSessionEffect(id)); } export async function getSessionDetail(id: string): Promise { - try { - return await getSessionReadCore().getSessionDetail(id); - } catch (error) { - throw normalizeSessionAdapterError(error); - } + return runUiEffect(getSessionDetailEffect(id)); } export async function getSessionSemanticSummary( sessionId: string, ): Promise { - try { - return await getSessionReadCore().getSessionSummary(sessionId); - } catch (error) { - throw normalizeSessionAdapterError(error); - } + return runUiEffect(getSessionSemanticSummaryEffect(sessionId)); } export async function regenerateSessionSemanticSummary( sessionId: string, ): Promise { - try { - return await getSessionReadCore().regenerateSessionSummary(sessionId); - } catch (error) { - throw normalizeSessionAdapterError(error); - } + return runUiEffect(regenerateSessionSemanticSummaryEffect(sessionId)); } export async function buildSessionHandoff( sessionId: string, pinLatest: boolean = true, ): Promise { - try { - return await getSessionReadCore().buildHandoff(sessionId, pinLatest); - } catch (error) { - throw normalizeSessionAdapterError(error); - } + return runUiEffect(buildSessionHandoffEffect(sessionId, pinLatest)); } export async function quickShareSession( sessionId: string, remote?: string | null, ): Promise { - try { 
- return await getSessionReadCore().quickShareSession(sessionId, remote ?? null); - } catch (error) { - throw normalizeSessionAdapterError(error); - } + return runUiEffect(quickShareSessionEffect(sessionId, remote ?? null)); } export async function readSessionChanges( sessionId: string, scope?: DesktopChangeReaderScope | null, ): Promise { - try { - return await getSessionReadCore().readSessionChanges(sessionId, scope); - } catch (error) { - throw normalizeSessionAdapterError(error); - } + return runUiEffect(readSessionChangesEffect(sessionId, scope)); } export async function askSessionChanges( @@ -391,11 +157,7 @@ export async function askSessionChanges( question: string, scope?: DesktopChangeReaderScope | null, ): Promise { - try { - return await getSessionReadCore().askSessionChanges(sessionId, question, scope); - } catch (error) { - throw normalizeSessionAdapterError(error); - } + return runUiEffect(askSessionChangesEffect(sessionId, question, scope)); } export async function changeReaderTextToSpeech( @@ -403,95 +165,51 @@ export async function changeReaderTextToSpeech( sessionId?: string | null, scope?: DesktopChangeReaderScope | null, ): Promise { - try { - return await getSessionReadCore().changeReaderTts(text, sessionId, scope); - } catch (error) { - throw normalizeSessionAdapterError(error); - } + return runUiEffect(changeReaderTextToSpeechEffect(text, sessionId, scope)); } export async function getRuntimeSettings(): Promise { - try { - return await getSessionReadCore().getRuntimeSettings(); - } catch (error) { - throw normalizeSessionAdapterError(error); - } + return runUiEffect(getRuntimeSettingsEffect()); } export async function updateRuntimeSettings( request: DesktopRuntimeSettingsUpdateRequest, ): Promise { - try { - return await getSessionReadCore().updateRuntimeSettings(request); - } catch (error) { - throw normalizeSessionAdapterError(error); - } + return runUiEffect(updateRuntimeSettingsEffect(request)); } export async function 
getLifecycleCleanupStatus(): Promise { - try { - return await getSessionReadCore().lifecycleCleanupStatus(); - } catch (error) { - throw normalizeSessionAdapterError(error); - } + return runUiEffect(getLifecycleCleanupStatusEffect()); } export async function runSummaryBatch(): Promise { - try { - return await getSessionReadCore().summaryBatchRun(); - } catch (error) { - throw normalizeSessionAdapterError(error); - } + return runUiEffect(runSummaryBatchEffect()); } export async function getSummaryBatchStatus(): Promise { - try { - return await getSessionReadCore().summaryBatchStatus(); - } catch (error) { - throw normalizeSessionAdapterError(error); - } + return runUiEffect(getSummaryBatchStatusEffect()); } export async function detectSummaryProvider(): Promise { - try { - return await getSessionReadCore().detectSummaryProvider(); - } catch (error) { - throw normalizeSessionAdapterError(error); - } + return runUiEffect(detectSummaryProviderEffect()); } export async function vectorPreflight(): Promise { - try { - return await getSessionReadCore().vectorPreflight(); - } catch (error) { - throw normalizeSessionAdapterError(error); - } + return runUiEffect(vectorPreflightEffect()); } export async function vectorInstallModel( model: string, ): Promise { - try { - return await getSessionReadCore().vectorInstallModel(model); - } catch (error) { - throw normalizeSessionAdapterError(error); - } + return runUiEffect(vectorInstallModelEffect(model)); } export async function vectorIndexRebuild(): Promise { - try { - return await getSessionReadCore().vectorIndexRebuild(); - } catch (error) { - throw normalizeSessionAdapterError(error); - } + return runUiEffect(vectorIndexRebuildEffect()); } export async function vectorIndexStatus(): Promise { - try { - return await getSessionReadCore().vectorIndexStatus(); - } catch (error) { - throw normalizeSessionAdapterError(error); - } + return runUiEffect(vectorIndexStatusEffect()); } export async function searchSessionsVector( @@ -499,30 
+217,23 @@ export async function searchSessionsVector( cursor?: string | null, limit?: number, ): Promise { - try { - return await getSessionReadCore().searchSessionsVector(query, cursor, limit); - } catch (error) { - throw normalizeSessionAdapterError(error); - } + return runUiEffect(searchSessionsVectorEffect(query, cursor, limit)); } export async function getLocalReviewBundle(reviewId: string): Promise { - return request(`/api/review/local/${encodeURIComponent(reviewId)}`); + return runUiEffect(requestEffect(`/api/review/local/${encodeURIComponent(reviewId)}`)); } export async function getSettings(): Promise { - return request('/api/auth/me'); + return runUiEffect(getSettingsEffect()); } export async function issueApiKey(): Promise { - return request('/api/auth/api-keys/issue', { - method: 'POST', - }); + return runUiEffect(issueApiKeyEffect()); } export async function listGitCredentials(): Promise { - const response = await request('/api/auth/git-credentials'); - return response.credentials ?? []; + return runUiEffect(listGitCredentialsEffect()); } export async function createGitCredential(params: { @@ -532,22 +243,11 @@ export async function createGitCredential(params: { header_name: string; header_value: string; }): Promise { - return request('/api/auth/git-credentials', { - method: 'POST', - body: JSON.stringify({ - label: params.label, - host: params.host, - path_prefix: params.path_prefix ?? 
null, - header_name: params.header_name, - header_value: params.header_value, - }), - }); + return runUiEffect(createGitCredentialEffect(params)); } export async function deleteGitCredential(id: string): Promise { - await request('/api/auth/git-credentials/' + encodeURIComponent(id), { - method: 'DELETE', - }); + return runUiEffect(deleteGitCredentialEffect(id)); } export async function authRegister( @@ -555,87 +255,23 @@ export async function authRegister( password: string, nickname: string, ): Promise { - assertDesktopHttpApiAvailable('/api/auth/register'); - const url = `${getBaseUrl()}/api/auth/register`; - const res = await fetch(url, { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ email, password, nickname }), - credentials: 'include', - }); - if (!res.ok) { - const body = await res.text(); - throw new ApiError(res.status, body); - } - return (await res.json()) as AuthTokenResponse; + return runUiEffect(authRegisterEffect(email, password, nickname)); } export async function authLogin(email: string, password: string): Promise { - assertDesktopHttpApiAvailable('/api/auth/login'); - const url = `${getBaseUrl()}/api/auth/login`; - const res = await fetch(url, { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ email, password }), - credentials: 'include', - }); - if (!res.ok) { - const body = await res.text(); - throw new ApiError(res.status, body); - } - return (await res.json()) as AuthTokenResponse; -} - -async function tryRefreshToken(): Promise { - if (isDesktopLocalRuntime()) return false; - try { - const url = `${getBaseUrl()}/api/auth/refresh`; - const headers: Record = { 'Content-Type': 'application/json' }; - const csrf = getCsrfToken(); - if (csrf) headers['X-CSRF-Token'] = csrf; - const res = await fetch(url, { - method: 'POST', - headers, - credentials: 'include', - }); - if (!res.ok) return false; - await res.json(); - return true; - } catch { - return false; 
- } + return runUiEffect(authLoginEffect(email, password)); } export async function authLogout(): Promise { - try { - await request('/api/auth/logout', { - method: 'POST', - }); - } catch { - // ignore errors on logout - } + return runUiEffect(authLogoutEffect()); } export async function getAuthProviders(): Promise { - try { - return await getSessionReadAdapter().getAuthProviders(); - } catch { - return { email_password: false, oauth: [] }; - } + return runUiEffect(getAuthProvidersSafeEffect()); } export async function getApiCapabilities(): Promise { - try { - return await getSessionReadAdapter().getCapabilities(); - } catch { - // ignore and fall through to safe defaults - } - return { - auth_enabled: false, - parse_preview_enabled: false, - register_targets: [], - share_modes: [], - }; + return runUiEffect(getApiCapabilitiesSafeEffect()); } export async function isAuthApiAvailable(): Promise { @@ -648,42 +284,6 @@ export async function isParsePreviewApiAvailable(): Promise { return capabilities.parse_preview_enabled; } -async function postParsePreview(req: ParsePreviewRequest): Promise { - assertDesktopHttpApiAvailable('/api/parse/preview'); - const url = `${getBaseUrl()}/api/parse/preview`; - const headers: Record = { 'Content-Type': 'application/json' }; - const auth = await getAuthHeader(); - if (auth) headers.Authorization = auth; - const csrf = getCsrfToken(); - if (csrf) headers['X-CSRF-Token'] = csrf; - - const res = await fetch(url, { - method: 'POST', - headers, - body: JSON.stringify(req), - credentials: 'include', - }); - - const body = await res.text(); - if (!res.ok) { - let parsed: ParsePreviewErrorResponse | null = null; - try { - parsed = JSON.parse(body) as ParsePreviewErrorResponse; - } catch { - parsed = null; - } - if (parsed && typeof parsed.code === 'string' && typeof parsed.message === 'string') { - throw new PreviewApiError(res.status, parsed); - } - throw new ApiError(res.status, body); - } - - if (!body.trim()) { - throw new 
ApiError(res.status, 'Empty parse preview response'); - } - return JSON.parse(body) as ParsePreviewResponse; -} - export async function previewSessionFromGithubSource(params: { owner: string; repo: string; @@ -691,17 +291,7 @@ export async function previewSessionFromGithubSource(params: { path: string; parser_hint?: string; }): Promise { - const source: ParseSource = { - kind: 'github', - owner: params.owner, - repo: params.repo, - ref: params.ref, - path: params.path, - }; - return postParsePreview({ - source, - parser_hint: params.parser_hint ?? null, - }); + return runUiEffect(previewSessionFromGithubSourceEffect(params)); } export async function previewSessionFromGitSource(params: { @@ -710,16 +300,7 @@ export async function previewSessionFromGitSource(params: { path: string; parser_hint?: string; }): Promise { - const source: ParseSource = { - kind: 'git', - remote: params.remote, - ref: params.ref, - path: params.path, - }; - return postParsePreview({ - source, - parser_hint: params.parser_hint ?? null, - }); + return runUiEffect(previewSessionFromGitSourceEffect(params)); } export async function previewSessionFromInlineSource(params: { @@ -727,46 +308,13 @@ export async function previewSessionFromInlineSource(params: { content_base64: string; parser_hint?: string; }): Promise { - const source: ParseSource = { - kind: 'inline', - filename: params.filename, - content_base64: params.content_base64, - }; - return postParsePreview({ - source, - parser_hint: params.parser_hint ?? 
null, - }); -} - -export function getParsePreviewError(error: unknown): ParsePreviewErrorResponse | null { - if (error instanceof PreviewApiError) return error.payload; - if (error instanceof ApiError) { - try { - const parsed = JSON.parse(error.body) as ParsePreviewErrorResponse; - if (typeof parsed.code === 'string' && typeof parsed.message === 'string') { - return parsed; - } - } catch { - // ignore non-json errors - } - } - return null; + return runUiEffect(previewSessionFromInlineSourceEffect(params)); } export function getOAuthUrl(provider: string): string { - if (isDesktopLocalRuntime()) return '#'; - return `${getBaseUrl()}/api/auth/oauth/${encodeURIComponent(provider)}`; + return getOAuthUrlFromRuntime(readBrowserRuntime(), provider); } export async function handleAuthCallback(): Promise { - if (typeof window === 'undefined') return false; - if (window.location.hash) { - window.history.replaceState(null, '', window.location.pathname); - } - try { - await request('/api/auth/verify', { method: 'POST' }); - return true; - } catch { - return false; - } + return runUiEffect(handleAuthCallbackEffect()); } diff --git a/packages/ui/src/components/SessionDetailPage.svelte b/packages/ui/src/components/SessionDetailPage.svelte index 840d5af0..f2e60c7e 100644 --- a/packages/ui/src/components/SessionDetailPage.svelte +++ b/packages/ui/src/components/SessionDetailPage.svelte @@ -1,6 +1,5 @@ -{#if pageState === 'loading' || pageState === 'idle'} +{#if state.pageState === 'loading' || state.pageState === 'idle'}
Loading source preview...
-{:else if pageState === 'unsupported'} +{:else if state.pageState === 'unsupported'}
This deployment does not support source parse preview.
-{:else if pageState === 'select_parser'} +{:else if state.pageState === 'select_parser'}
- {#if errorMessage} -
{errorMessage}
+ {#if state.errorMessage} +
+ {state.errorMessage} +
{/if}
-{:else if pageState === 'error'} +{:else if state.pageState === 'error'}
- {errorMessage ?? 'Failed to load source.'} + {state.errorMessage ?? 'Failed to load source.'}
-{:else if preview && currentRoute} +{:else if state.preview && state.currentRoute}
- + Date: Fri, 6 Mar 2026 15:24:10 +0900 Subject: [PATCH 02/30] extract shell and settings ui models --- packages/ui/src/components/AppShell.svelte | 240 ++++--------- .../ui/src/components/SettingsPage.svelte | 183 +++++----- packages/ui/src/index.ts | 14 + .../ui/src/models/app-shell-model.test.ts | 106 ++++++ packages/ui/src/models/app-shell-model.ts | 209 +++++++++++ packages/ui/src/models/settings-model.test.ts | 78 ++++ packages/ui/src/models/settings-model.ts | 332 ++++++++++++++++++ 7 files changed, 901 insertions(+), 261 deletions(-) create mode 100644 packages/ui/src/models/app-shell-model.test.ts create mode 100644 packages/ui/src/models/app-shell-model.ts create mode 100644 packages/ui/src/models/settings-model.test.ts create mode 100644 packages/ui/src/models/settings-model.ts diff --git a/packages/ui/src/components/AppShell.svelte b/packages/ui/src/components/AppShell.svelte index 5c7057cf..272e53ef 100644 --- a/packages/ui/src/components/AppShell.svelte +++ b/packages/ui/src/components/AppShell.svelte @@ -2,13 +2,13 @@ import type { Snippet } from 'svelte'; import { tick } from 'svelte'; import { - ApiError, authLogout, getApiCapabilities, getSettings, isAuthenticated, verifyAuth, } from '../api'; +import { createShellModel, createShellModelState } from '../models/app-shell-model'; import type { UserSettings } from '../types'; import ThemeToggle from './ThemeToggle.svelte'; @@ -46,22 +46,16 @@ const { onNavigate?: (path: string) => void; } = $props(); -let user = $state(null); -let paletteOpen = $state(false); -let paletteQuery = $state(''); -let paletteSelectionIndex = $state(0); +const shellState = $state(createShellModelState()); let paletteInput: HTMLInputElement | undefined = $state(); -let helpOpen = $state(false); let helpDialog: HTMLDivElement | undefined = $state(); -let accountMenuOpen = $state(false); let accountMenuRoot: HTMLDivElement | undefined = $state(); -let authEnabled = $state(false); -let hasLocalAuth = $state(false); -let 
desktopRuntime = $state(false); const isSessionDetail = $derived(currentPath.startsWith('/session/')); const isSessionList = $derived(currentPath === '/sessions'); -const showLoginLink = $derived(!hasLocalAuth && (!desktopRuntime || authEnabled)); +const showLoginLink = $derived( + !shellState.hasLocalAuth && (!shellState.desktopRuntime || shellState.authEnabled), +); function trimNonEmpty(value: string | null | undefined): string | null { if (typeof value !== 'string') return null; @@ -118,69 +112,51 @@ function getDesktopInvoke(): DesktopInvoke | null { return typeof invoke === 'function' ? invoke : null; } +const shellModel = createShellModel(shellState, { + getApiCapabilities, + verifyAuth, + getSettings, + authLogout, + isAuthenticated, + isDesktopRuntime: () => { + if (typeof window === 'undefined') return false; + const desktopWindow = window as DesktopWindow; + return '__TAURI_INTERNALS__' in desktopWindow || desktopWindow.location.protocol === 'tauri:'; + }, + takeLaunchRoute: async () => { + const invoke = getDesktopInvoke(); + if (!invoke) return null; + const maybeRoute = await invoke('desktop_take_launch_route'); + return typeof maybeRoute === 'string' ? 
maybeRoute : null; + }, + getCurrentLocationPath: () => { + if (typeof window === 'undefined') return currentPath; + return `${window.location.pathname}${window.location.search}${window.location.hash}`; + }, + startInterval: (callback, ms) => window.setInterval(callback, ms), + clearInterval: (handle) => { + window.clearInterval(handle as number); + }, + navigate: (path) => onNavigate(path), +}); + const navLinks = $derived.by(() => { const links: Array<{ href: string; label: string }> = [{ href: '/sessions', label: 'Sessions' }]; links.push({ href: '/docs', label: 'Docs' }); - if (desktopRuntime || hasLocalAuth) { + if (shellState.desktopRuntime || shellState.hasLocalAuth) { links.push({ href: '/settings', label: 'Settings' }); } return links; }); $effect(() => { - let cancelled = false; - if (typeof window !== 'undefined') { - const desktopWindow = window as DesktopWindow; - desktopRuntime = - '__TAURI_INTERNALS__' in desktopWindow || desktopWindow.location.protocol === 'tauri:'; - } - getApiCapabilities() - .then((capabilities) => { - if (cancelled) return; - authEnabled = capabilities.auth_enabled; - }) - .catch(() => { - if (cancelled) return; - authEnabled = false; - }); - - return () => { - cancelled = true; - }; + void shellModel.loadCapabilities(); }); $effect(() => { - if (!desktopRuntime || typeof window === 'undefined') return; - const invoke = getDesktopInvoke(); - if (!invoke) return; - - let cancelled = false; - let commandSupported = true; - - const pollLaunchRoute = async () => { - if (cancelled || !commandSupported) return; - try { - const maybeRoute = await invoke('desktop_take_launch_route'); - if (typeof maybeRoute !== 'string' || maybeRoute.trim().length === 0) return; - const nextPath = maybeRoute.trim(); - const currentPath = `${window.location.pathname}${window.location.search}${window.location.hash}`; - if (nextPath === currentPath) return; - onNavigate(nextPath); - } catch { - // Older desktop runtimes may not implement this command yet. 
- commandSupported = false; - } - }; - - void pollLaunchRoute(); - const timer = window.setInterval(() => { - void pollLaunchRoute(); - }, 1200); - - return () => { - cancelled = true; - window.clearInterval(timer); - }; + void shellState.desktopRuntime; + if (!shellState.desktopRuntime) return; + return shellModel.startLaunchRoutePolling(); }); const shortcutHints = $derived.by(() => { @@ -204,61 +180,13 @@ const shortcutHints = $derived.by(() => { $effect(() => { void currentPath; - if (!authEnabled) { - user = null; - hasLocalAuth = false; - accountMenuOpen = false; - return; - } - - if (!isAuthenticated()) { - user = null; - hasLocalAuth = false; - accountMenuOpen = false; - return; - } - - let cancelled = false; - verifyAuth() - .then(async (ok) => { - if (!ok || cancelled) { - user = null; - hasLocalAuth = false; - accountMenuOpen = false; - return; - } - try { - const settings = await getSettings(); - if (!cancelled) { - user = settings; - hasLocalAuth = true; - } - } catch (e) { - if (cancelled) return; - user = null; - hasLocalAuth = false; - accountMenuOpen = false; - if (e instanceof ApiError && (e.status === 401 || e.status === 403)) { - await authLogout(); - } - } - }) - .catch(() => { - if (!cancelled) { - user = null; - hasLocalAuth = false; - accountMenuOpen = false; - } - }); - - return () => { - cancelled = true; - }; + void shellState.authEnabled; + void shellModel.loadUser(); }); $effect(() => { void currentPath; - accountMenuOpen = false; + shellModel.resetMenusForPath(); }); function createPaletteCommand( @@ -293,7 +221,7 @@ const allPaletteCommands = $derived.by(() => { ), ]; - if (!hasLocalAuth && (!desktopRuntime || authEnabled)) { + if (!shellState.hasLocalAuth && (!shellState.desktopRuntime || shellState.authEnabled)) { commands.push( createPaletteCommand( 'go-login', @@ -304,7 +232,7 @@ const allPaletteCommands = $derived.by(() => { ), ); } - if (hasLocalAuth || desktopRuntime) { + if (shellState.hasLocalAuth || 
shellState.desktopRuntime) { commands.push( createPaletteCommand( 'go-settings', @@ -343,7 +271,7 @@ const allPaletteCommands = $derived.by(() => { return commands; }); -const normalizedPaletteQuery = $derived(paletteQuery.trim().toLowerCase()); +const normalizedPaletteQuery = $derived(shellState.paletteQuery.trim().toLowerCase()); const visiblePaletteCommands = $derived.by(() => { if (!normalizedPaletteQuery) return allPaletteCommands; @@ -357,18 +285,11 @@ const visiblePaletteCommands = $derived.by(() => { $effect(() => { void normalizedPaletteQuery; - paletteSelectionIndex = 0; + shellModel.resetPaletteSelection(); }); $effect(() => { - const max = visiblePaletteCommands.length - 1; - if (max < 0) { - paletteSelectionIndex = 0; - return; - } - if (paletteSelectionIndex > max) { - paletteSelectionIndex = max; - } + shellModel.clampPaletteSelection(visiblePaletteCommands.length - 1); }); function isLinkActive(href: string): boolean { @@ -391,24 +312,21 @@ function isEditableTarget(target: EventTarget | null): boolean { } async function openPalette() { - paletteOpen = true; - paletteQuery = ''; - paletteSelectionIndex = 0; + shellModel.openPalette(); await tick(); paletteInput?.focus(); } function closePalette() { - paletteOpen = false; - paletteQuery = ''; + shellModel.closePalette(); } function openHelp() { - helpOpen = true; + shellModel.openHelp(); } function closeHelp() { - helpOpen = false; + shellModel.closeHelp(); } function trapHelpFocus(e: KeyboardEvent) { @@ -436,15 +354,15 @@ function trapHelpFocus(e: KeyboardEvent) { } function closeAccountMenu() { - accountMenuOpen = false; + shellModel.closeAccountMenu(); } function toggleAccountMenu() { - accountMenuOpen = !accountMenuOpen; + shellModel.toggleAccountMenu(); } function handleWindowPointerDown(e: MouseEvent) { - if (!accountMenuOpen) return; + if (!shellState.accountMenuOpen) return; const target = e.target; if (!(target instanceof Node)) return; if (accountMenuRoot?.contains(target)) return; @@ 
-458,9 +376,7 @@ function executePaletteCommand(command: PaletteCommand | undefined) { } function movePaletteSelection(direction: 1 | -1) { - const len = visiblePaletteCommands.length; - if (len === 0) return; - paletteSelectionIndex = (paletteSelectionIndex + direction + len) % len; + shellModel.movePaletteSelection(direction, visiblePaletteCommands.length); } function handlePaletteInputKeydown(e: KeyboardEvent) { @@ -476,7 +392,7 @@ function handlePaletteInputKeydown(e: KeyboardEvent) { } if (e.key === 'Enter') { e.preventDefault(); - executePaletteCommand(visiblePaletteCommands[paletteSelectionIndex]); + executePaletteCommand(visiblePaletteCommands[shellState.paletteSelectionIndex]); return; } if (e.key === 'Escape') { @@ -486,15 +402,11 @@ function handlePaletteInputKeydown(e: KeyboardEvent) { } async function handleSignOut() { - closeAccountMenu(); - await authLogout(); - user = null; - hasLocalAuth = false; - onNavigate('/sessions'); + await shellModel.signOut(); } function handleAccountMenuNavigate(path: string) { - closeAccountMenu(); + shellModel.closeAccountMenu(); onNavigate(path); } @@ -502,14 +414,14 @@ function handleGlobalKey(e: KeyboardEvent) { if (isHelpShortcut(e)) { if (isEditableTarget(e.target)) return; e.preventDefault(); - if (helpOpen) closeHelp(); + if (shellState.helpOpen) closeHelp(); else openHelp(); return; } if (isPaletteShortcut(e)) { e.preventDefault(); - if (paletteOpen) { + if (shellState.paletteOpen) { closePalette(); } else { void openPalette(); @@ -517,7 +429,7 @@ function handleGlobalKey(e: KeyboardEvent) { return; } - if (helpOpen) { + if (shellState.helpOpen) { if (e.key === 'Escape') { e.preventDefault(); closeHelp(); @@ -527,13 +439,13 @@ function handleGlobalKey(e: KeyboardEvent) { return; } - if (accountMenuOpen && e.key === 'Escape') { + if (shellState.accountMenuOpen && e.key === 'Escape') { e.preventDefault(); closeAccountMenu(); return; } - if (paletteOpen) { + if (shellState.paletteOpen) { if (e.key === 'Escape') { 
e.preventDefault(); closePalette(); @@ -572,20 +484,20 @@ function handleGlobalKey(e: KeyboardEvent) { {/each}
- {#if hasLocalAuth} + {#if shellState.hasLocalAuth}
- {#if accountMenuOpen} + {#if shellState.accountMenuOpen}

Account

-

{user?.nickname}

-

{user?.email ?? 'email not linked'}

+

{shellState.user?.nickname}

+

{shellState.user?.email ?? 'email not linked'}

-

User ID: {user?.user_id}

-

Joined: {shortDate(user?.created_at)}

-

Providers: {linkedProvidersLabel(user)}

+

User ID: {shellState.user?.user_id}

+

Joined: {shortDate(shellState.user?.created_at)}

+

Providers: {linkedProvidersLabel(shellState.user)}

@@ -687,7 +599,7 @@ function handleGlobalKey(e: KeyboardEvent) {
-{#if paletteOpen} +{#if shellState.paletteOpen}
{/if} -{#if helpOpen} +{#if shellState.helpOpen}
+
+
diff --git a/packages/ui/src/components/settings-page/GitCredentialListPanel.svelte b/packages/ui/src/components/settings-page/GitCredentialListPanel.svelte new file mode 100644 index 00000000..aa6689d0 --- /dev/null +++ b/packages/ui/src/components/settings-page/GitCredentialListPanel.svelte @@ -0,0 +1,46 @@ + + +
+
+ Label + Host + Path Prefix + Action +
+ {#if credentialsLoading} +
Loading credentials...
+ {:else if credentials.length === 0} +
No manual credentials registered.
+ {:else} + {#each credentials as credential} +
+
{credential.label}
+
{credential.host}
+
{credential.path_prefix || '*'}
+ +
+ {/each} + {/if} +
diff --git a/packages/ui/src/components/settings-page/GitCredentialsPanel.svelte b/packages/ui/src/components/settings-page/GitCredentialsPanel.svelte new file mode 100644 index 00000000..ee7f5f9a --- /dev/null +++ b/packages/ui/src/components/settings-page/GitCredentialsPanel.svelte @@ -0,0 +1,75 @@ + + +
+
+

Private Git Credentials

+

+ Preferred: connect GitHub/GitLab OAuth. Manual credentials are used for private self-managed or generic git remotes. +

+
+ + {#if !credentialsSupported} +

+ This deployment does not expose credential management endpoints. +

+ {:else} + + + + +

+ Secrets are never shown again after save. Stored values are encrypted at rest. +

+ {/if} +
diff --git a/packages/ui/src/components/settings-page/SettingsApiKeyPanel.svelte b/packages/ui/src/components/settings-page/SettingsApiKeyPanel.svelte new file mode 100644 index 00000000..92d93611 --- /dev/null +++ b/packages/ui/src/components/settings-page/SettingsApiKeyPanel.svelte @@ -0,0 +1,60 @@ + + +
+
+
+

Personal API Key

+

+ Issue a new key for CLI and automation access. Existing active key moves to grace mode. +

+
+ +
+ + {#if issuedApiKey} +
+

Shown once. Save this key now.

+ + {issuedApiKey} + +
+ + {#if copyMessage} + {copyMessage} + {/if} +
+
+ {/if} +
diff --git a/packages/ui/src/components/settings-page/SettingsAuthGatePanel.svelte b/packages/ui/src/components/settings-page/SettingsAuthGatePanel.svelte new file mode 100644 index 00000000..b0bddbd9 --- /dev/null +++ b/packages/ui/src/components/settings-page/SettingsAuthGatePanel.svelte @@ -0,0 +1,24 @@ + + +
+

Sign in is required to view personal settings.

+
+ +
+
diff --git a/packages/ui/src/components/settings-page/SettingsOverviewHeaderPanel.svelte b/packages/ui/src/components/settings-page/SettingsOverviewHeaderPanel.svelte new file mode 100644 index 00000000..e0834655 --- /dev/null +++ b/packages/ui/src/components/settings-page/SettingsOverviewHeaderPanel.svelte @@ -0,0 +1,8 @@ +
+

Account

+

Settings

+

Personal profile and API access controls.

+
diff --git a/packages/ui/src/components/settings-page/SettingsProfilePanel.svelte b/packages/ui/src/components/settings-page/SettingsProfilePanel.svelte new file mode 100644 index 00000000..4fabf471 --- /dev/null +++ b/packages/ui/src/components/settings-page/SettingsProfilePanel.svelte @@ -0,0 +1,40 @@ + + +
+

Profile

+ {#if settings} +
+
User ID
+
{settings.user_id}
+
Nickname
+
{settings.nickname}
+
Email
+
{settings.email ?? 'not linked'}
+
Joined
+
{formatDate(settings.created_at)}
+
Linked OAuth
+
+ {#if settings.oauth_providers.length === 0} + none + {:else} + {settings.oauth_providers.map((provider) => provider.display_name).join(', ')} + {/if} +
+
+ {:else} +

No profile data available.

+ {/if} +
From 0d9210e5dfde689175c596d1272c4f94c467210e Mon Sep 17 00:00:00 2001 From: hwisu Date: Mon, 9 Mar 2026 14:09:52 +0900 Subject: [PATCH 20/30] refactor(rust): align runtime boundaries with 2026 practices --- Cargo.lock | 145 ++- Cargo.toml | 10 + crates/api-client/Cargo.toml | 5 +- crates/api-client/src/client.rs | 229 +++- crates/api-client/src/lib.rs | 2 +- crates/api-client/src/retry.rs | 5 +- crates/cli/Cargo.toml | 3 + crates/cli/src/config.rs | 27 +- crates/cli/src/discover.rs | 2 +- crates/cli/src/handoff.rs | 2 +- crates/cli/src/index.rs | 2 +- crates/cli/src/review.rs | 3 +- crates/cli/src/runtime_settings.rs | 12 +- crates/cli/src/setup_cmd/branch_sync.rs | 19 +- crates/cli/src/setup_cmd/shims.rs | 8 +- crates/cli/src/setup_cmd/status.rs | 8 +- crates/cli/src/stream_push.rs | 26 +- crates/cli/src/summary_cmd.rs | 6 +- crates/daemon/Cargo.toml | 3 + crates/daemon/src/config.rs | 32 +- crates/daemon/src/scheduler/pipeline.rs | 3 +- crates/daemon/src/watcher.rs | 4 +- crates/local-db/Cargo.toml | 1 + crates/local-db/src/connection.rs | 70 +- crates/local-store/Cargo.toml | 1 + crates/local-store/src/lib.rs | 19 +- crates/parser-discovery/Cargo.toml | 20 + .../src/lib.rs} | 285 +++-- crates/parsers/Cargo.toml | 3 + crates/parsers/src/claude_code/mod.rs | 7 +- crates/parsers/src/claude_code/parse.rs | 1116 +---------------- crates/parsers/src/claude_code/parse/tests.rs | 487 +++++++ crates/parsers/src/claude_code/raw.rs | 203 +++ crates/parsers/src/claude_code/subagent.rs | 402 ++++++ crates/parsers/src/claude_code/transform.rs | 10 +- crates/parsers/src/cursor/mod.rs | 2 + crates/parsers/src/cursor/parse.rs | 170 +-- crates/parsers/src/cursor/time.rs | 18 + crates/parsers/src/cursor/types.rs | 134 ++ crates/parsers/src/gemini.rs | 357 +----- crates/parsers/src/gemini/tests.rs | 356 ++++++ crates/parsers/src/lib.rs | 1 - crates/parsers/src/opencode.rs | 485 +------ crates/parsers/src/opencode/tests.rs | 473 +++++++ crates/parsers/tests/real_data.rs | 6 +- 
crates/paths/Cargo.toml | 21 + crates/paths/src/lib.rs | 161 +++ crates/server/src/routes/admin.rs | 20 +- crates/server/src/routes/auth.rs | 320 ++--- crates/server/src/routes/ingest/auth.rs | 47 +- crates/server/src/routes/ingest/fetch.rs | 15 +- crates/server/src/routes/ingest/mod.rs | 5 +- crates/server/src/routes/oauth.rs | 218 ++-- crates/server/src/routes/sessions.rs | 79 +- crates/server/src/storage.rs | 1003 ++++++++++++++- crates/summary-runtime/Cargo.toml | 25 + crates/summary-runtime/src/lib.rs | 74 ++ crates/summary-runtime/src/provider.rs | 376 ++++++ crates/summary/Cargo.toml | 1 - crates/summary/src/lib.rs | 169 +-- crates/summary/src/provider.rs | 349 +----- desktop/src-tauri/Cargo.toml | 3 + desktop/src-tauri/src/app/change_reader.rs | 2 +- desktop/src-tauri/src/app/runtime_settings.rs | 2 +- desktop/src-tauri/src/app/session_access.rs | 3 +- desktop/src-tauri/src/app/session_summary.rs | 3 +- desktop/src-tauri/src/app/vector.rs | 25 +- desktop/src-tauri/src/main.rs | 22 +- 68 files changed, 4832 insertions(+), 3293 deletions(-) create mode 100644 crates/parser-discovery/Cargo.toml rename crates/{parsers/src/discover.rs => parser-discovery/src/lib.rs} (56%) create mode 100644 crates/parsers/src/claude_code/parse/tests.rs create mode 100644 crates/parsers/src/claude_code/raw.rs create mode 100644 crates/parsers/src/claude_code/subagent.rs create mode 100644 crates/parsers/src/cursor/time.rs create mode 100644 crates/parsers/src/cursor/types.rs create mode 100644 crates/parsers/src/gemini/tests.rs create mode 100644 crates/parsers/src/opencode/tests.rs create mode 100644 crates/paths/Cargo.toml create mode 100644 crates/paths/src/lib.rs create mode 100644 crates/summary-runtime/Cargo.toml create mode 100644 crates/summary-runtime/src/lib.rs create mode 100644 crates/summary-runtime/src/provider.rs diff --git a/Cargo.lock b/Cargo.lock index 1110934d..db5937df 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -509,13 +509,34 @@ dependencies = [ "subtle", ] 
+[[package]] +name = "directories" +version = "5.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a49173b84e034382284f27f1af4dcbbd231ffa358c0fe316541a7337f376a35" +dependencies = [ + "dirs-sys 0.4.1", +] + [[package]] name = "dirs" version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3e8aa94d75141228480295a7d0e7feb620b1a5ad9f12bc40be62411e38cce4e" dependencies = [ - "dirs-sys", + "dirs-sys 0.5.0", +] + +[[package]] +name = "dirs-sys" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c" +dependencies = [ + "libc", + "option-ext", + "redox_users 0.4.6", + "windows-sys 0.48.0", ] [[package]] @@ -526,7 +547,7 @@ checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab" dependencies = [ "libc", "option-ext", - "redox_users", + "redox_users 0.5.2", "windows-sys 0.61.2", ] @@ -2172,9 +2193,12 @@ dependencies = [ "opensession-git-native", "opensession-local-db", "opensession-local-store", + "opensession-parser-discovery", "opensession-parsers", + "opensession-paths", "opensession-runtime-config", "opensession-summary", + "opensession-summary-runtime", "regex", "reqwest", "serde", @@ -2212,11 +2236,11 @@ dependencies = [ name = "opensession-api-client" version = "0.2.34" dependencies = [ - "anyhow", "opensession-api", "reqwest", "serde", "serde_json", + "thiserror 2.0.18", "tokio", "tracing", ] @@ -2248,9 +2272,12 @@ dependencies = [ "opensession-core", "opensession-git-native", "opensession-local-db", + "opensession-parser-discovery", "opensession-parsers", + "opensession-paths", "opensession-runtime-config", "opensession-summary", + "opensession-summary-runtime", "regex", "reqwest", "serde", @@ -2305,6 +2332,7 @@ dependencies = [ "chrono", "opensession-api", "opensession-core", + "opensession-paths", "rusqlite", "serde", "serde_json", @@ -2317,11 +2345,22 @@ name = 
"opensession-local-store" version = "0.2.34" dependencies = [ "opensession-core", + "opensession-paths", "sha2", "tempfile", "thiserror 2.0.18", ] +[[package]] +name = "opensession-parser-discovery" +version = "0.2.34" +dependencies = [ + "glob", + "opensession-paths", + "rusqlite", + "shellexpand", +] + [[package]] name = "opensession-parsers" version = "0.2.34" @@ -2330,6 +2369,7 @@ dependencies = [ "chrono", "glob", "opensession-core", + "opensession-parser-discovery", "regex", "rusqlite", "serde", @@ -2341,6 +2381,15 @@ dependencies = [ "uuid", ] +[[package]] +name = "opensession-paths" +version = "0.2.34" +dependencies = [ + "directories", + "opensession-runtime-config", + "thiserror 2.0.18", +] + [[package]] name = "opensession-runtime-config" version = "0.2.34" @@ -2381,13 +2430,24 @@ dependencies = [ "hex", "opensession-core", "opensession-runtime-config", - "reqwest", "serde", "serde_json", "sha2", "tokio", ] +[[package]] +name = "opensession-summary-runtime" +version = "0.2.34" +dependencies = [ + "opensession-core", + "opensession-runtime-config", + "opensession-summary", + "reqwest", + "serde", + "tokio", +] + [[package]] name = "option-ext" version = "0.2.0" @@ -2662,6 +2722,17 @@ dependencies = [ "bitflags 2.10.0", ] +[[package]] +name = "redox_users" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" +dependencies = [ + "getrandom 0.2.17", + "libredox", + "thiserror 1.0.69", +] + [[package]] name = "redox_users" version = "0.5.2" @@ -3790,6 +3861,15 @@ dependencies = [ "windows-link", ] +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + [[package]] name = "windows-sys" version = "0.52.0" @@ -3826,6 +3906,21 @@ dependencies = [ "windows-link", 
] +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + [[package]] name = "windows-targets" version = "0.52.6" @@ -3859,6 +3954,12 @@ dependencies = [ "windows_x86_64_msvc 0.53.1", ] +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" @@ -3871,6 +3972,12 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + [[package]] name = "windows_aarch64_msvc" version = "0.52.6" @@ -3883,6 +3990,12 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + [[package]] name = "windows_i686_gnu" version = "0.52.6" @@ -3907,6 +4020,12 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" +[[package]] +name = "windows_i686_msvc" 
+version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + [[package]] name = "windows_i686_msvc" version = "0.52.6" @@ -3919,6 +4038,12 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + [[package]] name = "windows_x86_64_gnu" version = "0.52.6" @@ -3931,6 +4056,12 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" @@ -3943,6 +4074,12 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + [[package]] name = "windows_x86_64_msvc" version = "0.52.6" diff --git a/Cargo.toml b/Cargo.toml index 2273ed94..0f266d9c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,10 +2,13 @@ resolver = "3" members = [ "crates/core", + "crates/paths", "crates/local-store", "crates/runtime-config", "crates/summary", + "crates/summary-runtime", "crates/parsers", + "crates/parser-discovery", "crates/api", "crates/api-client", "crates/local-db", @@ -17,10 +20,13 @@ members = 
[ ] default-members = [ "crates/core", + "crates/paths", "crates/local-store", "crates/runtime-config", "crates/summary", + "crates/summary-runtime", "crates/parsers", + "crates/parser-discovery", "crates/api", "crates/api-client", "crates/local-db", @@ -56,10 +62,13 @@ authors = ["opensession.io contributors"] [workspace.dependencies] opensession-core = { version = "0.2.34", path = "crates/core" } +opensession-paths = { version = "0.2.34", path = "crates/paths" } opensession-local-store = { version = "0.2.34", path = "crates/local-store" } opensession-runtime-config = { version = "0.2.34", path = "crates/runtime-config" } opensession-summary = { version = "0.2.34", path = "crates/summary" } +opensession-summary-runtime = { version = "0.2.34", path = "crates/summary-runtime" } opensession-parsers = { version = "0.2.34", path = "crates/parsers" } +opensession-parser-discovery = { version = "0.2.34", path = "crates/parser-discovery" } opensession-api = { version = "0.2.34", path = "crates/api", default-features = false } opensession-api-client = { version = "0.2.34", path = "crates/api-client" } opensession-local-db = { version = "0.2.34", path = "crates/local-db" } @@ -92,6 +101,7 @@ urlencoding = "2" tempfile = "3" gix = "0.79" sea-query = { version = "0.32", features = ["backend-sqlite", "derive"] } +directories = "5" [profile.dev.package."*"] opt-level = 1 diff --git a/crates/api-client/Cargo.toml b/crates/api-client/Cargo.toml index 9caf0ace..3fc2c229 100644 --- a/crates/api-client/Cargo.toml +++ b/crates/api-client/Cargo.toml @@ -21,4 +21,7 @@ serde = { workspace = true } serde_json = { workspace = true } tokio = { workspace = true, features = ["time"] } tracing = { workspace = true } -anyhow = { workspace = true } +thiserror = { workspace = true } + +[dev-dependencies] +tokio = { workspace = true, features = ["io-util", "macros", "net", "rt-multi-thread"] } diff --git a/crates/api-client/src/client.rs b/crates/api-client/src/client.rs index 21649eae..45dd6ea3 
100644 --- a/crates/api-client/src/client.rs +++ b/crates/api-client/src/client.rs @@ -1,10 +1,27 @@ use std::time::Duration; -use anyhow::{Result, bail}; use serde::Serialize; +use thiserror::Error; use opensession_api::*; +pub type Result = std::result::Result; + +#[derive(Debug, Error)] +pub enum ApiClientError { + #[error("auth token not set")] + AuthTokenMissing, + #[error("transport error: {0}")] + Transport(reqwest::Error), + #[error("unexpected API status {status}: {body}")] + UnexpectedStatus { + status: reqwest::StatusCode, + body: String, + }, + #[error("response decode error: {0}")] + Decode(reqwest::Error), +} + /// Typed HTTP client for the OpenSession API. /// /// Provides high-level methods for each API endpoint (using the stored auth @@ -19,7 +36,10 @@ pub struct ApiClient { impl ApiClient { /// Create a new client with the given base URL and timeout. pub fn new(base_url: &str, timeout: Duration) -> Result { - let client = reqwest::Client::builder().timeout(timeout).build()?; + let client = reqwest::Client::builder() + .timeout(timeout) + .build() + .map_err(ApiClientError::Transport)?; Ok(Self { client, base_url: base_url.trim_end_matches('/').to_string(), @@ -62,16 +82,21 @@ impl ApiClient { format!("{}/api{}", self.base_url, path) } - fn token_or_bail(&self) -> Result<&str> { + fn token_or_err(&self) -> Result<&str> { self.auth_token .as_deref() - .ok_or_else(|| anyhow::anyhow!("auth token not set")) + .ok_or(ApiClientError::AuthTokenMissing) } // ── Health ──────────────────────────────────────────────────────────── pub async fn health(&self) -> Result { - let resp = self.client.get(self.url("/health")).send().await?; + let resp = self + .client + .get(self.url("/health")) + .send() + .await + .map_err(ApiClientError::Transport)?; parse_response(resp).await } @@ -83,7 +108,8 @@ impl ApiClient { .post(self.url("/auth/login")) .json(req) .send() - .await?; + .await + .map_err(ApiClientError::Transport)?; parse_response(resp).await } @@ -93,29 
+119,32 @@ impl ApiClient { .post(self.url("/auth/register")) .json(req) .send() - .await?; + .await + .map_err(ApiClientError::Transport)?; parse_response(resp).await } pub async fn verify(&self) -> Result { - let token = self.token_or_bail()?; + let token = self.token_or_err()?; let resp = self .client .post(self.url("/auth/verify")) .bearer_auth(token) .send() - .await?; + .await + .map_err(ApiClientError::Transport)?; parse_response(resp).await } pub async fn me(&self) -> Result { - let token = self.token_or_bail()?; + let token = self.token_or_err()?; let resp = self .client .get(self.url("/auth/me")) .bearer_auth(token) .send() - .await?; + .await + .map_err(ApiClientError::Transport)?; parse_response(resp).await } @@ -125,52 +154,68 @@ impl ApiClient { .post(self.url("/auth/refresh")) .json(req) .send() - .await?; + .await + .map_err(ApiClientError::Transport)?; parse_response(resp).await } pub async fn logout(&self, req: &LogoutRequest) -> Result { - let token = self.token_or_bail()?; + let token = self.token_or_err()?; let resp = self .client .post(self.url("/auth/logout")) .bearer_auth(token) .json(req) .send() - .await?; + .await + .map_err(ApiClientError::Transport)?; parse_response(resp).await } pub async fn change_password(&self, req: &ChangePasswordRequest) -> Result { - let token = self.token_or_bail()?; + let token = self.token_or_err()?; let resp = self .client .post(self.url("/auth/change-password")) .bearer_auth(token) .json(req) .send() - .await?; + .await + .map_err(ApiClientError::Transport)?; parse_response(resp).await } pub async fn issue_api_key(&self) -> Result { - let token = self.token_or_bail()?; + let token = self.token_or_err()?; let resp = self .client .post(self.url("/auth/api-keys/issue")) .bearer_auth(token) .send() - .await?; + .await + .map_err(ApiClientError::Transport)?; parse_response(resp).await } // ── Sessions ────────────────────────────────────────────────────────── + pub async fn upload_session(&self, req: 
&UploadRequest) -> Result { + let token = self.token_or_err()?; + let resp = self + .client + .post(self.url("/sessions")) + .bearer_auth(token) + .json(req) + .send() + .await + .map_err(ApiClientError::Transport)?; + parse_response(resp).await + } + pub async fn list_sessions(&self, query: &SessionListQuery) -> Result { - let token = self.token_or_bail()?; + let token = self.token_or_err()?; let mut url = self.url("/sessions"); - // Build query string from the struct fields let mut params = Vec::new(); params.push(format!("page={}", query.page)); params.push(format!("per_page={}", query.per_page)); @@ -190,40 +235,49 @@ impl ApiClient { url = format!("{}?{}", url, params.join("&")); } - let resp = self.client.get(&url).bearer_auth(token).send().await?; + let resp = self + .client + .get(&url) + .bearer_auth(token) + .send() + .await + .map_err(ApiClientError::Transport)?; parse_response(resp).await } pub async fn get_session(&self, id: &str) -> Result { - let token = self.token_or_bail()?; + let token = self.token_or_err()?; let resp = self .client .get(self.url(&format!("/sessions/{id}"))) .bearer_auth(token) .send() - .await?; + .await + .map_err(ApiClientError::Transport)?; parse_response(resp).await } pub async fn delete_session(&self, id: &str) -> Result { - let token = self.token_or_bail()?; + let token = self.token_or_err()?; let resp = self .client .delete(self.url(&format!("/sessions/{id}"))) .bearer_auth(token) .send() - .await?; + .await + .map_err(ApiClientError::Transport)?; parse_response(resp).await } pub async fn get_session_raw(&self, id: &str) -> Result { - let token = self.token_or_bail()?; + let token = self.token_or_err()?; let resp = self .client .get(self.url(&format!("/sessions/{id}/raw"))) .bearer_auth(token) .send() - .await?; + .await + .map_err(ApiClientError::Transport)?; parse_response(resp).await } @@ -231,22 +285,22 @@ impl ApiClient { /// Authenticated GET returning the raw response. 
pub async fn get_with_auth(&self, path: &str, token: &str) -> Result { - Ok(self - .client + self.client .get(self.url(path)) .bearer_auth(token) .send() - .await?) + .await + .map_err(ApiClientError::Transport) } /// Authenticated POST (no body) returning the raw response. pub async fn post_with_auth(&self, path: &str, token: &str) -> Result { - Ok(self - .client + self.client .post(self.url(path)) .bearer_auth(token) .send() - .await?) + .await + .map_err(ApiClientError::Transport) } /// Authenticated POST with JSON body returning the raw response. @@ -256,13 +310,13 @@ impl ApiClient { token: &str, body: &T, ) -> Result { - Ok(self - .client + self.client .post(self.url(path)) .bearer_auth(token) .json(body) .send() - .await?) + .await + .map_err(ApiClientError::Transport) } /// Authenticated PUT with JSON body returning the raw response. @@ -272,23 +326,23 @@ impl ApiClient { token: &str, body: &T, ) -> Result { - Ok(self - .client + self.client .put(self.url(path)) .bearer_auth(token) .json(body) .send() - .await?) + .await + .map_err(ApiClientError::Transport) } /// Authenticated DELETE returning the raw response. pub async fn delete_with_auth(&self, path: &str, token: &str) -> Result { - Ok(self - .client + self.client .delete(self.url(path)) .bearer_auth(token) .send() - .await?) + .await + .map_err(ApiClientError::Transport) } /// Unauthenticated POST with JSON body returning the raw response. @@ -297,25 +351,52 @@ impl ApiClient { path: &str, body: &T, ) -> Result { - Ok(self.client.post(self.url(path)).json(body).send().await?) + self.client + .post(self.url(path)) + .json(body) + .send() + .await + .map_err(ApiClientError::Transport) } } -/// Parse an HTTP response: return the deserialized body on 2xx, -/// or an error containing the status and body text. 
async fn parse_response(resp: reqwest::Response) -> Result { let status = resp.status(); if !status.is_success() { - let body = resp.text().await.unwrap_or_default(); - bail!("{status}: {body}"); + let body = match resp.text().await { + Ok(body) => body, + Err(err) => format!("<failed to read body: {err}>"), + }; + return Err(ApiClientError::UnexpectedStatus { status, body }); } - Ok(resp.json().await?) + resp.json().await.map_err(ApiClientError::Decode) } #[cfg(test)] mod tests { - use super::ApiClient; + use super::{ApiClient, ApiClientError}; use std::time::Duration; + use tokio::io::{AsyncReadExt, AsyncWriteExt}; + use tokio::net::TcpListener; + + async fn serve_once(response: &'static str) -> String { + let listener = TcpListener::bind("127.0.0.1:0") + .await + .expect("bind test listener"); + let addr = listener.local_addr().expect("listener address"); + + tokio::spawn(async move { + let (mut stream, _) = listener.accept().await.expect("accept request"); + let mut buf = [0u8; 1024]; + let _ = stream.read(&mut buf).await; + stream + .write_all(response.as_bytes()) + .await + .expect("write response"); + }); + + format!("http://{addr}") + } #[test] fn set_auth_trims_surrounding_whitespace() { @@ -337,4 +418,54 @@ mod tests { client.set_auth(" ".to_string()); assert_eq!(client.auth_token(), None); } + + #[tokio::test] + async fn verify_without_auth_token_returns_typed_error() { + let client = ApiClient::new("https://example.com", Duration::from_secs(1)) + .expect("client should construct"); + + let error = client.verify().await.expect_err("verify should fail"); + assert!(matches!(error, ApiClientError::AuthTokenMissing)); + } + + #[tokio::test] + async fn parse_response_surfaces_unexpected_status_with_body() { + let base_url = serve_once( + "HTTP/1.1 401 Unauthorized\r\nContent-Length: 12\r\nContent-Type: text/plain\r\nConnection: close\r\n\r\nmissing auth", + ) + .await; + let client = + ApiClient::new(&base_url, Duration::from_secs(1)).expect("client should construct"); + + let error =
client.health().await.expect_err("health should fail"); + match error { + ApiClientError::UnexpectedStatus { status, body } => { + assert_eq!(status, reqwest::StatusCode::UNAUTHORIZED); + assert_eq!(body, "missing auth"); + } + other => panic!("unexpected error variant: {other:?}"), + } + } + + #[tokio::test] + async fn parse_response_surfaces_decode_errors() { + let base_url = serve_once( + "HTTP/1.1 200 OK\r\nContent-Length: 8\r\nContent-Type: application/json\r\nConnection: close\r\n\r\nnot-json", + ) + .await; + let client = + ApiClient::new(&base_url, Duration::from_secs(1)).expect("client should construct"); + + let error = client.health().await.expect_err("health should fail"); + assert!(matches!(error, ApiClientError::Decode(_))); + } + + #[tokio::test] + async fn invalid_base_url_surfaces_transport_error() { + let client = + ApiClient::new("not-a-url", Duration::from_secs(1)).expect("client should construct"); + + let error = client.health().await.expect_err("health should fail"); + assert!(matches!(error, ApiClientError::Transport(_))); + } } diff --git a/crates/api-client/src/lib.rs b/crates/api-client/src/lib.rs index b25fd938..3a13de5a 100644 --- a/crates/api-client/src/lib.rs +++ b/crates/api-client/src/lib.rs @@ -1,6 +1,6 @@ pub mod client; pub mod retry; -pub use client::ApiClient; +pub use client::{ApiClient, ApiClientError}; pub use opensession_api; pub use retry::RetryConfig; diff --git a/crates/api-client/src/retry.rs b/crates/api-client/src/retry.rs index d57be394..d358139d 100644 --- a/crates/api-client/src/retry.rs +++ b/crates/api-client/src/retry.rs @@ -1,8 +1,9 @@ use std::time::Duration; -use anyhow::{Context, Result}; use tracing::warn; +use crate::client::{ApiClientError, Result}; + /// Configuration for retry behaviour on upload-style POST requests. 
pub struct RetryConfig { pub max_retries: usize, @@ -65,7 +66,7 @@ pub async fn retry_post( ); tokio::time::sleep(Duration::from_secs(config.delays[attempt])).await; } else { - return Err(e).context("Failed to connect after retries"); + return Err(ApiClientError::Transport(e)); } } } diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml index 5f1f8e35..201b07c5 100644 --- a/crates/cli/Cargo.toml +++ b/crates/cli/Cargo.toml @@ -27,10 +27,13 @@ workspace = true [dependencies] opensession-core = { workspace = true } +opensession-paths = { workspace = true } opensession-local-store = { workspace = true } opensession-runtime-config = { workspace = true } opensession-summary = { workspace = true } +opensession-summary-runtime = { workspace = true } opensession-parsers = { workspace = true } +opensession-parser-discovery = { workspace = true } opensession-api = { workspace = true, default-features = false } opensession-api-client = { workspace = true } opensession-local-db = { workspace = true } diff --git a/crates/cli/src/config.rs b/crates/cli/src/config.rs index 0d411cb9..ea6ca5eb 100644 --- a/crates/cli/src/config.rs +++ b/crates/cli/src/config.rs @@ -1,5 +1,5 @@ use anyhow::{Context, Result}; -use opensession_runtime_config::{DaemonConfig, CONFIG_FILE_NAME}; +use opensession_runtime_config::DaemonConfig; use serde::{Deserialize, Serialize}; use std::path::{Path, PathBuf}; @@ -60,15 +60,12 @@ impl Default for DaemonRefConfig { /// Get the config directory path (~/.config/opensession/) pub fn config_dir() -> Result { - let home = std::env::var("HOME") - .or_else(|_| std::env::var("USERPROFILE")) - .context("Could not determine home directory")?; - Ok(PathBuf::from(home).join(".config").join("opensession")) + opensession_paths::config_dir().context("Could not determine home directory") } /// Canonical config file path. 
pub fn config_path() -> Result { - Ok(config_dir()?.join(CONFIG_FILE_NAME)) + opensession_paths::runtime_config_path().context("Could not determine config file path") } fn read_config_doc(path: &Path) -> Result { @@ -255,7 +252,7 @@ pub fn set_daemon_watch_paths(repos: Vec) -> Result<()> { #[cfg(test)] mod tests { - use super::load_runtime_config_from_doc; + use super::{config_dir, config_path, load_runtime_config_from_doc}; #[test] fn runtime_config_loads_server_table() { @@ -272,4 +269,20 @@ api_key = "k" assert_eq!(cfg.server.url, "https://opensession.io"); assert_eq!(cfg.server.api_key, "k"); } + + #[test] + fn cli_config_dir_uses_centralized_path() { + assert_eq!( + config_dir().expect("config dir"), + opensession_paths::config_dir().expect("central config dir") + ); + } + + #[test] + fn cli_config_path_uses_centralized_runtime_path() { + assert_eq!( + config_path().expect("config path"), + opensession_paths::runtime_config_path().expect("central runtime config path") + ); + } } diff --git a/crates/cli/src/discover.rs b/crates/cli/src/discover.rs index e26ee1e9..2345fdf5 100644 --- a/crates/cli/src/discover.rs +++ b/crates/cli/src/discover.rs @@ -1,5 +1,5 @@ use anyhow::Result; -use opensession_parsers::discover::discover_sessions; +use opensession_parser_discovery::discover_sessions; /// List all locally discovered AI sessions pub fn run_discover() -> Result<()> { diff --git a/crates/cli/src/handoff.rs b/crates/cli/src/handoff.rs index 5db62809..d83ff65c 100644 --- a/crates/cli/src/handoff.rs +++ b/crates/cli/src/handoff.rs @@ -18,7 +18,7 @@ use opensession_git_native::{ store_handoff_artifact, }; use opensession_parsers::ParserRegistry; -use opensession_parsers::discover::discover_sessions; +use opensession_parser_discovery::discover_sessions; use std::io::{IsTerminal, Write}; use std::time::UNIX_EPOCH; diff --git a/crates/cli/src/index.rs b/crates/cli/src/index.rs index 8e7ac071..c6f83d5f 100644 --- a/crates/cli/src/index.rs +++ b/crates/cli/src/index.rs @@ 
-2,7 +2,7 @@ use anyhow::Result; use opensession_core::session::{is_auxiliary_session, working_directory}; use opensession_git_native::extract_git_context; use opensession_local_db::LocalDb; -use opensession_parsers::discover::discover_sessions; +use opensession_parser_discovery::discover_sessions; use opensession_parsers::ParserRegistry; use std::path::Path; diff --git a/crates/cli/src/review.rs b/crates/cli/src/review.rs index 4e27df25..9dac8e08 100644 --- a/crates/cli/src/review.rs +++ b/crates/cli/src/review.rs @@ -8,7 +8,8 @@ use opensession_api::{ }; use opensession_core::{ContentBlock, EventType, Session}; use opensession_runtime_config::SummarySettings; -use opensession_summary::{SemanticSummaryArtifact, summarize_git_commit}; +use opensession_summary::SemanticSummaryArtifact; +use opensession_summary_runtime::summarize_git_commit; use reqwest::Url; use serde::Deserialize; use std::collections::{BTreeSet, HashMap, HashSet, VecDeque}; diff --git a/crates/cli/src/runtime_settings.rs b/crates/cli/src/runtime_settings.rs index 040f33ad..311262d8 100644 --- a/crates/cli/src/runtime_settings.rs +++ b/crates/cli/src/runtime_settings.rs @@ -1,16 +1,10 @@ use anyhow::{Context, Result}; use opensession_runtime_config::DaemonConfig; -use opensession_summary::provider::LocalSummaryProfile; +use opensession_summary_runtime::LocalSummaryProfile; use std::path::PathBuf; pub fn runtime_config_path() -> Result { - let home = std::env::var("HOME") - .or_else(|_| std::env::var("USERPROFILE")) - .context("Could not determine home directory")?; - Ok(PathBuf::from(home) - .join(".config") - .join("opensession") - .join(opensession_runtime_config::CONFIG_FILE_NAME)) + opensession_paths::runtime_config_path().context("Could not determine home directory") } pub fn load_runtime_config() -> Result { @@ -37,7 +31,7 @@ pub fn save_runtime_config(config: &DaemonConfig) -> Result { } pub fn detect_local_summary_profile() -> Option { - opensession_summary::detect_summary_provider() + 
opensession_summary_runtime::detect_local_summary_profile() } pub fn apply_summary_profile(config: &mut DaemonConfig, profile: &LocalSummaryProfile) { diff --git a/crates/cli/src/setup_cmd/branch_sync.rs b/crates/cli/src/setup_cmd/branch_sync.rs index da75b0c7..0aceffa2 100644 --- a/crates/cli/src/setup_cmd/branch_sync.rs +++ b/crates/cli/src/setup_cmd/branch_sync.rs @@ -4,8 +4,9 @@ use opensession_core::Session; use opensession_core::sanitize::{SanitizeConfig, sanitize_session}; use opensession_core::session::{GitMeta, build_git_storage_meta_json_with_git, working_directory}; use opensession_git_native::{NativeGitStorage, extract_git_context}; -use opensession_parsers::{ParserRegistry, discover::discover_sessions}; -use opensession_runtime_config::{CONFIG_FILE_NAME, DaemonConfig}; +use opensession_parser_discovery::discover_sessions; +use opensession_parsers::ParserRegistry; +use opensession_runtime_config::DaemonConfig; use std::cmp::Reverse; use std::collections::HashSet; use std::path::{Path, PathBuf}; @@ -209,17 +210,11 @@ fn session_commit_links( } fn load_daemon_config() -> DaemonConfig { - let home = match std::env::var("HOME").or_else(|_| std::env::var("USERPROFILE")) { - Ok(home) => home, - Err(_) => return DaemonConfig::default(), + let Ok(path) = opensession_paths::runtime_config_path() else { + return DaemonConfig::default(); }; - let path = PathBuf::from(home) - .join(".config") - .join("opensession") - .join(CONFIG_FILE_NAME); - let content = match std::fs::read_to_string(path) { - Ok(content) => content, - Err(_) => return DaemonConfig::default(), + let Ok(content) = std::fs::read_to_string(path) else { + return DaemonConfig::default(); }; toml::from_str(&content).unwrap_or_default() } diff --git a/crates/cli/src/setup_cmd/shims.rs b/crates/cli/src/setup_cmd/shims.rs index fda791ce..a55e9229 100644 --- a/crates/cli/src/setup_cmd/shims.rs +++ b/crates/cli/src/setup_cmd/shims.rs @@ -51,12 +51,8 @@ pub(super) fn shim_action_label(action: 
ShimInstallAction) -> &'static str { } pub(super) fn shim_path(name: &str) -> Result { - let home = std::env::var("HOME") - .context("HOME environment variable is not set; cannot resolve shim path")?; - Ok(PathBuf::from(home) - .join(".local") - .join("share") - .join("opensession") + Ok(opensession_paths::data_dir() + .context("Could not determine shim base directory")? .join("bin") .join(name)) } diff --git a/crates/cli/src/setup_cmd/status.rs b/crates/cli/src/setup_cmd/status.rs index 12f94e09..cd4a597c 100644 --- a/crates/cli/src/setup_cmd/status.rs +++ b/crates/cli/src/setup_cmd/status.rs @@ -263,12 +263,8 @@ pub(super) fn daemon_status_summary( } fn daemon_pid_path() -> Result { - let home = std::env::var("HOME") - .or_else(|_| std::env::var("USERPROFILE")) - .context("HOME/USERPROFILE is not set; cannot resolve daemon pid path")?; - Ok(PathBuf::from(home) - .join(".config") - .join("opensession") + Ok(opensession_paths::config_dir() + .context("Could not determine daemon pid path")? .join("daemon.pid")) } diff --git a/crates/cli/src/stream_push.rs b/crates/cli/src/stream_push.rs index a98b8e18..f54121eb 100644 --- a/crates/cli/src/stream_push.rs +++ b/crates/cli/src/stream_push.rs @@ -4,7 +4,7 @@ //! (< 2s). Parses the full session file and upserts it into the local DB. //! The daemon handles uploading to the server via debounced file watching. -use anyhow::{bail, Context, Result}; +use anyhow::{Context, Result, bail}; use opensession_core::session::{is_auxiliary_session, working_directory}; use opensession_git_native::extract_git_context; use opensession_local_db::LocalDb; @@ -24,12 +24,8 @@ struct StreamState { } fn state_dir() -> Result { - let home = std::env::var("HOME") - .or_else(|_| std::env::var("USERPROFILE")) - .context("Could not determine home directory")?; - Ok(PathBuf::from(home) - .join(".config") - .join("opensession") + Ok(opensession_paths::config_dir() + .context("Could not determine home directory")? 
.join("stream-state")) } @@ -74,16 +70,14 @@ fn resolve_session_file(agent: &str) -> Result { /// Claude Code stores sessions under `~/.claude/projects//`. /// The project directory name is the CWD with `/` replaced by `-`. fn resolve_claude_code_session() -> Result { - let home = std::env::var("HOME") - .or_else(|_| std::env::var("USERPROFILE")) - .context("Could not determine home directory")?; + let home = opensession_paths::home_dir().context("Could not determine home directory")?; let cwd = std::env::current_dir().context("Could not determine current directory")?; let cwd_str = cwd.to_string_lossy(); // Claude Code project dir: CWD with '/' replaced by '-' let project_dir_name = cwd_str.replace('/', "-"); - let projects_dir = PathBuf::from(&home).join(".claude").join("projects"); + let projects_dir = home.join(".claude").join("projects"); let project_dir = projects_dir.join(&project_dir_name); if !project_dir.is_dir() { @@ -148,7 +142,9 @@ pub fn run_stream_push(agent: &str) -> Result<()> { } // Extract git context from session's working directory - let git = working_directory(&session).map(extract_git_context).unwrap_or_default(); + let git = working_directory(&session) + .map(extract_git_context) + .unwrap_or_default(); let local_git = opensession_local_db::git::GitContext { remote: git.remote.clone(), branch: git.branch.clone(), @@ -178,10 +174,8 @@ pub fn enable_stream_write(agent: &str) -> Result<()> { } fn claude_settings_path() -> Result { - let home = std::env::var("HOME") - .or_else(|_| std::env::var("USERPROFILE")) - .context("Could not determine home directory")?; - Ok(PathBuf::from(home).join(".claude").join("settings.json")) + let home = opensession_paths::home_dir().context("Could not determine home directory")?; + Ok(home.join(".claude").join("settings.json")) } const HOOK_MATCHER: &str = "Edit|Write|Bash|NotebookEdit"; diff --git a/crates/cli/src/summary_cmd.rs b/crates/cli/src/summary_cmd.rs index 2eb99ee8..bc470dc3 100644 --- 
a/crates/cli/src/summary_cmd.rs +++ b/crates/cli/src/summary_cmd.rs @@ -6,9 +6,9 @@ use opensession_git_native::extract_git_context; use opensession_local_db::{LocalDb, SessionSemanticSummaryUpsert}; use opensession_local_store::find_repo_root; use opensession_parsers::ParserRegistry; -use opensession_summary::{ - GitSummaryRequest, SemanticSummaryArtifact, summarize_git_commit, summarize_git_working_tree, - summarize_session, +use opensession_summary::{GitSummaryRequest, SemanticSummaryArtifact}; +use opensession_summary_runtime::{ + summarize_git_commit, summarize_git_working_tree, summarize_session, }; use serde::Serialize; use std::path::{Path, PathBuf}; diff --git a/crates/daemon/Cargo.toml b/crates/daemon/Cargo.toml index 3e10ba4b..1ebcdba7 100644 --- a/crates/daemon/Cargo.toml +++ b/crates/daemon/Cargo.toml @@ -17,9 +17,12 @@ workspace = true [dependencies] opensession-core = { workspace = true } +opensession-paths = { workspace = true } opensession-runtime-config = { workspace = true } opensession-summary = { workspace = true } +opensession-summary-runtime = { workspace = true } opensession-parsers = { workspace = true } +opensession-parser-discovery = { workspace = true } opensession-api = { workspace = true, default-features = false } opensession-api-client = { workspace = true } opensession-local-db = { workspace = true } diff --git a/crates/daemon/src/config.rs b/crates/daemon/src/config.rs index 113e17c6..37521344 100644 --- a/crates/daemon/src/config.rs +++ b/crates/daemon/src/config.rs @@ -1,25 +1,22 @@ use anyhow::{Context, Result}; +use opensession_paths::home_dir; use serde::{Deserialize, Serialize}; use std::collections::HashSet; use std::path::{Path, PathBuf}; // Re-export shared runtime config types pub use opensession_runtime_config::{ - CONFIG_FILE_NAME, DaemonConfig, DaemonSettings, GitStorageMethod, PublishMode, - SessionDefaultView, + DaemonConfig, DaemonSettings, GitStorageMethod, PublishMode, SessionDefaultView, }; /// Get the config 
directory path pub fn config_dir() -> Result { - let home = std::env::var("HOME") - .or_else(|_| std::env::var("USERPROFILE")) - .context("Could not determine home directory")?; - Ok(PathBuf::from(home).join(".config").join("opensession")) + opensession_paths::config_dir().context("Could not determine home directory") } /// Get the daemon config file path pub fn config_path() -> Result { - Ok(config_dir()?.join(CONFIG_FILE_NAME)) + opensession_paths::runtime_config_path().context("Could not determine config file path") } /// Get the PID file path @@ -53,10 +50,7 @@ fn normalize_fixed_runtime_tuning(config: &mut DaemonConfig) { /// Resolve watch paths based on watcher config pub fn resolve_watch_paths(config: &DaemonConfig) -> Vec { - let home = std::env::var("HOME") - .or_else(|_| std::env::var("USERPROFILE")) - .map(PathBuf::from) - .unwrap_or_else(|_| PathBuf::from(".")); + let home = home_dir().unwrap_or_else(|_| PathBuf::from(".")); let raw_paths = config.watchers.custom_paths.clone(); @@ -330,6 +324,22 @@ mod tests { assert_eq!(cfg.daemon.max_retries, defaults.daemon.max_retries); } + #[test] + fn daemon_config_dir_uses_centralized_path() { + assert_eq!( + config_dir().expect("config dir"), + opensession_paths::config_dir().expect("central config dir") + ); + } + + #[test] + fn daemon_config_path_uses_centralized_runtime_path() { + assert_eq!( + config_path().expect("config path"), + opensession_paths::runtime_config_path().expect("central runtime config path") + ); + } + #[test] fn test_find_repo_root() { let tmp = tempfile::tempdir().unwrap(); diff --git a/crates/daemon/src/scheduler/pipeline.rs b/crates/daemon/src/scheduler/pipeline.rs index 3f4723ad..d4909dfb 100644 --- a/crates/daemon/src/scheduler/pipeline.rs +++ b/crates/daemon/src/scheduler/pipeline.rs @@ -9,7 +9,8 @@ use opensession_git_native::{ use opensession_local_db::LocalDb; use opensession_parsers::ParserRegistry; use opensession_runtime_config::SummaryStorageBackend; -use 
opensession_summary::{GitSummaryRequest, summarize_session}; +use opensession_summary::GitSummaryRequest; +use opensession_summary_runtime::summarize_session; use std::path::{Path, PathBuf}; use tracing::{debug, info, warn}; diff --git a/crates/daemon/src/watcher.rs b/crates/daemon/src/watcher.rs index d960fb93..5cb8e03e 100644 --- a/crates/daemon/src/watcher.rs +++ b/crates/daemon/src/watcher.rs @@ -1,6 +1,6 @@ use anyhow::{Context, Result}; use notify::{Event, RecommendedWatcher, RecursiveMode, Watcher}; -use opensession_parsers::discover; +use opensession_parser_discovery::discover_sessions; use std::collections::HashSet; use std::path::{Path, PathBuf}; use tokio::sync::mpsc; @@ -22,7 +22,7 @@ pub fn seed_existing_session_files( return 0; } - let discovered_paths = discover::discover_sessions() + let discovered_paths = discover_sessions() .into_iter() .flat_map(|location| location.paths); enqueue_discovered_paths(watch_roots, discovered_paths, tx) diff --git a/crates/local-db/Cargo.toml b/crates/local-db/Cargo.toml index 59b1e552..92adabe8 100644 --- a/crates/local-db/Cargo.toml +++ b/crates/local-db/Cargo.toml @@ -22,6 +22,7 @@ workspace = true [dependencies] opensession-core = { workspace = true } opensession-api = { workspace = true, default-features = false, features = ["backend"] } +opensession-paths = { workspace = true } rusqlite = { workspace = true } chrono = { workspace = true } serde = { workspace = true } diff --git a/crates/local-db/src/connection.rs b/crates/local-db/src/connection.rs index f67abd36..3f508e3e 100644 --- a/crates/local-db/src/connection.rs +++ b/crates/local-db/src/connection.rs @@ -1,4 +1,5 @@ use anyhow::{Context, Result}; +use opensession_paths::local_db_path; use rusqlite::Connection; use std::path::PathBuf; use std::sync::{Mutex, MutexGuard}; @@ -17,7 +18,7 @@ pub struct LocalDb { impl LocalDb { /// Open (or create) the local database at the default path. 
- /// `~/.local/share/opensession/local.db` + /// `~/.local/share/opensession/local.db` or `OPENSESSION_LOCAL_DB_PATH` when set. pub fn open() -> Result { let path = default_db_path()?; Self::open_path(&path) @@ -57,12 +58,63 @@ fn open_connection_with_latest_schema(path: &PathBuf) -> Result { } fn default_db_path() -> Result { - let home = std::env::var("HOME") - .or_else(|_| std::env::var("USERPROFILE")) - .context("Could not determine home directory")?; - Ok(PathBuf::from(home) - .join(".local") - .join("share") - .join("opensession") - .join("local.db")) + local_db_path().context("Could not determine local db path") +} + +#[cfg(test)] +mod tests { + use super::default_db_path; + use std::ffi::OsString; + use std::path::PathBuf; + use std::sync::{Mutex, OnceLock}; + + fn env_test_lock() -> &'static Mutex<()> { + static LOCK: OnceLock> = OnceLock::new(); + LOCK.get_or_init(|| Mutex::new(())) + } + + struct EnvVarGuard { + key: &'static str, + previous: Option, + } + + impl EnvVarGuard { + fn set(key: &'static str, value: &str) -> Self { + let previous = std::env::var_os(key); + // SAFETY: tests serialize environment mutation with `env_test_lock`, so process + // environment updates do not race with other tests in this module. + unsafe { std::env::set_var(key, value) }; + Self { key, previous } + } + } + + impl Drop for EnvVarGuard { + fn drop(&mut self) { + if let Some(value) = &self.previous { + // SAFETY: tests serialize environment mutation with `env_test_lock`. + unsafe { std::env::set_var(self.key, value) }; + } else { + // SAFETY: tests serialize environment mutation with `env_test_lock`. 
+ unsafe { std::env::remove_var(self.key) }; + } + } + } + + #[test] + fn default_db_path_uses_centralized_location() { + let _lock = env_test_lock().lock().expect("env lock"); + let _guard = EnvVarGuard::set("OPENSESSION_LOCAL_DB_PATH", ""); + let path = default_db_path().expect("default db path"); + assert!(path.ends_with(PathBuf::from(".local/share/opensession/local.db"))); + } + + #[test] + fn default_db_path_honors_env_override() { + let _lock = env_test_lock().lock().expect("env lock"); + let _guard = EnvVarGuard::set("OPENSESSION_LOCAL_DB_PATH", "/tmp/custom-local.db"); + assert_eq!( + default_db_path().expect("default db path"), + PathBuf::from("/tmp/custom-local.db") + ); + } } diff --git a/crates/local-store/Cargo.toml b/crates/local-store/Cargo.toml index c107c376..a429f00d 100644 --- a/crates/local-store/Cargo.toml +++ b/crates/local-store/Cargo.toml @@ -16,6 +16,7 @@ workspace = true [dependencies] opensession-core = { workspace = true } +opensession-paths = { workspace = true } sha2 = { workspace = true } thiserror = { workspace = true } diff --git a/crates/local-store/src/lib.rs b/crates/local-store/src/lib.rs index 23902c89..cec99b4c 100644 --- a/crates/local-store/src/lib.rs +++ b/crates/local-store/src/lib.rs @@ -1,4 +1,5 @@ use opensession_core::source_uri::{SourceSpec, SourceUri, SourceUriError}; +use opensession_paths::local_store_root; use sha2::{Digest, Sha256}; use std::path::{Path, PathBuf}; @@ -96,15 +97,7 @@ fn default_store_root(cwd: &Path) -> Result { } pub fn global_store_root() -> Result { - let home = std::env::var("HOME") - .or_else(|_| std::env::var("USERPROFILE")) - .map(PathBuf::from) - .map_err(|_| LocalStoreError::HomeUnavailable)?; - Ok(home - .join(".local") - .join("share") - .join("opensession") - .join("objects")) + local_store_root().map_err(|_| LocalStoreError::HomeUnavailable) } fn object_path(root: &Path, hash: &str) -> Result { @@ -153,8 +146,10 @@ fn validate_hash(hash: &str) -> Result<(), LocalStoreError> { 
#[cfg(test)] mod tests { use super::{ - LocalStoreError, find_repo_root, read_local_object, sha256_hex, store_local_object, + LocalStoreError, find_repo_root, global_store_root, read_local_object, sha256_hex, + store_local_object, }; + use opensession_paths::local_store_root; use tempfile::tempdir; #[test] @@ -167,8 +162,8 @@ mod tests { #[test] fn global_store_root_uses_standard_home_fallback() { - let root = super::global_store_root().expect("global store root"); - assert!(root.ends_with("opensession/objects")); + let root = global_store_root().expect("global store root"); + assert_eq!(root, local_store_root().expect("centralized store root")); } #[test] diff --git a/crates/parser-discovery/Cargo.toml b/crates/parser-discovery/Cargo.toml new file mode 100644 index 00000000..77272764 --- /dev/null +++ b/crates/parser-discovery/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "opensession-parser-discovery" +version.workspace = true +edition.workspace = true +rust-version.workspace = true +license.workspace = true +repository.workspace = true +description = "Session discovery adapters for OpenSession parsers" + +[lib] +doctest = false + +[lints] +workspace = true + +[dependencies] +opensession-paths = { workspace = true } +glob = { workspace = true } +shellexpand = { workspace = true } +rusqlite = { workspace = true } diff --git a/crates/parsers/src/discover.rs b/crates/parser-discovery/src/lib.rs similarity index 56% rename from crates/parsers/src/discover.rs rename to crates/parser-discovery/src/lib.rs index 89364684..6b0115cb 100644 --- a/crates/parsers/src/discover.rs +++ b/crates/parser-discovery/src/lib.rs @@ -1,57 +1,73 @@ use rusqlite::{Connection, OpenFlags}; use std::collections::HashSet; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; /// Metadata about a discovered session location for a specific AI tool. 
+#[derive(Debug, Clone, PartialEq, Eq)] pub struct SessionLocation { pub tool: String, pub paths: Vec, } /// Discover local session files from known paths for all supported AI tools. +#[must_use] pub fn discover_sessions() -> Vec { - let home = dirs_home(); + discover_sessions_from_home(&dirs_home()) +} + +/// Discover sessions for a specific tool by name. +#[must_use] +pub fn discover_for_tool(tool: &str) -> Vec { + discover_for_tool_in(&dirs_home(), tool) +} + +/// Discover sessions matching an external parser's glob pattern. +#[must_use] +pub fn discover_external(glob_pattern: &str) -> Vec { + let expanded = shellexpand::tilde(glob_pattern).to_string(); + glob::glob(&expanded) + .map(|paths| paths.filter_map(Result::ok).collect()) + .unwrap_or_default() +} + +fn discover_sessions_from_home(home: &Path) -> Vec { let mut locations = Vec::new(); - discover_claude_code(&home, &mut locations); - discover_codex(&home, &mut locations); - discover_opencode(&home, &mut locations); - discover_cline(&home, &mut locations); - discover_amp(&home, &mut locations); - discover_cursor(&home, &mut locations); - discover_gemini(&home, &mut locations); + discover_claude_code(home, &mut locations); + discover_codex(home, &mut locations); + discover_opencode(home, &mut locations); + discover_cline(home, &mut locations); + discover_amp(home, &mut locations); + discover_cursor(home, &mut locations); + discover_gemini(home, &mut locations); locations } -/// Discover sessions for a specific tool by name. 
-pub fn discover_for_tool(tool: &str) -> Vec { - let home = dirs_home(); +fn discover_for_tool_in(home: &Path, tool: &str) -> Vec { match tool { "claude-code" => find_files_with_ext(&home.join(".claude").join("projects"), "jsonl") .into_iter() - .filter(|p| !crate::is_auxiliary_session_path(p)) + .filter(|path| !is_auxiliary_session_path(path)) .collect(), - "codex" => find_codex_sessions(&home), - "opencode" => find_opencode_sessions(&home), - "cline" => find_cline_sessions(&home), - "amp" => find_amp_threads(&home), - "cursor" => find_cursor_vscdb(&home), - "gemini" => find_gemini_sessions(&home), + "codex" => find_codex_sessions(home), + "opencode" => find_opencode_sessions(home), + "cline" => find_cline_sessions(home), + "amp" => find_amp_threads(home), + "cursor" => find_cursor_vscdb(home), + "gemini" => find_gemini_sessions(home), _ => Vec::new(), } } -// ── Per-tool discovery ────────────────────────────────────────────────────── - -fn discover_claude_code(home: &std::path::Path, locations: &mut Vec) { +fn discover_claude_code(home: &Path, locations: &mut Vec) { let claude_path = home.join(".claude").join("projects"); if !claude_path.exists() { return; } let paths: Vec<_> = find_files_with_ext(&claude_path, "jsonl") .into_iter() - .filter(|p| !crate::is_auxiliary_session_path(p)) + .filter(|path| !is_auxiliary_session_path(path)) .collect(); if !paths.is_empty() { locations.push(SessionLocation { @@ -61,7 +77,7 @@ fn discover_claude_code(home: &std::path::Path, locations: &mut Vec) { +fn discover_codex(home: &Path, locations: &mut Vec) { let paths = find_codex_sessions(home); if !paths.is_empty() { locations.push(SessionLocation { @@ -71,7 +87,7 @@ fn discover_codex(home: &std::path::Path, locations: &mut Vec) } } -fn discover_opencode(home: &std::path::Path, locations: &mut Vec) { +fn discover_opencode(home: &Path, locations: &mut Vec) { let paths = find_opencode_sessions(home); if !paths.is_empty() { locations.push(SessionLocation { @@ -81,7 +97,7 @@ fn 
discover_opencode(home: &std::path::Path, locations: &mut Vec) { +fn discover_cline(home: &Path, locations: &mut Vec) { let paths = find_cline_sessions(home); if !paths.is_empty() { locations.push(SessionLocation { @@ -91,7 +107,7 @@ fn discover_cline(home: &std::path::Path, locations: &mut Vec) } } -fn discover_amp(home: &std::path::Path, locations: &mut Vec) { +fn discover_amp(home: &Path, locations: &mut Vec) { let paths = find_amp_threads(home); if !paths.is_empty() { locations.push(SessionLocation { @@ -101,45 +117,38 @@ fn discover_amp(home: &std::path::Path, locations: &mut Vec) { } } -fn discover_gemini(home: &std::path::Path, locations: &mut Vec) { - let paths = find_gemini_sessions(home); +fn discover_cursor(home: &Path, locations: &mut Vec) { + let paths = find_cursor_vscdb(home); if !paths.is_empty() { locations.push(SessionLocation { - tool: "gemini".to_string(), + tool: "cursor".to_string(), paths, }); } } -fn discover_cursor(home: &std::path::Path, locations: &mut Vec) { - let paths = find_cursor_vscdb(home); +fn discover_gemini(home: &Path, locations: &mut Vec) { + let paths = find_gemini_sessions(home); if !paths.is_empty() { locations.push(SessionLocation { - tool: "cursor".to_string(), + tool: "gemini".to_string(), paths, }); } } -// ── Utilities ─────────────────────────────────────────────────────────────── - fn dirs_home() -> PathBuf { - std::env::var("HOME") - .or_else(|_| std::env::var("USERPROFILE")) - .map(PathBuf::from) - .unwrap_or_else(|_| PathBuf::from(".")) + opensession_paths::home_dir().unwrap_or_else(|_| PathBuf::from(".")) } -/// Recursively find files with a given extension under a directory. 
-fn find_files_with_ext(dir: &std::path::Path, ext: &str) -> Vec { +fn find_files_with_ext(dir: &Path, ext: &str) -> Vec { let pattern = format!("{}/**/*.{}", dir.display(), ext); glob::glob(&pattern) .map(|paths| paths.filter_map(Result::ok).collect()) .unwrap_or_default() } -/// Codex stores sessions as JSONL files under ~/.codex/sessions/YYYY/MM/DD/rollout-*.jsonl -fn find_codex_sessions(home: &std::path::Path) -> Vec { +fn find_codex_sessions(home: &Path) -> Vec { let mut roots = Vec::new(); if let Ok(codex_home) = std::env::var("CODEX_HOME") { let codex_home = codex_home.trim(); @@ -167,7 +176,7 @@ fn find_codex_sessions(home: &std::path::Path) -> Vec { out } -fn is_codex_rollout_session_file(path: &std::path::Path) -> bool { +fn is_codex_rollout_session_file(path: &Path) -> bool { path.file_name() .and_then(|name| name.to_str()) .map(|name| { @@ -177,9 +186,7 @@ fn is_codex_rollout_session_file(path: &std::path::Path) -> bool { .unwrap_or(false) } -/// OpenCode stores session info as JSON files under -/// ~/.local/share/opencode/storage/session//.json -fn find_opencode_sessions(home: &std::path::Path) -> Vec { +fn find_opencode_sessions(home: &Path) -> Vec { let session_path = home .join(".local") .join("share") @@ -195,9 +202,7 @@ fn find_opencode_sessions(home: &std::path::Path) -> Vec { .unwrap_or_default() } -/// Cline stores sessions as task directories under ~/.cline/data/tasks/{taskId}/ -/// Each task has api_conversation_history.json as the entry point. 
-fn find_cline_sessions(home: &std::path::Path) -> Vec { +fn find_cline_sessions(home: &Path) -> Vec { let tasks_dir = home.join(".cline").join("data").join("tasks"); if !tasks_dir.exists() { return Vec::new(); @@ -208,8 +213,7 @@ fn find_cline_sessions(home: &std::path::Path) -> Vec { .unwrap_or_default() } -/// Amp stores threads as JSON files under ~/.local/share/amp/threads/T-{uuid}.json -fn find_amp_threads(home: &std::path::Path) -> Vec { +fn find_amp_threads(home: &Path) -> Vec { let threads_dir = home .join(".local") .join("share") @@ -224,17 +228,7 @@ fn find_amp_threads(home: &std::path::Path) -> Vec { .unwrap_or_default() } -/// Discover sessions matching an external parser's glob pattern. -pub fn discover_external(glob_pattern: &str) -> Vec { - let expanded = shellexpand::tilde(glob_pattern).to_string(); - glob::glob(&expanded) - .map(|paths| paths.filter_map(Result::ok).collect()) - .unwrap_or_default() -} - -/// Gemini CLI stores sessions as JSON or JSONL files under -/// ~/.gemini/tmp//chats/session-*.{json,jsonl} -fn find_gemini_sessions(home: &std::path::Path) -> Vec { +fn find_gemini_sessions(home: &Path) -> Vec { let gemini_path = home.join(".gemini").join("tmp"); if !gemini_path.exists() { return Vec::new(); @@ -249,20 +243,14 @@ fn find_gemini_sessions(home: &std::path::Path) -> Vec { results } -/// Cursor stores conversation data in SQLite databases (state.vscdb). 
-/// Global: ~/Library/Application Support/Cursor/User/globalStorage/state.vscdb -/// Per-workspace: ~/Library/Application Support/Cursor/User/workspaceStorage//state.vscdb -fn find_cursor_vscdb(home: &std::path::Path) -> Vec { +fn find_cursor_vscdb(home: &Path) -> Vec { let mut results = Vec::new(); - // macOS path let cursor_base = home .join("Library") .join("Application Support") .join("Cursor") .join("User"); - - // Linux path (XDG) let cursor_base_linux = home.join(".config").join("Cursor").join("User"); for base in &[&cursor_base, &cursor_base_linux] { @@ -270,13 +258,11 @@ fn find_cursor_vscdb(home: &std::path::Path) -> Vec { continue; } - // Global state.vscdb let global_db = base.join("globalStorage").join("state.vscdb"); if global_db.exists() && cursor_db_has_composer_data(&global_db) { results.push(global_db); } - // Per-workspace state.vscdb files let workspace_dir = base.join("workspaceStorage"); if workspace_dir.exists() { let pattern = format!("{}/*/state.vscdb", workspace_dir.display()); @@ -293,7 +279,7 @@ fn find_cursor_vscdb(home: &std::path::Path) -> Vec { results } -fn cursor_db_has_composer_data(path: &std::path::Path) -> bool { +fn cursor_db_has_composer_data(path: &Path) -> bool { let conn = match Connection::open_with_flags(path, OpenFlags::SQLITE_OPEN_READ_ONLY) { Ok(conn) => conn, Err(_) => return false, @@ -328,10 +314,31 @@ fn has_cursor_rows(conn: &Connection, table: &str) -> bool { conn.query_row(&sql, [], |row| row.get(0)).unwrap_or(false) } +fn is_auxiliary_session_path(path: &Path) -> bool { + let path_text = path.to_string_lossy(); + if path_text.contains("/subagents/") || path_text.contains("\\subagents\\") { + return true; + } + + let Some(name) = path.file_name().and_then(|name| name.to_str()) else { + return false; + }; + let lower = name.to_ascii_lowercase(); + lower.starts_with("agent-") + || lower.starts_with("agent_") + || lower.starts_with("subagent-") + || lower.starts_with("subagent_") +} + #[cfg(test)] mod tests { - 
use super::{find_codex_sessions, is_codex_rollout_session_file}; - use std::path::Path; + use super::{ + SessionLocation, discover_for_tool_in, discover_sessions_from_home, find_codex_sessions, + is_codex_rollout_session_file, + }; + use rusqlite::Connection; + use std::fs; + use std::path::{Path, PathBuf}; use std::sync::{Mutex, OnceLock}; fn env_test_lock() -> &'static Mutex<()> { @@ -355,7 +362,7 @@ mod tests { impl Drop for EnvVarRestore { fn drop(&mut self) { - if let Some(ref previous) = self.previous { + if let Some(previous) = self.previous.as_ref() { set_env_for_test(self.key, previous); } else { remove_env_for_test(self.key); @@ -364,17 +371,54 @@ mod tests { } fn set_env_for_test(key: &str, value: impl AsRef) { - // SAFETY: these tests hold env_test_lock() while mutating process environment, so the - // mutation is serialized within the module and not concurrent with other test env access. + // SAFETY: tests hold env_test_lock() while mutating process environment. unsafe { std::env::set_var(key, value) }; } fn remove_env_for_test(key: &str) { - // SAFETY: these tests hold env_test_lock() while mutating process environment, so the - // mutation is serialized within the module and not concurrent with other test env access. + // SAFETY: tests hold env_test_lock() while mutating process environment. 
unsafe { std::env::remove_var(key) }; } + fn unique_temp_dir(prefix: &str) -> PathBuf { + let root = std::env::temp_dir().join(format!( + "{prefix}-{}", + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .expect("clock") + .as_nanos() + )); + fs::create_dir_all(&root).expect("create temp dir"); + root + } + + fn write_cursor_fixture_db(path: &Path) { + if let Some(parent) = path.parent() { + fs::create_dir_all(parent).expect("create cursor parent"); + } + let conn = Connection::open(path).expect("create cursor db"); + conn.execute( + "CREATE TABLE cursorDiskKV (key TEXT PRIMARY KEY, value TEXT)", + [], + ) + .expect("create cursorDiskKV"); + conn.execute( + "INSERT INTO cursorDiskKV (key, value) VALUES (?1, ?2)", + ( + "composerData:test", + r#"{"composerId":"comp-1","conversation":[{"type":1,"text":"hello"}]}"#, + ), + ) + .expect("insert composer row"); + } + + fn collect_tools(locations: &[SessionLocation]) -> Vec<&str> { + locations + .iter() + .map(|location| location.tool.as_str()) + .collect() + } + #[test] fn codex_rollout_matcher_only_accepts_rollout_files() { assert!(is_codex_rollout_session_file(Path::new( @@ -395,21 +439,14 @@ mod tests { fn codex_discovery_ignores_non_rollout_jsonl() { let _guard = env_test_lock().lock().expect("env lock"); let restore = EnvVarRestore::capture("CODEX_HOME"); - let unique = format!( - "opensession-codex-discover-{}", - std::time::SystemTime::now() - .duration_since(std::time::UNIX_EPOCH) - .expect("time") - .as_nanos() - ); - let root = std::env::temp_dir().join(unique); + let root = unique_temp_dir("opensession-codex-discover"); let sessions_dir = root.join("sessions").join("2026").join("02").join("20"); - std::fs::create_dir_all(&sessions_dir).expect("mkdir"); + fs::create_dir_all(&sessions_dir).expect("mkdir"); - std::fs::write(sessions_dir.join("rollout-1.jsonl"), "{}\n").expect("rollout"); - std::fs::write(sessions_dir.join("rollout.jsonl"), "{}\n").expect("rollout base"); - 
std::fs::write(sessions_dir.join("summary.jsonl"), "{}\n").expect("summary"); - std::fs::write(sessions_dir.join("notes.jsonl"), "{}\n").expect("notes"); + fs::write(sessions_dir.join("rollout-1.jsonl"), "{}\n").expect("rollout"); + fs::write(sessions_dir.join("rollout.jsonl"), "{}\n").expect("rollout base"); + fs::write(sessions_dir.join("summary.jsonl"), "{}\n").expect("summary"); + fs::write(sessions_dir.join("notes.jsonl"), "{}\n").expect("notes"); set_env_for_test("CODEX_HOME", &root); let found = find_codex_sessions(Path::new("/this/home/path/does/not/exist")); @@ -435,7 +472,67 @@ mod tests { .any(|path| path.ends_with(Path::new("notes.jsonl"))) ); - std::fs::remove_dir_all(&root).ok(); + fs::remove_dir_all(&root).ok(); drop(restore); } + + #[test] + fn discover_sessions_preserves_tool_order() { + let home = unique_temp_dir("opensession-discovery-order"); + let claude_dir = home.join(".claude/projects/demo"); + fs::create_dir_all(claude_dir.join("subagents")).expect("create claude dir"); + fs::write(claude_dir.join("session.jsonl"), "{}\n").expect("write claude"); + fs::write(claude_dir.join("subagents/agent-1.jsonl"), "{}\n").expect("write subagent"); + + let codex_dir = home.join(".codex/sessions/2026/02/20"); + fs::create_dir_all(&codex_dir).expect("create codex dir"); + fs::write(codex_dir.join("rollout.jsonl"), "{}\n").expect("write codex"); + + let opencode_dir = home.join(".local/share/opencode/storage/session/project"); + fs::create_dir_all(&opencode_dir).expect("create opencode dir"); + fs::write(opencode_dir.join("ses.json"), "{}\n").expect("write opencode"); + + let cline_dir = home.join(".cline/data/tasks/task-1"); + fs::create_dir_all(&cline_dir).expect("create cline dir"); + fs::write(cline_dir.join("api_conversation_history.json"), "{}\n").expect("write cline"); + + let amp_dir = home.join(".local/share/amp/threads"); + fs::create_dir_all(&_dir).expect("create amp dir"); + fs::write(amp_dir.join("T-1.json"), "{}\n").expect("write amp"); + + 
let cursor_db = home.join(".config/Cursor/User/workspaceStorage/test/state.vscdb"); + write_cursor_fixture_db(&cursor_db); + + let gemini_dir = home.join(".gemini/tmp/demo/chats"); + fs::create_dir_all(&gemini_dir).expect("create gemini dir"); + fs::write(gemini_dir.join("session-demo.json"), "{}\n").expect("write gemini"); + + let locations = discover_sessions_from_home(&home); + assert_eq!( + collect_tools(&locations), + vec![ + "claude-code", + "codex", + "opencode", + "cline", + "amp", + "cursor", + "gemini" + ] + ); + assert_eq!(locations[0].paths.len(), 1); + } + + #[test] + fn discover_for_tool_filters_auxiliary_claude_sessions() { + let home = unique_temp_dir("opensession-discovery-claude"); + let project_dir = home.join(".claude/projects/demo"); + fs::create_dir_all(project_dir.join("subagents")).expect("create claude project"); + fs::write(project_dir.join("session.jsonl"), "{}\n").expect("write primary"); + fs::write(project_dir.join("subagents/agent-123.jsonl"), "{}\n").expect("write agent"); + fs::write(project_dir.join("subagent-123.jsonl"), "{}\n").expect("write sibling"); + + let found = discover_for_tool_in(&home, "claude-code"); + assert_eq!(found, vec![project_dir.join("session.jsonl")]); + } } diff --git a/crates/parsers/Cargo.toml b/crates/parsers/Cargo.toml index bb2c18a5..06b174ea 100644 --- a/crates/parsers/Cargo.toml +++ b/crates/parsers/Cargo.toml @@ -28,3 +28,6 @@ regex = { workspace = true } toml = { workspace = true } rusqlite = { workspace = true } tempfile = { workspace = true } + +[dev-dependencies] +opensession-parser-discovery = { workspace = true } diff --git a/crates/parsers/src/claude_code/mod.rs b/crates/parsers/src/claude_code/mod.rs index fc16b2f0..d0728b21 100644 --- a/crates/parsers/src/claude_code/mod.rs +++ b/crates/parsers/src/claude_code/mod.rs @@ -1,4 +1,6 @@ mod parse; +mod raw; +mod subagent; mod transform; use crate::SessionParser; @@ -57,9 +59,8 @@ impl ClaudeCodeParser { } // Re-export pub(crate) items needed by 
incremental.rs -pub(crate) use parse::{ - RawConversationEntry, RawEntry, parse_timestamp, process_assistant_entry, process_user_entry, -}; +pub(crate) use parse::{parse_timestamp, process_assistant_entry, process_user_entry}; +pub(crate) use raw::{RawConversationEntry, RawEntry}; pub fn is_claude_subagent_path(path: &Path) -> bool { let path_text = path.to_string_lossy(); diff --git a/crates/parsers/src/claude_code/parse.rs b/crates/parsers/src/claude_code/parse.rs index a599184e..35197c16 100644 --- a/crates/parsers/src/claude_code/parse.rs +++ b/crates/parsers/src/claude_code/parse.rs @@ -1,4 +1,11 @@ use super::transform::{build_cc_tool_result_content, classify_tool_use, tool_use_content}; +use super::{ + raw::{ + RawContent, RawContentBlock, RawConversationEntry, RawEntry, RawProgressEntry, + RawQueueOperationEntry, RawSummaryEntry, RawSystemEntry, + }, + subagent::{merge_subagent_sessions, read_subagent_meta}, +}; use crate::common::{ ToolUseInfo, attach_semantic_attrs, attach_source_attrs, infer_tool_kind, set_first, strip_system_reminders, @@ -6,220 +13,9 @@ use crate::common::{ use anyhow::{Context, Result}; use chrono::{DateTime, Utc}; use opensession_core::trace::{Agent, Content, Event, EventType, Session, SessionContext}; -use serde::Deserialize; -use std::collections::{HashMap, HashSet}; +use std::collections::HashMap; use std::io::BufRead; -use std::path::{Path, PathBuf}; - -// ── Raw JSONL deserialization types ────────────────────────────────────────── - -/// Top-level entry in the Claude Code JSONL file. -/// Each line is one of these. 
-#[derive(Debug, Deserialize)] -#[serde(tag = "type")] -pub(crate) enum RawEntry { - #[serde(rename = "user")] - User(RawConversationEntry), - #[serde(rename = "assistant")] - Assistant(RawConversationEntry), - #[serde(rename = "file-history-snapshot")] - FileHistorySnapshot {}, - #[serde(rename = "system")] - System(RawSystemEntry), - #[serde(rename = "progress")] - Progress(RawProgressEntry), - #[serde(rename = "queue-operation")] - QueueOperation(RawQueueOperationEntry), - #[serde(rename = "summary")] - Summary(RawSummaryEntry), - // Catch-all for unknown types we want to skip - #[serde(other)] - Unknown, -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub(crate) struct RawConversationEntry { - pub(crate) uuid: String, - #[serde(default)] - pub(crate) session_id: Option, - pub(crate) timestamp: String, - pub(crate) message: RawMessage, - #[serde(default)] - pub(crate) cwd: Option, - #[serde(default)] - pub(crate) git_branch: Option, - #[serde(default)] - pub(crate) version: Option, - #[allow(dead_code)] - #[serde(default)] - agent_id: Option, - #[allow(dead_code)] - #[serde(default)] - slug: Option, - #[allow(dead_code)] - #[serde(default, rename = "costUSD")] - cost_usd: Option, - #[serde(default)] - pub(crate) usage: Option, -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub(crate) struct RawSystemEntry { - #[serde(default)] - pub(crate) uuid: Option, - #[serde(default)] - pub(crate) session_id: Option, - #[serde(default)] - pub(crate) timestamp: Option, - #[serde(default)] - pub(crate) content: Option, - #[serde(default)] - pub(crate) subtype: Option, - #[serde(default)] - pub(crate) level: Option, - #[serde(default)] - pub(crate) cwd: Option, - #[serde(default)] - pub(crate) git_branch: Option, - #[serde(default)] - pub(crate) version: Option, -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub(crate) struct RawProgressEntry { - #[serde(default)] - pub(crate) uuid: Option, - 
#[serde(default)] - pub(crate) session_id: Option, - #[serde(default)] - pub(crate) timestamp: Option, - #[serde(default)] - pub(crate) data: Option, - #[serde(default)] - pub(crate) tool_use_id: Option, - #[serde(default)] - pub(crate) parent_tool_use_id: Option, - #[serde(default)] - pub(crate) cwd: Option, - #[serde(default)] - pub(crate) git_branch: Option, - #[serde(default)] - pub(crate) version: Option, -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub(crate) struct RawQueueOperationEntry { - #[serde(default)] - pub(crate) session_id: Option, - #[serde(default)] - pub(crate) timestamp: Option, - #[serde(default)] - pub(crate) operation: Option, - #[serde(default)] - pub(crate) content: Option, -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub(crate) struct RawSummaryEntry { - #[serde(default)] - pub(crate) uuid: Option, - #[serde(default)] - pub(crate) session_id: Option, - #[serde(default)] - pub(crate) timestamp: Option, - #[serde(default)] - pub(crate) leaf_uuid: Option, - #[serde(default)] - pub(crate) summary: Option, -} - -#[derive(Debug, Deserialize)] -#[allow(dead_code)] -pub(crate) struct RawUsage { - #[serde(default)] - pub(crate) input_tokens: u64, - #[serde(default)] - pub(crate) output_tokens: u64, - #[serde(default)] - cache_read_input_tokens: u64, - #[serde(default)] - cache_creation_input_tokens: u64, -} - -#[derive(Debug, Deserialize)] -#[allow(dead_code)] -pub(crate) struct RawMessage { - pub(crate) role: String, - pub(crate) content: RawContent, - #[serde(default)] - pub(crate) model: Option, -} - -/// Claude Code represents user message content as either a plain string -/// or an array of content blocks. 
-#[derive(Debug, Deserialize)] -#[serde(untagged)] -pub(crate) enum RawContent { - Text(String), - Blocks(Vec), -} - -#[derive(Debug, Deserialize)] -#[serde(tag = "type")] -pub(crate) enum RawContentBlock { - #[serde(rename = "text")] - Text { text: String }, - #[serde(rename = "thinking")] - Thinking { - #[serde(default)] - thinking: Option, - }, - #[serde(rename = "tool_use")] - ToolUse { - #[serde(default)] - id: Option, - name: String, - #[serde(default)] - input: serde_json::Value, - }, - #[serde(rename = "tool_result")] - ToolResult { - #[serde(default)] - tool_use_id: Option, - #[serde(default)] - content: ToolResultContent, - #[serde(default)] - is_error: bool, - }, - // Skip unknown block types gracefully - #[serde(other)] - Other, -} - -/// tool_result content can be a string, array of blocks, or absent -#[derive(Debug, Deserialize)] -#[serde(untagged)] -#[derive(Default)] -pub(crate) enum ToolResultContent { - Text(String), - Blocks(Vec), - #[default] - Null, -} - -#[derive(Debug, Deserialize)] -#[serde(tag = "type")] -pub(crate) enum ToolResultBlock { - #[serde(rename = "text")] - Text { text: String }, - #[serde(other)] - Other, -} +use std::path::Path; // ── Parsing logic ─────────────────────────────────────────────────────────── @@ -452,398 +248,6 @@ pub(super) fn parse_claude_code_jsonl(path: &Path) -> Result { Ok(session) } -fn is_subagent_file_name(name: &str) -> bool { - let lower = name.to_ascii_lowercase(); - lower.starts_with("agent-") - || lower.starts_with("agent_") - || lower.starts_with("subagent-") - || lower.starts_with("subagent_") -} - -fn collect_subagent_dirs(parent_path: &Path) -> Vec { - let mut dirs = Vec::new(); - let mut seen = HashSet::new(); - let mut push_unique = |path: PathBuf| { - if seen.insert(path.clone()) { - dirs.push(path); - } - }; - - // Parent default layout: `/subagents/*.jsonl` - push_unique(parent_path.with_extension("").join("subagents")); - - // Fallback for legacy/alternate layouts in the same project 
folder. - if let Some(parent_dir) = parent_path.parent() { - push_unique(parent_dir.join("subagents")); - // Newer Claude Code layouts can place child logs directly beside the parent. - push_unique(parent_dir.to_path_buf()); - } - - dirs -} - -fn merge_subagent_session_ids_match(parent_session_id: &str, meta: &SubagentMeta) -> bool { - meta.session_id - .as_deref() - .is_some_and(|id| id == parent_session_id) - || meta - .parent_session_id - .as_deref() - .is_some_and(|id| id == parent_session_id) -} - -/// Look for likely subagent files and merge their events into the parent session. -fn merge_subagent_sessions(parent_path: &Path, parent_session_id: &str, session: &mut Session) { - let mut subagent_files: Vec<_> = collect_subagent_dirs(parent_path) - .into_iter() - .filter(|dir| dir.is_dir()) - .flat_map(|dir| match std::fs::read_dir(dir) { - Ok(entries) => entries - .filter_map(|entry| entry.ok()) - .map(|entry| entry.path()) - .filter(|p| p.extension().is_some_and(|ext| ext == "jsonl")) - .collect(), - Err(_) => Vec::new(), - }) - .collect(); - - if subagent_files.is_empty() { - return; - } - - subagent_files.retain(|path| { - if path == parent_path { - return false; - } - - let file_name = match path.file_name().and_then(|n| n.to_str()) { - Some(name) => name, - None => return false, - }; - - if file_name.starts_with('.') { - return false; - } - - let in_subagents_dir = path - .parent() - .and_then(|dir| dir.file_name()) - .and_then(|name| name.to_str()) - .is_some_and(|name| name.eq_ignore_ascii_case("subagents")); - if in_subagents_dir && is_subagent_file_name(file_name) { - return true; - } - - let meta = read_subagent_meta(path); - matches!( - meta, - Some(meta) if merge_subagent_session_ids_match(parent_session_id, &meta) - ) - }); - - subagent_files.sort(); - if subagent_files.is_empty() { - return; - } - - for subagent_path in subagent_files { - let meta = read_subagent_meta(&subagent_path).unwrap_or(SubagentMeta { - slug: None, - agent_id: None, - 
session_id: None, - parent_session_id: None, - }); - let file_agent_id = subagent_path - .file_stem() - .and_then(|s| s.to_str()) - .unwrap_or("unknown") - .to_string(); - - let task_id = meta - .agent_id - .as_ref() - .cloned() - .unwrap_or_else(|| file_agent_id.clone()); - - // Parse the subagent JSONL (same format as parent, no recursive subagent merging) - let sub_session = match parse_subagent_jsonl(&subagent_path) { - Ok(s) => s, - Err(e) => { - tracing::warn!( - "Failed to parse subagent {}: {}", - subagent_path.display(), - e - ); - continue; - } - }; - - if sub_session.events.is_empty() { - continue; - } - let task_title = meta - .slug - .as_ref() - .cloned() - .unwrap_or_else(|| task_id.clone()); - - let sub_model = if sub_session.agent.model != "unknown" { - Some(sub_session.agent.model.clone()) - } else { - None - }; - - // TaskStart event at the subagent's first event timestamp - let start_ts = sub_session.events.first().unwrap().timestamp; - let end_ts = sub_session.events.last().unwrap().timestamp; - - let mut start_attrs = HashMap::new(); - start_attrs.insert( - "subagent_id".to_string(), - serde_json::Value::String(task_id.clone()), - ); - start_attrs.insert("merged_subagent".to_string(), serde_json::Value::Bool(true)); - if let Some(ref model) = sub_model { - start_attrs.insert( - "model".to_string(), - serde_json::Value::String(model.clone()), - ); - } - - session.events.push(Event { - event_id: format!("{}-start", task_id), - timestamp: start_ts, - event_type: EventType::TaskStart { - title: Some(task_title), - }, - task_id: Some(task_id.clone()), - content: Content::text(""), - duration_ms: None, - attributes: start_attrs, - }); - - // Add all subagent events with task_id set - for mut event in sub_session.events { - event.task_id = Some(task_id.clone()); - // Prefix event_id to avoid collisions with parent - event.event_id = format!("{}:{}", task_id, event.event_id); - event.attributes.insert( - "subagent_id".to_string(), - 
serde_json::Value::String(task_id.clone()), - ); - event - .attributes - .insert("merged_subagent".to_string(), serde_json::Value::Bool(true)); - session.events.push(event); - } - - // TaskEnd event - let duration = (end_ts - start_ts).num_milliseconds().max(0) as u64; - let mut end_attrs = HashMap::new(); - end_attrs.insert( - "subagent_id".to_string(), - serde_json::Value::String(task_id.clone()), - ); - end_attrs.insert("merged_subagent".to_string(), serde_json::Value::Bool(true)); - session.events.push(Event { - event_id: format!("{}-end", task_id), - timestamp: end_ts, - event_type: EventType::TaskEnd { - summary: Some(format!( - "{} events, {}", - sub_session.stats.event_count, sub_session.agent.model - )), - }, - task_id: Some(task_id), - content: Content::text(""), - duration_ms: Some(duration), - attributes: end_attrs, - }); - } - - // Re-sort all events by timestamp - session.events.sort_by_key(|e| e.timestamp); -} - -/// Metadata extracted from the first line of a subagent JSONL -struct SubagentMeta { - slug: Option, - agent_id: Option, - session_id: Option, - parent_session_id: Option, -} - -fn read_subagent_meta(path: &Path) -> Option { - let file = std::fs::File::open(path).ok()?; - let mut reader = std::io::BufReader::new(file); - let mut first_line = String::new(); - reader.read_line(&mut first_line).ok()?; - - #[derive(Deserialize)] - #[serde(rename_all = "camelCase")] - struct FirstLine { - #[serde(default)] - slug: Option, - #[serde(default)] - agent_id: Option, - #[serde(default)] - session_id: Option, - #[serde(default, alias = "parentUuid", alias = "parentID", alias = "parentId")] - parent_session_id: Option, - } - - let parsed: FirstLine = serde_json::from_str(&first_line).ok()?; - Some(SubagentMeta { - slug: parsed.slug, - agent_id: parsed.agent_id, - session_id: parsed.session_id, - parent_session_id: parsed.parent_session_id, - }) -} - -/// Parse a subagent JSONL file (same format, but no recursive subagent merging) -fn 
parse_subagent_jsonl(path: &Path) -> Result { - let meta = read_subagent_meta(path); - let file = std::fs::File::open(path) - .with_context(|| format!("Failed to open subagent JSONL: {}", path.display()))?; - let reader = std::io::BufReader::new(file); - - let mut events: Vec = Vec::new(); - let mut model_name: Option = None; - let mut tool_version: Option = None; - let mut session_id: Option = None; - let mut cwd: Option = None; - let mut git_branch: Option = None; - let mut tool_use_info: HashMap = HashMap::new(); - - for line_result in reader.lines() { - let line = match line_result { - Ok(l) => l, - Err(_) => continue, - }; - if line.trim().is_empty() { - continue; - } - - let entry: RawEntry = match serde_json::from_str(&line) { - Ok(e) => e, - Err(_) => continue, - }; - - match entry { - RawEntry::FileHistorySnapshot {} | RawEntry::Unknown => continue, - RawEntry::System(system) => { - set_first(&mut session_id, system.session_id.clone()); - set_first(&mut tool_version, system.version.clone()); - set_first(&mut cwd, system.cwd.clone()); - set_first(&mut git_branch, system.git_branch.clone()); - events.push(system_entry_to_event(&system, &events)); - } - RawEntry::Progress(progress) => { - set_first(&mut session_id, progress.session_id.clone()); - set_first(&mut tool_version, progress.version.clone()); - set_first(&mut cwd, progress.cwd.clone()); - set_first(&mut git_branch, progress.git_branch.clone()); - events.push(progress_entry_to_event(&progress, &events)); - } - RawEntry::QueueOperation(queue_op) => { - set_first(&mut session_id, queue_op.session_id.clone()); - events.push(queue_operation_entry_to_event(&queue_op, &events)); - } - RawEntry::Summary(summary) => { - set_first(&mut session_id, summary.session_id.clone()); - events.push(summary_entry_to_event(&summary, &events)); - } - RawEntry::User(conv) => { - set_first(&mut session_id, conv.session_id.clone()); - set_first(&mut tool_version, conv.version.clone()); - set_first(&mut cwd, 
conv.cwd.clone()); - set_first(&mut git_branch, conv.git_branch.clone()); - if let Ok(ts) = parse_timestamp(&conv.timestamp) { - process_user_entry(&conv, ts, &mut events, &tool_use_info); - } - } - RawEntry::Assistant(conv) => { - set_first(&mut session_id, conv.session_id.clone()); - set_first(&mut tool_version, conv.version.clone()); - set_first(&mut model_name, conv.message.model.clone()); - set_first(&mut git_branch, conv.git_branch.clone()); - if let Ok(ts) = parse_timestamp(&conv.timestamp) { - process_assistant_entry(&conv, ts, &mut events, &mut tool_use_info); - } - } - } - } - - let session_id = session_id.unwrap_or_else(|| { - path.file_stem() - .and_then(|s| s.to_str()) - .unwrap_or("unknown") - .to_string() - }); - - let agent = Agent { - provider: "anthropic".to_string(), - model: model_name.unwrap_or_else(|| "unknown".to_string()), - tool: "claude-code".to_string(), - tool_version, - }; - - let (created_at, updated_at) = - if let (Some(first), Some(last)) = (events.first(), events.last()) { - (first.timestamp, last.timestamp) - } else { - let now = Utc::now(); - (now, now) - }; - - let parent_session_id = meta - .as_ref() - .and_then(|value| value.parent_session_id.clone()) - .map(|value| value.trim().to_string()) - .filter(|value| !value.is_empty()); - let mut attributes = HashMap::from([( - "source_path".to_string(), - serde_json::Value::String(path.to_string_lossy().to_string()), - )]); - attributes.insert( - "session_role".to_string(), - serde_json::Value::String(if parent_session_id.is_some() { - "auxiliary".to_string() - } else { - "primary".to_string() - }), - ); - if let Some(parent_session_id) = parent_session_id.as_ref() { - attributes.insert( - "parent_session_id".to_string(), - serde_json::Value::String(parent_session_id.clone()), - ); - } - if let Some(branch) = git_branch.as_ref() { - attributes.insert( - "git_branch".to_string(), - serde_json::Value::String(branch.clone()), - ); - } - - let context = SessionContext { - title: None, - 
description: None, - tags: vec!["claude-code".to_string()], - created_at, - updated_at, - related_session_ids: parent_session_id.clone().into_iter().collect(), - attributes, - }; - - let mut session = Session::new(session_id, agent); - session.context = context; - session.events = events; - session.recompute_stats(); - Ok(session) -} - pub(crate) fn parse_timestamp(ts: &str) -> Result> { // Claude Code timestamps are ISO 8601, e.g. "2026-02-06T04:46:17.839Z" DateTime::parse_from_rfc3339(ts) @@ -907,7 +311,7 @@ fn progress_text(data: Option<&serde_json::Value>) -> String { format!("Progress: {data_type}") } -fn system_entry_to_event(entry: &RawSystemEntry, events: &[Event]) -> Event { +pub(super) fn system_entry_to_event(entry: &RawSystemEntry, events: &[Event]) -> Event { let fallback = fallback_timestamp(events); let timestamp = parse_timestamp_with_fallback(entry.timestamp.as_deref(), fallback); let subtype = entry.subtype.as_deref().unwrap_or("unknown"); @@ -947,7 +351,7 @@ fn system_entry_to_event(entry: &RawSystemEntry, events: &[Event]) -> Event { } } -fn progress_entry_to_event(entry: &RawProgressEntry, events: &[Event]) -> Event { +pub(super) fn progress_entry_to_event(entry: &RawProgressEntry, events: &[Event]) -> Event { let fallback = fallback_timestamp(events); let timestamp = parse_timestamp_with_fallback(entry.timestamp.as_deref(), fallback); let mut attrs = HashMap::new(); @@ -987,7 +391,10 @@ fn progress_entry_to_event(entry: &RawProgressEntry, events: &[Event]) -> Event } } -fn queue_operation_entry_to_event(entry: &RawQueueOperationEntry, events: &[Event]) -> Event { +pub(super) fn queue_operation_entry_to_event( + entry: &RawQueueOperationEntry, + events: &[Event], +) -> Event { let fallback = fallback_timestamp(events); let timestamp = parse_timestamp_with_fallback(entry.timestamp.as_deref(), fallback); let operation = entry @@ -1027,7 +434,7 @@ fn queue_operation_entry_to_event(entry: &RawQueueOperationEntry, events: &[Even } } -fn 
summary_entry_to_event(entry: &RawSummaryEntry, events: &[Event]) -> Event { +pub(super) fn summary_entry_to_event(entry: &RawSummaryEntry, events: &[Event]) -> Event { let fallback = fallback_timestamp(events); let timestamp = parse_timestamp_with_fallback(entry.timestamp.as_deref(), fallback); let summary_text = event_text_or_default(entry.summary.as_deref(), "Summary"); @@ -1545,493 +952,4 @@ pub(super) fn parse_lines_impl(lines: &[String]) -> ParsedLines { } #[cfg(test)] -mod tests { - use super::*; - use chrono::Datelike; - use chrono::Duration; - use std::collections::HashMap; - use std::fs::{create_dir_all, write}; - use std::time::{SystemTime, UNIX_EPOCH}; - - fn test_temp_root() -> std::path::PathBuf { - let nanos = SystemTime::now() - .duration_since(UNIX_EPOCH) - .expect("clock") - .as_nanos(); - let path = std::env::temp_dir().join(format!("opensession-claude-parser-{nanos}")); - create_dir_all(&path).expect("create test temp root"); - path - } - - #[test] - fn test_parse_timestamp() { - let ts = parse_timestamp("2026-02-06T04:46:17.839Z").unwrap(); - assert_eq!(ts.year(), 2026); - } - - #[test] - fn test_raw_entry_deserialization_user_string() { - let json = r#"{"type":"user","uuid":"abc","sessionId":"s1","timestamp":"2026-01-01T00:00:00Z","message":{"role":"user","content":"hello"}}"#; - let entry: RawEntry = serde_json::from_str(json).unwrap(); - match entry { - RawEntry::User(conv) => { - assert_eq!(conv.uuid, "abc"); - match conv.message.content { - RawContent::Text(t) => assert_eq!(t, "hello"), - _ => panic!("Expected text content"), - } - } - _ => panic!("Expected User entry"), - } - } - - #[test] - fn test_raw_entry_deserialization_assistant() { - let json = r#"{"type":"assistant","uuid":"def","sessionId":"s1","timestamp":"2026-01-01T00:00:00Z","message":{"role":"assistant","model":"claude-opus-4-6","content":[{"type":"text","text":"hi"}]}}"#; - let entry: RawEntry = serde_json::from_str(json).unwrap(); - match entry { - 
RawEntry::Assistant(conv) => { - assert_eq!(conv.message.model.as_deref(), Some("claude-opus-4-6")); - } - _ => panic!("Expected Assistant entry"), - } - } - - #[test] - fn test_raw_entry_skip_file_history() { - let json = r#"{"type":"file-history-snapshot","messageId":"abc","snapshot":{},"isSnapshotUpdate":false}"#; - let entry: RawEntry = serde_json::from_str(json).unwrap(); - matches!(entry, RawEntry::FileHistorySnapshot { .. }); - } - - #[test] - fn test_raw_entry_deserialization_queue_operation_and_summary() { - let queue_json = r#"{"type":"queue-operation","operation":"enqueue","timestamp":"2026-01-01T00:00:01Z","sessionId":"s1","content":"queued"}"#; - let queue_entry: RawEntry = serde_json::from_str(queue_json).unwrap(); - match queue_entry { - RawEntry::QueueOperation(entry) => { - assert_eq!(entry.operation.as_deref(), Some("enqueue")); - assert_eq!(entry.content.as_deref(), Some("queued")); - assert_eq!(entry.session_id.as_deref(), Some("s1")); - } - _ => panic!("Expected QueueOperation entry"), - } - - let summary_json = - r#"{"type":"summary","summary":"Fix parser edge case","leafUuid":"leaf-1"}"#; - let summary_entry: RawEntry = serde_json::from_str(summary_json).unwrap(); - match summary_entry { - RawEntry::Summary(entry) => { - assert_eq!(entry.summary.as_deref(), Some("Fix parser edge case")); - assert_eq!(entry.leaf_uuid.as_deref(), Some("leaf-1")); - } - _ => panic!("Expected Summary entry"), - } - } - - #[test] - fn test_parse_lines_includes_system_progress_queue_and_summary_events() { - let lines = vec![ - serde_json::json!({ - "type": "system", - "uuid": "sys-1", - "sessionId": "s1", - "timestamp": "2026-01-01T00:00:00Z", - "gitBranch": "feature/session-branch", - "subtype": "local_command", - "content": "/usage" - }) - .to_string(), - serde_json::json!({ - "type": "progress", - "uuid": "prog-1", - "sessionId": "s1", - "timestamp": "2026-01-01T00:00:01Z", - "toolUseID": "tool-123", - "data": { - "type": "hook_progress", - "hookEvent": 
"PreToolUse", - "hookName": "PreToolUse:Task" - } - }) - .to_string(), - serde_json::json!({ - "type": "queue-operation", - "sessionId": "s1", - "timestamp": "2026-01-01T00:00:02Z", - "operation": "enqueue", - "content": "queued input" - }) - .to_string(), - serde_json::json!({ - "type": "summary", - "sessionId": "s1", - "leafUuid": "leaf-1", - "summary": "Fix parser edge case" - }) - .to_string(), - ]; - - let parsed = parse_lines_impl(&lines); - assert_eq!(parsed.events.len(), 4); - assert_eq!(parsed.session_id.as_deref(), Some("s1")); - assert!( - parsed - .events - .iter() - .all(|event| matches!(event.event_type, EventType::SystemMessage)) - ); - - let mut seen_raw_types = HashMap::new(); - for event in &parsed.events { - let raw_type = event - .attributes - .get("source.raw_type") - .and_then(|value| value.as_str()) - .unwrap_or("") - .to_string(); - seen_raw_types.insert(raw_type, event.event_id.clone()); - } - - assert!(seen_raw_types.contains_key("system")); - assert!(seen_raw_types.contains_key("progress")); - assert!(seen_raw_types.contains_key("queue-operation")); - assert!(seen_raw_types.contains_key("summary")); - let context = parsed.context.expect("context from parsed lines"); - assert_eq!( - context - .attributes - .get("git_branch") - .and_then(|value| value.as_str()), - Some("feature/session-branch") - ); - } - - #[test] - fn test_tool_result_without_tool_use_id_falls_back_to_recent_tool_use() { - let assistant_json = r#"{ - "type":"assistant", - "uuid":"a1", - "sessionId":"s1", - "timestamp":"2026-02-01T00:00:00Z", - "message":{ - "role":"assistant", - "model":"claude-opus-4-6", - "content":[ - {"type":"tool_use","name":"Read","input":{"file_path":"src/main.rs"}} - ] - } - }"#; - let user_json = r#"{ - "type":"user", - "uuid":"u1", - "sessionId":"s1", - "timestamp":"2026-02-01T00:00:01Z", - "message":{ - "role":"user", - "content":[ - {"type":"tool_result","content":"ok","is_error":false} - ] - } - }"#; - - let assistant_entry: RawEntry = 
serde_json::from_str(assistant_json).unwrap(); - let user_entry: RawEntry = serde_json::from_str(user_json).unwrap(); - let mut events = Vec::new(); - let mut tool_use_info = HashMap::new(); - - match assistant_entry { - RawEntry::Assistant(conv) => { - process_assistant_entry( - &conv, - parse_timestamp(&conv.timestamp).unwrap(), - &mut events, - &mut tool_use_info, - ); - } - _ => panic!("expected assistant entry"), - } - match user_entry { - RawEntry::User(conv) => { - process_user_entry( - &conv, - parse_timestamp(&conv.timestamp).unwrap(), - &mut events, - &tool_use_info, - ); - } - _ => panic!("expected user entry"), - } - - let result_event = events - .iter() - .find(|event| matches!(event.event_type, EventType::ToolResult { .. })) - .expect("tool result exists"); - match &result_event.event_type { - EventType::ToolResult { name, .. } => assert_eq!(name, "Read"), - _ => unreachable!(), - } - } - - #[test] - fn test_subagent_file_merge_handles_file_name_without_meta() { - let dir = test_temp_root(); - let parent_path = dir.as_path().join("session-parent.jsonl"); - let subagent_dir = parent_path.with_extension("").join("subagents"); - create_dir_all(&subagent_dir).unwrap(); - - let parent_session = "sess-parent"; - let subagent_session = "agent-abc123"; - - let parent_entry = serde_json::json!({ - "type": "user", - "uuid": "u1", - "sessionId": parent_session, - "timestamp": Utc::now().to_rfc3339(), - "message": { - "role": "user", - "content": "parent prompt" - } - }) - .to_string(); - write(&parent_path, parent_entry).unwrap(); - - let subagent_entry = serde_json::json!({ - "type": "assistant", - "uuid": "a1", - "sessionId": subagent_session, - "timestamp": Utc::now() - .checked_add_signed(Duration::seconds(1)) - .unwrap() - .to_rfc3339(), - "message": { - "role": "assistant", - "model": "claude-3-opus", - "content": [{ - "type": "text", - "text": "subagent reply" - }] - } - }) - .to_string(); - write( - subagent_dir.join(format!("{subagent_session}.jsonl")), 
- subagent_entry, - ) - .unwrap(); - - let session = parse_claude_code_jsonl(&parent_path).unwrap(); - assert_eq!(session.events.len(), 4); - assert!( - session - .events - .iter() - .any(|e| matches!(e.event_type, EventType::TaskStart { .. })) - ); - assert!(session.events.iter().any(|e| { - e.attributes - .get("merged_subagent") - .and_then(|v| v.as_bool()) - == Some(true) - })); - assert!( - session - .events - .iter() - .any(|e| matches!(e.event_type, EventType::AgentMessage)) - ); - assert!( - session - .events - .iter() - .any(|e| matches!(e.event_type, EventType::TaskEnd { .. })) - ); - // message_count includes user+agent messages and TaskEnd summaries. - assert_eq!(session.stats.message_count, 3); - } - - #[test] - fn test_subagent_file_merge_handles_sibling_layout_with_parent_id_meta() { - let dir = test_temp_root(); - let parent_path = dir.as_path().join("session-parent-sibling.jsonl"); - let parent_session = "sess-parent-sibling"; - - let parent_entry = serde_json::json!({ - "type": "user", - "uuid": "u1", - "sessionId": parent_session, - "timestamp": Utc::now().to_rfc3339(), - "message": { - "role": "user", - "content": "parent prompt" - } - }) - .to_string(); - write(&parent_path, parent_entry).unwrap(); - - let sibling_subagent_path = dir - .as_path() - .join("70dafb43-dbdd-4009-beb0-b6ac2bd9c4d1.jsonl"); - let subagent_entry = serde_json::json!({ - "type": "assistant", - "uuid": "a1", - "sessionId": "subagent-random", - "parentUuid": parent_session, - "timestamp": Utc::now() - .checked_add_signed(Duration::seconds(1)) - .unwrap() - .to_rfc3339(), - "message": { - "role": "assistant", - "model": "claude-3-opus", - "content": [{ - "type": "text", - "text": "sibling subagent reply" - }] - } - }) - .to_string(); - write(&sibling_subagent_path, subagent_entry).unwrap(); - - let session = parse_claude_code_jsonl(&parent_path).unwrap(); - assert!(session.events.iter().any(|event| { - event - .attributes - .get("merged_subagent") - .and_then(|value| 
value.as_bool()) - == Some(true) - })); - assert!(session.events.iter().any(|event| { - matches!(event.event_type, EventType::TaskStart { .. }) - && event - .attributes - .get("subagent_id") - .and_then(|value| value.as_str()) - .is_some() - })); - assert!(session.events.iter().any(|event| { - matches!(event.event_type, EventType::AgentMessage) - && event.content.blocks.iter().any(|block| { - matches!(block, opensession_core::trace::ContentBlock::Text { text } if text.contains("sibling subagent reply")) - }) - })); - } - - #[test] - fn test_parent_id_meta_marks_main_parser_session_as_auxiliary() { - let dir = test_temp_root(); - let path = dir - .as_path() - .join("70dafb43-dbdd-4009-beb0-b6ac2bd9c4d1.jsonl"); - let entry = serde_json::json!({ - "type": "assistant", - "uuid": "a1", - "sessionId": "subagent-random", - "parentId": "parent-main", - "timestamp": Utc::now().to_rfc3339(), - "message": { - "role": "assistant", - "model": "claude-3-opus", - "content": [{ - "type": "text", - "text": "sub" - }] - } - }) - .to_string(); - write(&path, entry).unwrap(); - - let parsed = parse_claude_code_jsonl(&path).unwrap(); - assert_eq!( - parsed - .context - .attributes - .get("session_role") - .and_then(|value| value.as_str()), - Some("auxiliary") - ); - assert_eq!( - parsed - .context - .attributes - .get("parent_session_id") - .and_then(|value| value.as_str()), - Some("parent-main") - ); - assert_eq!( - parsed.context.related_session_ids, - vec!["parent-main".to_string()] - ); - } - - #[test] - fn test_subagent_meta_reads_parent_uuid_aliases() { - let dir = test_temp_root(); - let subagent_path = dir.as_path().join("agent-xyz.jsonl"); - let subagent_entry = serde_json::json!({ - "type": "assistant", - "uuid": "a1", - "sessionId": "sub-1", - "timestamp": Utc::now().to_rfc3339(), - "parentId": "parent-1", - "message": { - "role": "assistant", - "model": "claude-3-opus", - "content": [{ - "type": "text", - "text": "sub" - }] - } - }) - .to_string(); - write(&subagent_path, 
subagent_entry).unwrap(); - - let meta = read_subagent_meta(&subagent_path).unwrap(); - assert_eq!(meta.parent_session_id.as_deref(), Some("parent-1")); - } - - #[test] - fn test_subagent_parse_sets_related_parent_session_id() { - let dir = test_temp_root(); - let subagent_path = dir.as_path().join("agent-related.jsonl"); - let subagent_entry = serde_json::json!({ - "type": "assistant", - "uuid": "a1", - "sessionId": "sub-2", - "timestamp": Utc::now().to_rfc3339(), - "parentId": "parent-2", - "message": { - "role": "assistant", - "model": "claude-3-opus", - "content": [{ - "type": "text", - "text": "sub" - }] - } - }) - .to_string(); - write(&subagent_path, subagent_entry).unwrap(); - - let parsed = parse_subagent_jsonl(&subagent_path).unwrap(); - assert_eq!( - parsed.context.related_session_ids, - vec!["parent-2".to_string()] - ); - assert_eq!( - parsed - .context - .attributes - .get("session_role") - .and_then(|value| value.as_str()), - Some("auxiliary") - ); - assert_eq!( - parsed - .context - .attributes - .get("parent_session_id") - .and_then(|value| value.as_str()), - Some("parent-2") - ); - } -} +mod tests; diff --git a/crates/parsers/src/claude_code/parse/tests.rs b/crates/parsers/src/claude_code/parse/tests.rs new file mode 100644 index 00000000..117e52f5 --- /dev/null +++ b/crates/parsers/src/claude_code/parse/tests.rs @@ -0,0 +1,487 @@ +use super::*; +use chrono::Datelike; +use chrono::Duration; +use std::collections::HashMap; +use std::fs::{create_dir_all, write}; +use std::time::{SystemTime, UNIX_EPOCH}; + +fn test_temp_root() -> std::path::PathBuf { + let nanos = SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("clock") + .as_nanos(); + let path = std::env::temp_dir().join(format!("opensession-claude-parser-{nanos}")); + create_dir_all(&path).expect("create test temp root"); + path +} + +#[test] +fn test_parse_timestamp() { + let ts = parse_timestamp("2026-02-06T04:46:17.839Z").unwrap(); + assert_eq!(ts.year(), 2026); +} + +#[test] +fn 
test_raw_entry_deserialization_user_string() { + let json = r#"{"type":"user","uuid":"abc","sessionId":"s1","timestamp":"2026-01-01T00:00:00Z","message":{"role":"user","content":"hello"}}"#; + let entry: RawEntry = serde_json::from_str(json).unwrap(); + match entry { + RawEntry::User(conv) => { + assert_eq!(conv.uuid, "abc"); + match conv.message.content { + RawContent::Text(text) => assert_eq!(text, "hello"), + _ => panic!("Expected text content"), + } + } + _ => panic!("Expected User entry"), + } +} + +#[test] +fn test_raw_entry_deserialization_assistant() { + let json = r#"{"type":"assistant","uuid":"def","sessionId":"s1","timestamp":"2026-01-01T00:00:00Z","message":{"role":"assistant","model":"claude-opus-4-6","content":[{"type":"text","text":"hi"}]}}"#; + let entry: RawEntry = serde_json::from_str(json).unwrap(); + match entry { + RawEntry::Assistant(conv) => { + assert_eq!(conv.message.model.as_deref(), Some("claude-opus-4-6")); + } + _ => panic!("Expected Assistant entry"), + } +} + +#[test] +fn test_raw_entry_skip_file_history() { + let json = r#"{"type":"file-history-snapshot","messageId":"abc","snapshot":{},"isSnapshotUpdate":false}"#; + let entry: RawEntry = serde_json::from_str(json).unwrap(); + matches!(entry, RawEntry::FileHistorySnapshot { .. 
}); +} + +#[test] +fn test_raw_entry_deserialization_queue_operation_and_summary() { + let queue_json = r#"{"type":"queue-operation","operation":"enqueue","timestamp":"2026-01-01T00:00:01Z","sessionId":"s1","content":"queued"}"#; + let queue_entry: RawEntry = serde_json::from_str(queue_json).unwrap(); + match queue_entry { + RawEntry::QueueOperation(entry) => { + assert_eq!(entry.operation.as_deref(), Some("enqueue")); + assert_eq!(entry.content.as_deref(), Some("queued")); + assert_eq!(entry.session_id.as_deref(), Some("s1")); + } + _ => panic!("Expected QueueOperation entry"), + } + + let summary_json = r#"{"type":"summary","summary":"Fix parser edge case","leafUuid":"leaf-1"}"#; + let summary_entry: RawEntry = serde_json::from_str(summary_json).unwrap(); + match summary_entry { + RawEntry::Summary(entry) => { + assert_eq!(entry.summary.as_deref(), Some("Fix parser edge case")); + assert_eq!(entry.leaf_uuid.as_deref(), Some("leaf-1")); + } + _ => panic!("Expected Summary entry"), + } +} + +#[test] +fn test_parse_lines_includes_system_progress_queue_and_summary_events() { + let lines = vec![ + serde_json::json!({ + "type": "system", + "uuid": "sys-1", + "sessionId": "s1", + "timestamp": "2026-01-01T00:00:00Z", + "gitBranch": "feature/session-branch", + "subtype": "local_command", + "content": "/usage" + }) + .to_string(), + serde_json::json!({ + "type": "progress", + "uuid": "prog-1", + "sessionId": "s1", + "timestamp": "2026-01-01T00:00:01Z", + "toolUseID": "tool-123", + "data": { + "type": "hook_progress", + "hookEvent": "PreToolUse", + "hookName": "PreToolUse:Task" + } + }) + .to_string(), + serde_json::json!({ + "type": "queue-operation", + "sessionId": "s1", + "timestamp": "2026-01-01T00:00:02Z", + "operation": "enqueue", + "content": "queued input" + }) + .to_string(), + serde_json::json!({ + "type": "summary", + "sessionId": "s1", + "leafUuid": "leaf-1", + "summary": "Fix parser edge case" + }) + .to_string(), + ]; + + let parsed = parse_lines_impl(&lines); 
+ assert_eq!(parsed.events.len(), 4); + assert_eq!(parsed.session_id.as_deref(), Some("s1")); + assert!( + parsed + .events + .iter() + .all(|event| matches!(event.event_type, EventType::SystemMessage)) + ); + + let mut seen_raw_types = HashMap::new(); + for event in &parsed.events { + let raw_type = event + .attributes + .get("source.raw_type") + .and_then(|value| value.as_str()) + .unwrap_or("") + .to_string(); + seen_raw_types.insert(raw_type, event.event_id.clone()); + } + + assert!(seen_raw_types.contains_key("system")); + assert!(seen_raw_types.contains_key("progress")); + assert!(seen_raw_types.contains_key("queue-operation")); + assert!(seen_raw_types.contains_key("summary")); + let context = parsed.context.expect("context from parsed lines"); + assert_eq!( + context + .attributes + .get("git_branch") + .and_then(|value| value.as_str()), + Some("feature/session-branch") + ); +} + +#[test] +fn test_tool_result_without_tool_use_id_falls_back_to_recent_tool_use() { + let assistant_json = r#"{ + "type":"assistant", + "uuid":"a1", + "sessionId":"s1", + "timestamp":"2026-02-01T00:00:00Z", + "message":{ + "role":"assistant", + "model":"claude-opus-4-6", + "content":[ + {"type":"tool_use","name":"Read","input":{"file_path":"src/main.rs"}} + ] + } + }"#; + let user_json = r#"{ + "type":"user", + "uuid":"u1", + "sessionId":"s1", + "timestamp":"2026-02-01T00:00:01Z", + "message":{ + "role":"user", + "content":[ + {"type":"tool_result","content":"ok","is_error":false} + ] + } + }"#; + + let assistant_entry: RawEntry = serde_json::from_str(assistant_json).unwrap(); + let user_entry: RawEntry = serde_json::from_str(user_json).unwrap(); + let mut events = Vec::new(); + let mut tool_use_info = HashMap::new(); + + match assistant_entry { + RawEntry::Assistant(conv) => { + process_assistant_entry( + &conv, + parse_timestamp(&conv.timestamp).unwrap(), + &mut events, + &mut tool_use_info, + ); + } + _ => panic!("expected assistant entry"), + } + match user_entry { + 
RawEntry::User(conv) => { + process_user_entry( + &conv, + parse_timestamp(&conv.timestamp).unwrap(), + &mut events, + &tool_use_info, + ); + } + _ => panic!("expected user entry"), + } + + let result_event = events + .iter() + .find(|event| matches!(event.event_type, EventType::ToolResult { .. })) + .expect("tool result exists"); + match &result_event.event_type { + EventType::ToolResult { name, .. } => assert_eq!(name, "Read"), + _ => unreachable!(), + } +} + +#[test] +fn test_subagent_file_merge_handles_file_name_without_meta() { + let dir = test_temp_root(); + let parent_path = dir.as_path().join("session-parent.jsonl"); + let subagent_dir = parent_path.with_extension("").join("subagents"); + create_dir_all(&subagent_dir).unwrap(); + + let parent_session = "sess-parent"; + let subagent_session = "agent-abc123"; + + let parent_entry = serde_json::json!({ + "type": "user", + "uuid": "u1", + "sessionId": parent_session, + "timestamp": Utc::now().to_rfc3339(), + "message": { + "role": "user", + "content": "parent prompt" + } + }) + .to_string(); + write(&parent_path, parent_entry).unwrap(); + + let subagent_entry = serde_json::json!({ + "type": "assistant", + "uuid": "a1", + "sessionId": subagent_session, + "timestamp": Utc::now() + .checked_add_signed(Duration::seconds(1)) + .unwrap() + .to_rfc3339(), + "message": { + "role": "assistant", + "model": "claude-3-opus", + "content": [{ + "type": "text", + "text": "subagent reply" + }] + } + }) + .to_string(); + write( + subagent_dir.join(format!("{subagent_session}.jsonl")), + subagent_entry, + ) + .unwrap(); + + let session = parse_claude_code_jsonl(&parent_path).unwrap(); + assert_eq!(session.events.len(), 4); + assert!( + session + .events + .iter() + .any(|event| matches!(event.event_type, EventType::TaskStart { .. 
})) + ); + assert!(session.events.iter().any(|event| { + event + .attributes + .get("merged_subagent") + .and_then(|value| value.as_bool()) + == Some(true) + })); + assert!( + session + .events + .iter() + .any(|event| matches!(event.event_type, EventType::AgentMessage)) + ); + assert!( + session + .events + .iter() + .any(|event| matches!(event.event_type, EventType::TaskEnd { .. })) + ); + assert_eq!(session.stats.message_count, 3); +} + +#[test] +fn test_subagent_file_merge_handles_sibling_layout_with_parent_id_meta() { + let dir = test_temp_root(); + let parent_path = dir.as_path().join("session-parent-sibling.jsonl"); + let parent_session = "sess-parent-sibling"; + + let parent_entry = serde_json::json!({ + "type": "user", + "uuid": "u1", + "sessionId": parent_session, + "timestamp": Utc::now().to_rfc3339(), + "message": { + "role": "user", + "content": "parent prompt" + } + }) + .to_string(); + write(&parent_path, parent_entry).unwrap(); + + let sibling_subagent_path = dir + .as_path() + .join("70dafb43-dbdd-4009-beb0-b6ac2bd9c4d1.jsonl"); + let subagent_entry = serde_json::json!({ + "type": "assistant", + "uuid": "a1", + "sessionId": "subagent-random", + "parentUuid": parent_session, + "timestamp": Utc::now() + .checked_add_signed(Duration::seconds(1)) + .unwrap() + .to_rfc3339(), + "message": { + "role": "assistant", + "model": "claude-3-opus", + "content": [{ + "type": "text", + "text": "sibling subagent reply" + }] + } + }) + .to_string(); + write(&sibling_subagent_path, subagent_entry).unwrap(); + + let session = parse_claude_code_jsonl(&parent_path).unwrap(); + assert!(session.events.iter().any(|event| { + event + .attributes + .get("merged_subagent") + .and_then(|value| value.as_bool()) + == Some(true) + })); + assert!(session.events.iter().any(|event| { + matches!(event.event_type, EventType::TaskStart { .. 
}) + && event + .attributes + .get("subagent_id") + .and_then(|value| value.as_str()) + .is_some() + })); + assert!(session.events.iter().any(|event| { + matches!(event.event_type, EventType::AgentMessage) + && event.content.blocks.iter().any(|block| { + matches!(block, opensession_core::trace::ContentBlock::Text { text } if text.contains("sibling subagent reply")) + }) + })); +} + +#[test] +fn test_parent_id_meta_marks_main_parser_session_as_auxiliary() { + let dir = test_temp_root(); + let path = dir + .as_path() + .join("70dafb43-dbdd-4009-beb0-b6ac2bd9c4d1.jsonl"); + let entry = serde_json::json!({ + "type": "assistant", + "uuid": "a1", + "sessionId": "subagent-random", + "parentId": "parent-main", + "timestamp": Utc::now().to_rfc3339(), + "message": { + "role": "assistant", + "model": "claude-3-opus", + "content": [{ + "type": "text", + "text": "sub" + }] + } + }) + .to_string(); + write(&path, entry).unwrap(); + + let parsed = parse_claude_code_jsonl(&path).unwrap(); + assert_eq!( + parsed + .context + .attributes + .get("session_role") + .and_then(|value| value.as_str()), + Some("auxiliary") + ); + assert_eq!( + parsed + .context + .attributes + .get("parent_session_id") + .and_then(|value| value.as_str()), + Some("parent-main") + ); + assert_eq!( + parsed.context.related_session_ids, + vec!["parent-main".to_string()] + ); +} + +#[test] +fn test_subagent_meta_reads_parent_uuid_aliases() { + let dir = test_temp_root(); + let subagent_path = dir.as_path().join("agent-xyz.jsonl"); + let subagent_entry = serde_json::json!({ + "type": "assistant", + "uuid": "a1", + "sessionId": "sub-1", + "timestamp": Utc::now().to_rfc3339(), + "parentId": "parent-1", + "message": { + "role": "assistant", + "model": "claude-3-opus", + "content": [{ + "type": "text", + "text": "sub" + }] + } + }) + .to_string(); + write(&subagent_path, subagent_entry).unwrap(); + + let meta = read_subagent_meta(&subagent_path).unwrap(); + assert_eq!(meta.parent_session_id.as_deref(), 
Some("parent-1")); +} + +#[test] +fn test_subagent_parse_sets_related_parent_session_id() { + let dir = test_temp_root(); + let subagent_path = dir.as_path().join("agent-related.jsonl"); + let subagent_entry = serde_json::json!({ + "type": "assistant", + "uuid": "a1", + "sessionId": "sub-2", + "timestamp": Utc::now().to_rfc3339(), + "parentId": "parent-2", + "message": { + "role": "assistant", + "model": "claude-3-opus", + "content": [{ + "type": "text", + "text": "sub" + }] + } + }) + .to_string(); + write(&subagent_path, subagent_entry).unwrap(); + + let parsed = super::super::subagent::parse_subagent_jsonl(&subagent_path).unwrap(); + assert_eq!( + parsed.context.related_session_ids, + vec!["parent-2".to_string()] + ); + assert_eq!( + parsed + .context + .attributes + .get("session_role") + .and_then(|value| value.as_str()), + Some("auxiliary") + ); + assert_eq!( + parsed + .context + .attributes + .get("parent_session_id") + .and_then(|value| value.as_str()), + Some("parent-2") + ); +} diff --git a/crates/parsers/src/claude_code/raw.rs b/crates/parsers/src/claude_code/raw.rs new file mode 100644 index 00000000..53986f62 --- /dev/null +++ b/crates/parsers/src/claude_code/raw.rs @@ -0,0 +1,203 @@ +use serde::Deserialize; + +/// Top-level entry in the Claude Code JSONL file. +/// Each line is one of these. 
+#[derive(Debug, Deserialize)] +#[serde(tag = "type")] +pub(crate) enum RawEntry { + #[serde(rename = "user")] + User(RawConversationEntry), + #[serde(rename = "assistant")] + Assistant(RawConversationEntry), + #[serde(rename = "file-history-snapshot")] + FileHistorySnapshot {}, + #[serde(rename = "system")] + System(RawSystemEntry), + #[serde(rename = "progress")] + Progress(RawProgressEntry), + #[serde(rename = "queue-operation")] + QueueOperation(RawQueueOperationEntry), + #[serde(rename = "summary")] + Summary(RawSummaryEntry), + #[serde(other)] + Unknown, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub(crate) struct RawConversationEntry { + pub(crate) uuid: String, + #[serde(default)] + pub(crate) session_id: Option, + pub(crate) timestamp: String, + pub(crate) message: RawMessage, + #[serde(default)] + pub(crate) cwd: Option, + #[serde(default)] + pub(crate) git_branch: Option, + #[serde(default)] + pub(crate) version: Option, + #[allow(dead_code)] + #[serde(default)] + agent_id: Option, + #[allow(dead_code)] + #[serde(default)] + slug: Option, + #[allow(dead_code)] + #[serde(default, rename = "costUSD")] + cost_usd: Option, + #[serde(default)] + pub(crate) usage: Option, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub(crate) struct RawSystemEntry { + #[serde(default)] + pub(crate) uuid: Option, + #[serde(default)] + pub(crate) session_id: Option, + #[serde(default)] + pub(crate) timestamp: Option, + #[serde(default)] + pub(crate) content: Option, + #[serde(default)] + pub(crate) subtype: Option, + #[serde(default)] + pub(crate) level: Option, + #[serde(default)] + pub(crate) cwd: Option, + #[serde(default)] + pub(crate) git_branch: Option, + #[serde(default)] + pub(crate) version: Option, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub(crate) struct RawProgressEntry { + #[serde(default)] + pub(crate) uuid: Option, + #[serde(default)] + pub(crate) session_id: Option, + 
#[serde(default)] + pub(crate) timestamp: Option, + #[serde(default)] + pub(crate) data: Option, + #[serde(default)] + pub(crate) tool_use_id: Option, + #[serde(default)] + pub(crate) parent_tool_use_id: Option, + #[serde(default)] + pub(crate) cwd: Option, + #[serde(default)] + pub(crate) git_branch: Option, + #[serde(default)] + pub(crate) version: Option, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub(crate) struct RawQueueOperationEntry { + #[serde(default)] + pub(crate) session_id: Option, + #[serde(default)] + pub(crate) timestamp: Option, + #[serde(default)] + pub(crate) operation: Option, + #[serde(default)] + pub(crate) content: Option, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub(crate) struct RawSummaryEntry { + #[serde(default)] + pub(crate) uuid: Option, + #[serde(default)] + pub(crate) session_id: Option, + #[serde(default)] + pub(crate) timestamp: Option, + #[serde(default)] + pub(crate) leaf_uuid: Option, + #[serde(default)] + pub(crate) summary: Option, +} + +#[derive(Debug, Deserialize)] +#[allow(dead_code)] +pub(crate) struct RawUsage { + #[serde(default)] + pub(crate) input_tokens: u64, + #[serde(default)] + pub(crate) output_tokens: u64, + #[serde(default)] + cache_read_input_tokens: u64, + #[serde(default)] + cache_creation_input_tokens: u64, +} + +#[derive(Debug, Deserialize)] +#[allow(dead_code)] +pub(crate) struct RawMessage { + pub(crate) role: String, + pub(crate) content: RawContent, + #[serde(default)] + pub(crate) model: Option, +} + +#[derive(Debug, Deserialize)] +#[serde(untagged)] +pub(crate) enum RawContent { + Text(String), + Blocks(Vec), +} + +#[derive(Debug, Deserialize)] +#[serde(tag = "type")] +pub(crate) enum RawContentBlock { + #[serde(rename = "text")] + Text { text: String }, + #[serde(rename = "thinking")] + Thinking { + #[serde(default)] + thinking: Option, + }, + #[serde(rename = "tool_use")] + ToolUse { + #[serde(default)] + id: Option, + name: String, + 
#[serde(default)] + input: serde_json::Value, + }, + #[serde(rename = "tool_result")] + ToolResult { + #[serde(default)] + tool_use_id: Option, + #[serde(default)] + content: ToolResultContent, + #[serde(default)] + is_error: bool, + }, + #[serde(other)] + Other, +} + +#[derive(Debug, Default, Deserialize)] +#[serde(untagged)] +pub(crate) enum ToolResultContent { + Text(String), + Blocks(Vec), + #[default] + Null, +} + +#[derive(Debug, Deserialize)] +#[serde(tag = "type")] +pub(crate) enum ToolResultBlock { + #[serde(rename = "text")] + Text { text: String }, + #[serde(other)] + Other, +} diff --git a/crates/parsers/src/claude_code/subagent.rs b/crates/parsers/src/claude_code/subagent.rs new file mode 100644 index 00000000..5765e90e --- /dev/null +++ b/crates/parsers/src/claude_code/subagent.rs @@ -0,0 +1,402 @@ +use super::parse::{parse_timestamp, process_assistant_entry, process_user_entry}; +use super::raw::RawEntry; +use crate::common::ToolUseInfo; +use crate::common::set_first; +use anyhow::{Context, Result}; +use chrono::Utc; +use opensession_core::trace::{Agent, Event, EventType, Session, SessionContext}; +use serde::Deserialize; +use std::collections::{HashMap, HashSet}; +use std::io::BufRead; +use std::path::{Path, PathBuf}; + +fn is_subagent_file_name(name: &str) -> bool { + let lower = name.to_ascii_lowercase(); + lower.starts_with("agent-") + || lower.starts_with("agent_") + || lower.starts_with("subagent-") + || lower.starts_with("subagent_") +} + +fn collect_subagent_dirs(parent_path: &Path) -> Vec { + let mut dirs = Vec::new(); + let mut seen = HashSet::new(); + let mut push_unique = |path: PathBuf| { + if seen.insert(path.clone()) { + dirs.push(path); + } + }; + + push_unique(parent_path.with_extension("").join("subagents")); + + if let Some(parent_dir) = parent_path.parent() { + push_unique(parent_dir.join("subagents")); + push_unique(parent_dir.to_path_buf()); + } + + dirs +} + +fn merge_subagent_session_ids_match(parent_session_id: &str, meta: 
&SubagentMeta) -> bool { + meta.session_id + .as_deref() + .is_some_and(|id| id == parent_session_id) + || meta + .parent_session_id + .as_deref() + .is_some_and(|id| id == parent_session_id) +} + +pub(super) fn merge_subagent_sessions( + parent_path: &Path, + parent_session_id: &str, + session: &mut Session, +) { + let mut subagent_files: Vec<_> = collect_subagent_dirs(parent_path) + .into_iter() + .filter(|dir| dir.is_dir()) + .flat_map(|dir| match std::fs::read_dir(dir) { + Ok(entries) => entries + .filter_map(|entry| entry.ok()) + .map(|entry| entry.path()) + .filter(|path| path.extension().is_some_and(|ext| ext == "jsonl")) + .collect(), + Err(_) => Vec::new(), + }) + .collect(); + + if subagent_files.is_empty() { + return; + } + + subagent_files.retain(|path| { + if path == parent_path { + return false; + } + + let file_name = match path.file_name().and_then(|name| name.to_str()) { + Some(name) => name, + None => return false, + }; + + if file_name.starts_with('.') { + return false; + } + + let in_subagents_dir = path + .parent() + .and_then(|dir| dir.file_name()) + .and_then(|name| name.to_str()) + .is_some_and(|name| name.eq_ignore_ascii_case("subagents")); + if in_subagents_dir && is_subagent_file_name(file_name) { + return true; + } + + let meta = read_subagent_meta(path); + matches!( + meta, + Some(meta) if merge_subagent_session_ids_match(parent_session_id, &meta) + ) + }); + + subagent_files.sort(); + if subagent_files.is_empty() { + return; + } + + for subagent_path in subagent_files { + let meta = read_subagent_meta(&subagent_path).unwrap_or(SubagentMeta { + slug: None, + agent_id: None, + session_id: None, + parent_session_id: None, + }); + let file_agent_id = subagent_path + .file_stem() + .and_then(|stem| stem.to_str()) + .unwrap_or("unknown") + .to_string(); + + let task_id = meta + .agent_id + .as_ref() + .cloned() + .unwrap_or_else(|| file_agent_id.clone()); + + let sub_session = match parse_subagent_jsonl(&subagent_path) { + Ok(session) => 
session, + Err(error) => { + tracing::warn!( + "Failed to parse subagent {}: {}", + subagent_path.display(), + error + ); + continue; + } + }; + + if sub_session.events.is_empty() { + continue; + } + let task_title = meta + .slug + .as_ref() + .cloned() + .unwrap_or_else(|| task_id.clone()); + + let sub_model = if sub_session.agent.model != "unknown" { + Some(sub_session.agent.model.clone()) + } else { + None + }; + + let start_ts = sub_session + .events + .first() + .expect("subagent start") + .timestamp; + let end_ts = sub_session.events.last().expect("subagent end").timestamp; + + let mut start_attrs = HashMap::new(); + start_attrs.insert( + "subagent_id".to_string(), + serde_json::Value::String(task_id.clone()), + ); + start_attrs.insert("merged_subagent".to_string(), serde_json::Value::Bool(true)); + if let Some(model) = sub_model.as_ref() { + start_attrs.insert( + "model".to_string(), + serde_json::Value::String(model.clone()), + ); + } + + session.events.push(Event { + event_id: format!("{task_id}-start"), + timestamp: start_ts, + event_type: EventType::TaskStart { + title: Some(task_title), + }, + task_id: Some(task_id.clone()), + content: opensession_core::trace::Content::text(""), + duration_ms: None, + attributes: start_attrs, + }); + + for mut event in sub_session.events { + event.task_id = Some(task_id.clone()); + event.event_id = format!("{}:{}", task_id, event.event_id); + event.attributes.insert( + "subagent_id".to_string(), + serde_json::Value::String(task_id.clone()), + ); + event + .attributes + .insert("merged_subagent".to_string(), serde_json::Value::Bool(true)); + session.events.push(event); + } + + let duration = (end_ts - start_ts).num_milliseconds().max(0) as u64; + let mut end_attrs = HashMap::new(); + end_attrs.insert( + "subagent_id".to_string(), + serde_json::Value::String(task_id.clone()), + ); + end_attrs.insert("merged_subagent".to_string(), serde_json::Value::Bool(true)); + session.events.push(Event { + event_id: 
format!("{task_id}-end"), + timestamp: end_ts, + event_type: EventType::TaskEnd { + summary: Some(format!( + "{} events, {}", + sub_session.stats.event_count, sub_session.agent.model + )), + }, + task_id: Some(task_id), + content: opensession_core::trace::Content::text(""), + duration_ms: Some(duration), + attributes: end_attrs, + }); + } + + session.events.sort_by_key(|event| event.timestamp); +} + +#[derive(Debug)] +pub(super) struct SubagentMeta { + pub(super) slug: Option, + pub(super) agent_id: Option, + pub(super) session_id: Option, + pub(super) parent_session_id: Option, +} + +pub(super) fn read_subagent_meta(path: &Path) -> Option { + let file = std::fs::File::open(path).ok()?; + let mut reader = std::io::BufReader::new(file); + let mut first_line = String::new(); + reader.read_line(&mut first_line).ok()?; + + #[derive(Deserialize)] + #[serde(rename_all = "camelCase")] + struct FirstLine { + #[serde(default)] + slug: Option, + #[serde(default)] + agent_id: Option, + #[serde(default)] + session_id: Option, + #[serde(default, alias = "parentUuid", alias = "parentID", alias = "parentId")] + parent_session_id: Option, + } + + let parsed: FirstLine = serde_json::from_str(&first_line).ok()?; + Some(SubagentMeta { + slug: parsed.slug, + agent_id: parsed.agent_id, + session_id: parsed.session_id, + parent_session_id: parsed.parent_session_id, + }) +} + +pub(super) fn parse_subagent_jsonl(path: &Path) -> Result { + let meta = read_subagent_meta(path); + let file = std::fs::File::open(path) + .with_context(|| format!("Failed to open subagent JSONL: {}", path.display()))?; + let reader = std::io::BufReader::new(file); + + let mut events: Vec = Vec::new(); + let mut model_name: Option = None; + let mut tool_version: Option = None; + let mut session_id: Option = None; + let mut cwd: Option = None; + let mut git_branch: Option = None; + let mut tool_use_info: HashMap = HashMap::new(); + + for line_result in reader.lines() { + let line = match line_result { + Ok(line) => 
line, + Err(_) => continue, + }; + if line.trim().is_empty() { + continue; + } + + let entry: RawEntry = match serde_json::from_str(&line) { + Ok(entry) => entry, + Err(_) => continue, + }; + + match entry { + RawEntry::FileHistorySnapshot {} | RawEntry::Unknown => continue, + RawEntry::System(system) => { + set_first(&mut session_id, system.session_id.clone()); + set_first(&mut tool_version, system.version.clone()); + set_first(&mut cwd, system.cwd.clone()); + set_first(&mut git_branch, system.git_branch.clone()); + events.push(super::parse::system_entry_to_event(&system, &events)); + } + RawEntry::Progress(progress) => { + set_first(&mut session_id, progress.session_id.clone()); + set_first(&mut tool_version, progress.version.clone()); + set_first(&mut cwd, progress.cwd.clone()); + set_first(&mut git_branch, progress.git_branch.clone()); + events.push(super::parse::progress_entry_to_event(&progress, &events)); + } + RawEntry::QueueOperation(queue_op) => { + set_first(&mut session_id, queue_op.session_id.clone()); + events.push(super::parse::queue_operation_entry_to_event( + &queue_op, &events, + )); + } + RawEntry::Summary(summary) => { + set_first(&mut session_id, summary.session_id.clone()); + events.push(super::parse::summary_entry_to_event(&summary, &events)); + } + RawEntry::User(conv) => { + set_first(&mut session_id, conv.session_id.clone()); + set_first(&mut tool_version, conv.version.clone()); + set_first(&mut cwd, conv.cwd.clone()); + set_first(&mut git_branch, conv.git_branch.clone()); + if let Ok(ts) = parse_timestamp(&conv.timestamp) { + process_user_entry(&conv, ts, &mut events, &tool_use_info); + } + } + RawEntry::Assistant(conv) => { + set_first(&mut session_id, conv.session_id.clone()); + set_first(&mut tool_version, conv.version.clone()); + set_first(&mut model_name, conv.message.model.clone()); + set_first(&mut git_branch, conv.git_branch.clone()); + if let Ok(ts) = parse_timestamp(&conv.timestamp) { + process_assistant_entry(&conv, ts, &mut 
events, &mut tool_use_info); + } + } + } + } + + let session_id = session_id.unwrap_or_else(|| { + path.file_stem() + .and_then(|stem| stem.to_str()) + .unwrap_or("unknown") + .to_string() + }); + + let agent = Agent { + provider: "anthropic".to_string(), + model: model_name.unwrap_or_else(|| "unknown".to_string()), + tool: "claude-code".to_string(), + tool_version, + }; + + let (created_at, updated_at) = + if let (Some(first), Some(last)) = (events.first(), events.last()) { + (first.timestamp, last.timestamp) + } else { + let now = Utc::now(); + (now, now) + }; + + let parent_session_id = meta + .as_ref() + .and_then(|value| value.parent_session_id.clone()) + .map(|value| value.trim().to_string()) + .filter(|value| !value.is_empty()); + let mut attributes = HashMap::from([( + "source_path".to_string(), + serde_json::Value::String(path.to_string_lossy().to_string()), + )]); + attributes.insert( + "session_role".to_string(), + serde_json::Value::String(if parent_session_id.is_some() { + "auxiliary".to_string() + } else { + "primary".to_string() + }), + ); + if let Some(parent_session_id) = parent_session_id.as_ref() { + attributes.insert( + "parent_session_id".to_string(), + serde_json::Value::String(parent_session_id.clone()), + ); + } + if let Some(branch) = git_branch.as_ref() { + attributes.insert( + "git_branch".to_string(), + serde_json::Value::String(branch.clone()), + ); + } + + let context = SessionContext { + title: None, + description: None, + tags: vec!["claude-code".to_string()], + created_at, + updated_at, + related_session_ids: parent_session_id.clone().into_iter().collect(), + attributes, + }; + + let mut session = Session::new(session_id, agent); + session.context = context; + session.events = events; + session.recompute_stats(); + Ok(session) +} diff --git a/crates/parsers/src/claude_code/transform.rs b/crates/parsers/src/claude_code/transform.rs index a435ba39..184203a3 100644 --- a/crates/parsers/src/claude_code/transform.rs +++ 
b/crates/parsers/src/claude_code/transform.rs @@ -4,8 +4,8 @@ use opensession_core::trace::{Content, ContentBlock, EventType}; // ── Content transformation helpers ────────────────────────────────────────── /// Extract raw text from ToolResult content -pub(super) fn tool_result_content_to_string(content: &super::parse::ToolResultContent) -> String { - use super::parse::{ToolResultBlock, ToolResultContent}; +pub(super) fn tool_result_content_to_string(content: &super::raw::ToolResultContent) -> String { + use super::raw::{ToolResultBlock, ToolResultContent}; match content { ToolResultContent::Text(text) => text.clone(), ToolResultContent::Blocks(blocks) => { @@ -23,7 +23,7 @@ pub(super) fn tool_result_content_to_string(content: &super::parse::ToolResultCo /// Build structured Content for a ToolResult event (delegates to common helper). pub(super) fn build_cc_tool_result_content( - raw_content: &super::parse::ToolResultContent, + raw_content: &super::raw::ToolResultContent, tool_info: &ToolUseInfo, ) -> Content { let raw_text = tool_result_content_to_string(raw_content); @@ -231,7 +231,7 @@ pub(super) fn tool_use_content(name: &str, input: &serde_json::Value) -> Content #[cfg(test)] mod tests { - use super::super::parse::ToolResultContent; + use super::super::raw::ToolResultContent; use super::*; use crate::common::ToolUseInfo; @@ -304,7 +304,7 @@ mod tests { #[test] fn test_tool_result_content_blocks() { - use super::super::parse::ToolResultBlock; + use super::super::raw::ToolResultBlock; let content = ToolResultContent::Blocks(vec![ToolResultBlock::Text { text: "line1".to_string(), }]); diff --git a/crates/parsers/src/cursor/mod.rs b/crates/parsers/src/cursor/mod.rs index e555cace..9992041d 100644 --- a/crates/parsers/src/cursor/mod.rs +++ b/crates/parsers/src/cursor/mod.rs @@ -1,5 +1,7 @@ mod parse; +mod time; mod transform; +mod types; use crate::SessionParser; use anyhow::Result; diff --git a/crates/parsers/src/cursor/parse.rs 
b/crates/parsers/src/cursor/parse.rs index 5f7deba3..5940b249 100644 --- a/crates/parsers/src/cursor/parse.rs +++ b/crates/parsers/src/cursor/parse.rs @@ -1,162 +1,19 @@ +use super::time::parse_timestamp; use super::transform::{ classify_cursor_tool, extract_model_from_signature, infer_provider, parse_tool_result, resolve_tool_name, tool_call_content, }; +use super::types::{RawBubble, RawComposerData, RawComposerIndex, RawComposerMeta}; +#[cfg(test)] +use super::types::{RawBubbleHeader, RawThinking, RawToolFormerData}; use crate::common::{attach_semantic_attrs, attach_source_attrs, infer_tool_kind}; use anyhow::{Context, Result}; use chrono::{DateTime, Utc}; use opensession_core::trace::{Agent, Content, Event, EventType, Session, SessionContext}; use rusqlite::Connection; -use serde::Deserialize; use std::collections::{HashMap, HashSet}; use std::path::{Path, PathBuf}; -// ── Raw deserialization types for Cursor's composerData JSON ──────────────── - -/// Serde helper: deserialize a value that may be a JSON string or number into an Option. -/// Cursor stores timestamps as integers in some versions and strings in others. -mod string_or_number { - use serde::{self, Deserialize, Deserializer}; - - pub fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> - where - D: Deserializer<'de>, - { - #[derive(Deserialize)] - #[serde(untagged)] - enum StringOrNumber { - String(String), - Integer(i64), - Float(f64), - } - - match Option::::deserialize(deserializer)? 
{ - Some(StringOrNumber::String(s)) => Ok(Some(s)), - Some(StringOrNumber::Integer(n)) => Ok(Some(n.to_string())), - Some(StringOrNumber::Float(n)) => Ok(Some(n.to_string())), - None => Ok(None), - } - } -} - -/// Top-level composerData JSON stored in cursorDiskKV -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -struct RawComposerData { - composer_id: String, - #[serde(default)] - name: Option, - #[serde(default, deserialize_with = "string_or_number::deserialize")] - created_at: Option, - #[serde(default, deserialize_with = "string_or_number::deserialize")] - last_updated_at: Option, - #[serde(default)] - conversation: Vec, - #[serde(default)] - is_agentic: Option, - /// Cursor v3: version field for format detection - #[serde(default, rename = "_v")] - version: Option, - /// Cursor v3: bubble headers (conversation stored separately in bubbleId:* keys) - #[serde(default)] - full_conversation_headers_only: Option>, -} - -/// Cursor modern workspace index: `composer.composerData` (metadata-only list). -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -struct RawComposerIndex { - #[serde(default)] - all_composers: Vec, -} - -/// Metadata-only composer entry referenced by modern Cursor workspace DBs. 
-#[derive(Debug, Clone, Deserialize)] -#[serde(rename_all = "camelCase")] -struct RawComposerMeta { - composer_id: String, - #[serde(default)] - name: Option, - #[serde(default, deserialize_with = "string_or_number::deserialize")] - created_at: Option, - #[serde(default, deserialize_with = "string_or_number::deserialize")] - last_updated_at: Option, -} - -/// Cursor v3: header reference to a separately-stored bubble -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -struct RawBubbleHeader { - bubble_id: String, - #[serde(rename = "type")] - #[allow(dead_code)] - bubble_type: u8, -} - -/// A single "bubble" in the conversation array -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -#[allow(dead_code)] -struct RawBubble { - /// 1 = user, 2 = assistant - #[serde(rename = "type")] - bubble_type: u8, - #[serde(default)] - bubble_id: Option, - #[serde(default)] - text: Option, - #[serde(default)] - thinking: Option, - #[serde(default)] - tool_former_data: Option, - #[serde(default)] - timing_info: Option, - #[serde(default)] - model_type: Option, - #[serde(default)] - checkpoint: Option, -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -struct RawThinking { - #[serde(default)] - text: Option, - #[serde(default)] - signature: Option, -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -struct RawToolFormerData { - /// Numeric tool ID (e.g., 7 = edit_file) - #[serde(default)] - tool: Option, - #[serde(default)] - name: Option, - #[serde(default)] - status: Option, - #[serde(default)] - raw_args: Option, - #[serde(default)] - result: Option, - #[serde(default)] - user_decision: Option, -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -struct RawTimingInfo { - #[serde(default)] - start_time: Option, - #[serde(default)] - end_time: Option, - #[serde(default)] - client_start_time: Option, - #[serde(default)] - client_end_time: Option, -} - // ── Core parsing logic 
───────────────────────────────────────────────────── pub(super) fn parse_cursor_vscdb(path: &Path) -> Result { @@ -831,25 +688,6 @@ fn convert_bubbles_to_events( // ── Timestamp parsing ────────────────────────────────────────────────────── -fn parse_timestamp(ts: &str) -> Result> { - // ISO 8601 format: "2025-10-03T12:34:56.789Z" - DateTime::parse_from_rfc3339(ts) - .map(|dt| dt.with_timezone(&Utc)) - .or_else(|_| { - // Try without timezone - chrono::NaiveDateTime::parse_from_str(ts, "%Y-%m-%dT%H:%M:%S%.f") - .map(|ndt| ndt.and_utc()) - }) - .or_else(|_| { - // Try as epoch milliseconds (sometimes Cursor uses numeric timestamps as strings) - ts.parse::() - .ok() - .and_then(|ms| DateTime::from_timestamp_millis(ms as i64)) - .ok_or_else(|| anyhow::anyhow!("Not a timestamp")) - }) - .with_context(|| format!("Failed to parse Cursor timestamp: {}", ts)) -} - #[cfg(test)] mod tests { use super::*; diff --git a/crates/parsers/src/cursor/time.rs b/crates/parsers/src/cursor/time.rs new file mode 100644 index 00000000..48e02a97 --- /dev/null +++ b/crates/parsers/src/cursor/time.rs @@ -0,0 +1,18 @@ +use anyhow::{Context, Result}; +use chrono::{DateTime, Utc}; + +pub(super) fn parse_timestamp(ts: &str) -> Result> { + DateTime::parse_from_rfc3339(ts) + .map(|dt| dt.with_timezone(&Utc)) + .or_else(|_| { + chrono::NaiveDateTime::parse_from_str(ts, "%Y-%m-%dT%H:%M:%S%.f") + .map(|ndt| ndt.and_utc()) + }) + .or_else(|_| { + ts.parse::() + .ok() + .and_then(|ms| DateTime::from_timestamp_millis(ms as i64)) + .ok_or_else(|| anyhow::anyhow!("Not a timestamp")) + }) + .with_context(|| format!("Failed to parse Cursor timestamp: {ts}")) +} diff --git a/crates/parsers/src/cursor/types.rs b/crates/parsers/src/cursor/types.rs new file mode 100644 index 00000000..3b9f6c8a --- /dev/null +++ b/crates/parsers/src/cursor/types.rs @@ -0,0 +1,134 @@ +use serde::Deserialize; + +pub(super) mod string_or_number { + use serde::{self, Deserialize, Deserializer}; + + pub fn deserialize<'de, 
D>(deserializer: D) -> Result, D::Error> + where + D: Deserializer<'de>, + { + #[derive(Deserialize)] + #[serde(untagged)] + enum StringOrNumber { + String(String), + Integer(i64), + Float(f64), + } + + match Option::::deserialize(deserializer)? { + Some(StringOrNumber::String(value)) => Ok(Some(value)), + Some(StringOrNumber::Integer(value)) => Ok(Some(value.to_string())), + Some(StringOrNumber::Float(value)) => Ok(Some(value.to_string())), + None => Ok(None), + } + } +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub(super) struct RawComposerData { + pub(super) composer_id: String, + #[serde(default)] + pub(super) name: Option, + #[serde(default, deserialize_with = "string_or_number::deserialize")] + pub(super) created_at: Option, + #[serde(default, deserialize_with = "string_or_number::deserialize")] + pub(super) last_updated_at: Option, + #[serde(default)] + pub(super) conversation: Vec, + #[serde(default)] + pub(super) is_agentic: Option, + #[serde(default, rename = "_v")] + pub(super) version: Option, + #[serde(default)] + pub(super) full_conversation_headers_only: Option>, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub(super) struct RawComposerIndex { + #[serde(default)] + pub(super) all_composers: Vec, +} + +#[derive(Debug, Clone, Deserialize)] +#[serde(rename_all = "camelCase")] +pub(super) struct RawComposerMeta { + pub(super) composer_id: String, + #[serde(default)] + pub(super) name: Option, + #[serde(default, deserialize_with = "string_or_number::deserialize")] + pub(super) created_at: Option, + #[serde(default, deserialize_with = "string_or_number::deserialize")] + pub(super) last_updated_at: Option, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub(super) struct RawBubbleHeader { + pub(super) bubble_id: String, + #[serde(rename = "type")] + #[allow(dead_code)] + pub(super) bubble_type: u8, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] 
+#[allow(dead_code)] +pub(super) struct RawBubble { + #[serde(rename = "type")] + pub(super) bubble_type: u8, + #[serde(default)] + pub(super) bubble_id: Option, + #[serde(default)] + pub(super) text: Option, + #[serde(default)] + pub(super) thinking: Option, + #[serde(default)] + pub(super) tool_former_data: Option, + #[serde(default)] + pub(super) timing_info: Option, + #[serde(default)] + pub(super) model_type: Option, + #[serde(default)] + pub(super) checkpoint: Option, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub(super) struct RawThinking { + #[serde(default)] + pub(super) text: Option, + #[serde(default)] + pub(super) signature: Option, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub(super) struct RawToolFormerData { + #[serde(default)] + pub(super) tool: Option, + #[serde(default)] + pub(super) name: Option, + #[serde(default)] + pub(super) status: Option, + #[serde(default)] + pub(super) raw_args: Option, + #[serde(default)] + pub(super) result: Option, + #[serde(default)] + pub(super) user_decision: Option, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub(super) struct RawTimingInfo { + #[serde(default)] + pub(super) start_time: Option, + #[serde(default)] + pub(super) end_time: Option, + #[serde(default)] + pub(super) client_start_time: Option, + #[serde(default)] + pub(super) client_end_time: Option, +} diff --git a/crates/parsers/src/gemini.rs b/crates/parsers/src/gemini.rs index d4f082aa..7867096c 100644 --- a/crates/parsers/src/gemini.rs +++ b/crates/parsers/src/gemini.rs @@ -892,359 +892,4 @@ fn parse_timestamp(ts: &str) -> Result> { } #[cfg(test)] -mod tests { - use super::*; - use std::fs::write; - - #[test] - fn test_can_parse() { - let parser = GeminiParser; - assert!(parser.can_parse(Path::new( - "/Users/test/.gemini/tmp/abc123/chats/session-2026-02-09T15-11-8205f040.json" - ))); - assert!(parser.can_parse(Path::new( - 
"/Users/test/.gemini/tmp/abc123/chats/session-2026-02-09T15-11-8205f040.jsonl" - ))); - assert!(!parser.can_parse(Path::new("/tmp/random.json"))); - assert!(!parser.can_parse(Path::new("/tmp/random.jsonl"))); - assert!(!parser.can_parse(Path::new("/Users/test/.gemini/settings.json"))); - } - - #[test] - fn test_parse_session() { - let json = r#"{ - "sessionId": "test-123", - "projectHash": "abc", - "startTime": "2026-02-09T15:11:31.319Z", - "lastUpdated": "2026-02-09T15:14:17.522Z", - "messages": [ - { - "id": "m1", - "timestamp": "2026-02-09T15:11:31.319Z", - "type": "user", - "content": "hello gemini" - }, - { - "id": "m2", - "timestamp": "2026-02-09T15:12:00.000Z", - "type": "gemini", - "content": "Hello! How can I help?", - "thoughts": [ - {"subject": "Greeting", "description": "User says hello"} - ], - "tokens": {"input": 100, "output": 50, "total": 150}, - "model": "gemini-2.5-pro" - } - ] - }"#; - - let session: GeminiSession = serde_json::from_str(json).unwrap(); - assert_eq!(session.session_id, "test-123"); - assert_eq!(session.messages.len(), 2); - assert_eq!(session.messages[0].msg_type, "user"); - assert_eq!(session.messages[1].msg_type, "gemini"); - assert_eq!(session.messages[1].model.as_deref(), Some("gemini-2.5-pro")); - assert_eq!(session.messages[1].thoughts.as_ref().unwrap().len(), 1); - } - - #[test] - fn test_parse_error_message() { - let json = r#"{ - "sessionId": "test-err", - "projectHash": "abc", - "startTime": "2026-02-09T15:10:00.000Z", - "lastUpdated": "2026-02-09T15:10:30.000Z", - "messages": [ - { - "id": "m1", - "timestamp": "2026-02-09T15:10:00.000Z", - "type": "user", - "content": "test" - }, - { - "id": "m2", - "timestamp": "2026-02-09T15:10:30.000Z", - "type": "error", - "content": "[API Error: No capacity]" - } - ] - }"#; - - let session: GeminiSession = serde_json::from_str(json).unwrap(); - assert_eq!(session.messages[1].msg_type, "error"); - } - - #[test] - fn test_parse_jsonl_records() { - // Test that GeminiRecord types 
deserialize correctly - let metadata_line = r#"{"type":"session_metadata","sessionId":"sess-1","startTime":"2026-02-09T15:00:00.000Z"}"#; - let record: GeminiRecord = serde_json::from_str(metadata_line).unwrap(); - match record { - GeminiRecord::SessionMetadata { - session_id, - start_time, - } => { - assert_eq!(session_id, "sess-1"); - assert_eq!(start_time.as_deref(), Some("2026-02-09T15:00:00.000Z")); - } - _ => panic!("Expected SessionMetadata"), - } - - let user_line = r#"{"type":"user","id":"u1","timestamp":"2026-02-09T15:01:00.000Z","content":[{"type":"text","text":"hello gemini"}]}"#; - let record: GeminiRecord = serde_json::from_str(user_line).unwrap(); - match record { - GeminiRecord::User { id, content, .. } => { - assert_eq!(id.as_deref(), Some("u1")); - assert_eq!(content.len(), 1); - match &content[0] { - GeminiContentBlock::Text { text } => assert_eq!(text, "hello gemini"), - _ => panic!("Expected Text block"), - } - } - _ => panic!("Expected User"), - } - - let gemini_line = r#"{"type":"gemini","id":"g1","timestamp":"2026-02-09T15:02:00.000Z","content":[{"type":"thinking","text":"analyzing..."},{"type":"text","text":"Here is my answer"},{"type":"functionCall","name":"readFile","args":{"path":"/tmp/x.rs"}}],"model":"gemini-2.5-pro"}"#; - let record: GeminiRecord = serde_json::from_str(gemini_line).unwrap(); - match record { - GeminiRecord::Gemini { - id, content, model, .. - } => { - assert_eq!(id.as_deref(), Some("g1")); - assert_eq!(model.as_deref(), Some("gemini-2.5-pro")); - assert_eq!(content.len(), 3); - assert!(matches!(&content[0], GeminiContentBlock::Thinking { .. })); - assert!(matches!(&content[1], GeminiContentBlock::Text { .. })); - assert!(matches!( - &content[2], - GeminiContentBlock::FunctionCall { .. 
} - )); - } - _ => panic!("Expected Gemini"), - } - - let update_line = - r#"{"type":"message_update","id":"g1","tokens":{"input":100,"output":50,"total":150}}"#; - let record: GeminiRecord = serde_json::from_str(update_line).unwrap(); - match record { - GeminiRecord::MessageUpdate { id, tokens } => { - assert_eq!(id.as_deref(), Some("g1")); - let tokens = tokens.unwrap(); - assert_eq!(tokens.input, Some(100)); - assert_eq!(tokens.output, Some(50)); - } - _ => panic!("Expected MessageUpdate"), - } - } - - #[test] - fn test_parse_jsonl_function_response() { - let user_with_response = r#"{"type":"user","id":"u2","timestamp":"2026-02-09T15:03:00.000Z","content":[{"type":"functionResponse","name":"readFile","response":{"content":"fn main() {}"}}]}"#; - let record: GeminiRecord = serde_json::from_str(user_with_response).unwrap(); - match record { - GeminiRecord::User { content, .. } => { - assert_eq!(content.len(), 1); - match &content[0] { - GeminiContentBlock::FunctionResponse { name, response } => { - assert_eq!(name, "readFile"); - assert!(response.is_some()); - } - _ => panic!("Expected FunctionResponse block"), - } - } - _ => panic!("Expected User"), - } - } - - #[test] - fn test_parse_jsonl_unknown_content_block() { - // Unknown content block types should be deserialized without error - let gemini_line = r#"{"type":"gemini","id":"g2","content":[{"type":"unknownFutureType","data":"something"}]}"#; - let record: GeminiRecord = serde_json::from_str(gemini_line).unwrap(); - match record { - GeminiRecord::Gemini { content, .. 
} => { - assert_eq!(content.len(), 1); - assert!(matches!(content[0], GeminiContentBlock::Unknown)); - } - _ => panic!("Expected Gemini"), - } - } - - #[test] - fn test_info_message_skipped() { - let json = r#"{ - "sessionId": "test-info", - "projectHash": "abc", - "startTime": "2026-02-09T15:10:00.000Z", - "lastUpdated": "2026-02-09T15:10:00.000Z", - "messages": [ - { - "id": "m1", - "timestamp": "2026-02-09T15:10:00.000Z", - "type": "info", - "content": "Authentication succeeded" - } - ] - }"#; - - // info messages should deserialize but produce no events - let session: GeminiSession = serde_json::from_str(json).unwrap(); - assert_eq!(session.messages.len(), 1); - assert_eq!(session.messages[0].msg_type, "info"); - } - - #[test] - fn test_parse_legacy_content_parts_variant() { - let content = GeminiMessageContent::Parts(vec![serde_json::json!({ - "text": "hello from parts" - })]); - let parsed = parse_legacy_content(Some(&content)); - assert_eq!(parsed.schema_variant, "parts"); - assert_eq!(parsed.texts, vec!["hello from parts".to_string()]); - } - - #[test] - fn test_parse_legacy_content_single_part_variant() { - let content = GeminiMessageContent::Part(serde_json::json!({ - "functionCall": { - "name": "read_file", - "args": {"path": "/tmp/a.txt"} - } - })); - let parsed = parse_legacy_content(Some(&content)); - assert_eq!(parsed.schema_variant, "part"); - assert!(parsed.texts.is_empty()); - } - - #[test] - fn test_parse_json_tool_calls_field() { - let dir = std::env::temp_dir().join(format!( - "opensession-gemini-toolcalls-{}", - std::time::SystemTime::now() - .duration_since(std::time::UNIX_EPOCH) - .expect("clock") - .as_nanos() - )); - std::fs::create_dir_all(&dir).unwrap(); - let path = dir.join("session-2026-02-16T09-10-toolcalls.json"); - let json = r#"{ - "sessionId": "toolcalls-123", - "projectHash": "abc", - "startTime": "2026-02-16T09:10:00.000Z", - "lastUpdated": "2026-02-16T09:10:10.000Z", - "messages": [ - { - "id": "u1", - "timestamp": 
"2026-02-16T09:10:01.000Z", - "type": "user", - "content": [{"text":"version"}] - }, - { - "id": "g1", - "timestamp": "2026-02-16T09:10:03.000Z", - "type": "gemini", - "content": "running tool", - "model": "gemini-2.5-flash", - "toolCalls": [ - { - "id": "call-1", - "name": "run_shell_command", - "args": {"command":"git status"}, - "result": [ - { - "functionResponse": { - "id": "call-1", - "name": "run_shell_command", - "response": {"output":"clean"} - } - } - ], - "status": "success" - } - ], - "tokens": {"input": 11, "output": 7, "total": 18} - } - ] - }"#; - write(&path, json).unwrap(); - - let parsed = parse_json(&path).expect("parse gemini json with toolCalls"); - assert!(parsed.events.iter().any(|event| { - matches!( - &event.event_type, - EventType::ToolCall { name } if name == "run_shell_command" - ) - })); - assert!(parsed.events.iter().any(|event| { - matches!( - &event.event_type, - EventType::ToolResult { name, call_id, is_error } - if name == "run_shell_command" - && call_id.as_deref() == Some("call-1") - && !is_error - ) - })); - assert!(parsed.events.iter().any(|event| { - event - .attributes - .get("source.schema_version") - .and_then(|v| v.as_str()) - == Some("gemini-json-v3-toolcalls") - })); - } - - #[test] - fn test_parse_json_parts_content_file() { - let dir = std::env::temp_dir().join(format!( - "opensession-gemini-parts-{}", - std::time::SystemTime::now() - .duration_since(std::time::UNIX_EPOCH) - .expect("clock") - .as_nanos() - )); - std::fs::create_dir_all(&dir).unwrap(); - let path = dir.join("session-2026-02-14T09-36-test.json"); - let json = r#"{ - "sessionId": "parts-123", - "startTime": "2026-02-14T09:36:00.000Z", - "lastUpdated": "2026-02-14T09:36:05.000Z", - "messages": [ - { - "id": "u1", - "timestamp": "2026-02-14T09:36:01.000Z", - "type": "user", - "content": [{"text":"inspect this repo"}] - }, - { - "id": "g1", - "timestamp": "2026-02-14T09:36:03.000Z", - "type": "gemini", - "content": [{"text":"done"}], - "model": 
"gemini-2.5-pro" - } - ] - }"#; - write(&path, json).unwrap(); - - let parsed = parse_json(&path).expect("parse gemini json"); - assert_eq!(parsed.session_id, "parts-123"); - assert!( - parsed - .events - .iter() - .any(|e| matches!(e.event_type, EventType::UserMessage)) - ); - assert!( - parsed - .events - .iter() - .any(|e| matches!(e.event_type, EventType::AgentMessage)) - ); - assert!(parsed.events.iter().all(|e| { - e.attributes - .get("source.schema_version") - .and_then(|v| v.as_str()) - .is_some() - })); - } -} +mod tests; diff --git a/crates/parsers/src/gemini/tests.rs b/crates/parsers/src/gemini/tests.rs new file mode 100644 index 00000000..01587c9a --- /dev/null +++ b/crates/parsers/src/gemini/tests.rs @@ -0,0 +1,356 @@ +use super::*; +use std::fs::write; + +#[test] +fn test_can_parse() { + let parser = GeminiParser; + assert!(parser.can_parse(Path::new( + "/Users/test/.gemini/tmp/abc123/chats/session-2026-02-09T15-11-8205f040.json" + ))); + assert!(parser.can_parse(Path::new( + "/Users/test/.gemini/tmp/abc123/chats/session-2026-02-09T15-11-8205f040.jsonl" + ))); + assert!(!parser.can_parse(Path::new("/tmp/random.json"))); + assert!(!parser.can_parse(Path::new("/tmp/random.jsonl"))); + assert!(!parser.can_parse(Path::new("/Users/test/.gemini/settings.json"))); +} + +#[test] +fn test_parse_session() { + let json = r#"{ + "sessionId": "test-123", + "projectHash": "abc", + "startTime": "2026-02-09T15:11:31.319Z", + "lastUpdated": "2026-02-09T15:14:17.522Z", + "messages": [ + { + "id": "m1", + "timestamp": "2026-02-09T15:11:31.319Z", + "type": "user", + "content": "hello gemini" + }, + { + "id": "m2", + "timestamp": "2026-02-09T15:12:00.000Z", + "type": "gemini", + "content": "Hello! 
How can I help?", + "thoughts": [ + {"subject": "Greeting", "description": "User says hello"} + ], + "tokens": {"input": 100, "output": 50, "total": 150}, + "model": "gemini-2.5-pro" + } + ] + }"#; + + let session: GeminiSession = serde_json::from_str(json).unwrap(); + assert_eq!(session.session_id, "test-123"); + assert_eq!(session.messages.len(), 2); + assert_eq!(session.messages[0].msg_type, "user"); + assert_eq!(session.messages[1].msg_type, "gemini"); + assert_eq!(session.messages[1].model.as_deref(), Some("gemini-2.5-pro")); + assert_eq!( + session.messages[1] + .thoughts + .as_ref() + .expect("thoughts") + .len(), + 1 + ); +} + +#[test] +fn test_parse_error_message() { + let json = r#"{ + "sessionId": "test-err", + "projectHash": "abc", + "startTime": "2026-02-09T15:10:00.000Z", + "lastUpdated": "2026-02-09T15:10:30.000Z", + "messages": [ + { + "id": "m1", + "timestamp": "2026-02-09T15:10:00.000Z", + "type": "user", + "content": "test" + }, + { + "id": "m2", + "timestamp": "2026-02-09T15:10:30.000Z", + "type": "error", + "content": "[API Error: No capacity]" + } + ] + }"#; + + let session: GeminiSession = serde_json::from_str(json).unwrap(); + assert_eq!(session.messages[1].msg_type, "error"); +} + +#[test] +fn test_parse_jsonl_records() { + let metadata_line = r#"{"type":"session_metadata","sessionId":"sess-1","startTime":"2026-02-09T15:00:00.000Z"}"#; + let record: GeminiRecord = serde_json::from_str(metadata_line).unwrap(); + match record { + GeminiRecord::SessionMetadata { + session_id, + start_time, + } => { + assert_eq!(session_id, "sess-1"); + assert_eq!(start_time.as_deref(), Some("2026-02-09T15:00:00.000Z")); + } + _ => panic!("Expected SessionMetadata"), + } + + let user_line = r#"{"type":"user","id":"u1","timestamp":"2026-02-09T15:01:00.000Z","content":[{"type":"text","text":"hello gemini"}]}"#; + let record: GeminiRecord = serde_json::from_str(user_line).unwrap(); + match record { + GeminiRecord::User { id, content, .. 
} => { + assert_eq!(id.as_deref(), Some("u1")); + assert_eq!(content.len(), 1); + match &content[0] { + GeminiContentBlock::Text { text } => assert_eq!(text, "hello gemini"), + _ => panic!("Expected Text block"), + } + } + _ => panic!("Expected User"), + } + + let gemini_line = r#"{"type":"gemini","id":"g1","timestamp":"2026-02-09T15:02:00.000Z","content":[{"type":"thinking","text":"analyzing..."},{"type":"text","text":"Here is my answer"},{"type":"functionCall","name":"readFile","args":{"path":"/tmp/x.rs"}}],"model":"gemini-2.5-pro"}"#; + let record: GeminiRecord = serde_json::from_str(gemini_line).unwrap(); + match record { + GeminiRecord::Gemini { + id, content, model, .. + } => { + assert_eq!(id.as_deref(), Some("g1")); + assert_eq!(model.as_deref(), Some("gemini-2.5-pro")); + assert_eq!(content.len(), 3); + assert!(matches!(&content[0], GeminiContentBlock::Thinking { .. })); + assert!(matches!(&content[1], GeminiContentBlock::Text { .. })); + assert!(matches!( + &content[2], + GeminiContentBlock::FunctionCall { .. } + )); + } + _ => panic!("Expected Gemini"), + } + + let update_line = + r#"{"type":"message_update","id":"g1","tokens":{"input":100,"output":50,"total":150}}"#; + let record: GeminiRecord = serde_json::from_str(update_line).unwrap(); + match record { + GeminiRecord::MessageUpdate { id, tokens } => { + assert_eq!(id.as_deref(), Some("g1")); + let tokens = tokens.expect("tokens"); + assert_eq!(tokens.input, Some(100)); + assert_eq!(tokens.output, Some(50)); + } + _ => panic!("Expected MessageUpdate"), + } +} + +#[test] +fn test_parse_jsonl_function_response() { + let user_with_response = r#"{"type":"user","id":"u2","timestamp":"2026-02-09T15:03:00.000Z","content":[{"type":"functionResponse","name":"readFile","response":{"content":"fn main() {}"}}]}"#; + let record: GeminiRecord = serde_json::from_str(user_with_response).unwrap(); + match record { + GeminiRecord::User { content, .. 
} => { + assert_eq!(content.len(), 1); + match &content[0] { + GeminiContentBlock::FunctionResponse { name, response } => { + assert_eq!(name, "readFile"); + assert!(response.is_some()); + } + _ => panic!("Expected FunctionResponse block"), + } + } + _ => panic!("Expected User"), + } +} + +#[test] +fn test_parse_jsonl_unknown_content_block() { + let gemini_line = r#"{"type":"gemini","id":"g2","content":[{"type":"unknownFutureType","data":"something"}]}"#; + let record: GeminiRecord = serde_json::from_str(gemini_line).unwrap(); + match record { + GeminiRecord::Gemini { content, .. } => { + assert_eq!(content.len(), 1); + assert!(matches!(content[0], GeminiContentBlock::Unknown)); + } + _ => panic!("Expected Gemini"), + } +} + +#[test] +fn test_info_message_skipped() { + let json = r#"{ + "sessionId": "test-info", + "projectHash": "abc", + "startTime": "2026-02-09T15:10:00.000Z", + "lastUpdated": "2026-02-09T15:10:00.000Z", + "messages": [ + { + "id": "m1", + "timestamp": "2026-02-09T15:10:00.000Z", + "type": "info", + "content": "Authentication succeeded" + } + ] + }"#; + + let session: GeminiSession = serde_json::from_str(json).unwrap(); + assert_eq!(session.messages.len(), 1); + assert_eq!(session.messages[0].msg_type, "info"); +} + +#[test] +fn test_parse_legacy_content_parts_variant() { + let content = GeminiMessageContent::Parts(vec![serde_json::json!({ + "text": "hello from parts" + })]); + let parsed = parse_legacy_content(Some(&content)); + assert_eq!(parsed.schema_variant, "parts"); + assert_eq!(parsed.texts, vec!["hello from parts".to_string()]); +} + +#[test] +fn test_parse_legacy_content_single_part_variant() { + let content = GeminiMessageContent::Part(serde_json::json!({ + "functionCall": { + "name": "read_file", + "args": {"path": "/tmp/a.txt"} + } + })); + let parsed = parse_legacy_content(Some(&content)); + assert_eq!(parsed.schema_variant, "part"); + assert!(parsed.texts.is_empty()); +} + +#[test] +fn test_parse_json_tool_calls_field() { + let dir 
= std::env::temp_dir().join(format!( + "opensession-gemini-toolcalls-{}", + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .expect("clock") + .as_nanos() + )); + std::fs::create_dir_all(&dir).unwrap(); + let path = dir.join("session-2026-02-16T09-10-toolcalls.json"); + let json = r#"{ + "sessionId": "toolcalls-123", + "projectHash": "abc", + "startTime": "2026-02-16T09:10:00.000Z", + "lastUpdated": "2026-02-16T09:10:10.000Z", + "messages": [ + { + "id": "u1", + "timestamp": "2026-02-16T09:10:01.000Z", + "type": "user", + "content": [{"text":"version"}] + }, + { + "id": "g1", + "timestamp": "2026-02-16T09:10:03.000Z", + "type": "gemini", + "content": "running tool", + "model": "gemini-2.5-flash", + "toolCalls": [ + { + "id": "call-1", + "name": "run_shell_command", + "args": {"command":"git status"}, + "result": [ + { + "functionResponse": { + "id": "call-1", + "name": "run_shell_command", + "response": {"output":"clean"} + } + } + ], + "status": "success" + } + ], + "tokens": {"input": 11, "output": 7, "total": 18} + } + ] + }"#; + write(&path, json).unwrap(); + + let parsed = parse_json(&path).expect("parse gemini json with toolCalls"); + assert!(parsed.events.iter().any(|event| { + matches!(&event.event_type, EventType::ToolCall { name } if name == "run_shell_command") + })); + assert!(parsed.events.iter().any(|event| { + matches!( + &event.event_type, + EventType::ToolResult { name, call_id, is_error } + if name == "run_shell_command" + && call_id.as_deref() == Some("call-1") + && !is_error + ) + })); + assert!(parsed.events.iter().any(|event| { + event + .attributes + .get("source.schema_version") + .and_then(|value| value.as_str()) + == Some("gemini-json-v3-toolcalls") + })); +} + +#[test] +fn test_parse_json_parts_content_file() { + let dir = std::env::temp_dir().join(format!( + "opensession-gemini-parts-{}", + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .expect("clock") + .as_nanos() + )); + 
std::fs::create_dir_all(&dir).unwrap(); + let path = dir.join("session-2026-02-14T09-36-test.json"); + let json = r#"{ + "sessionId": "parts-123", + "startTime": "2026-02-14T09:36:00.000Z", + "lastUpdated": "2026-02-14T09:36:05.000Z", + "messages": [ + { + "id": "u1", + "timestamp": "2026-02-14T09:36:01.000Z", + "type": "user", + "content": [{"text":"inspect this repo"}] + }, + { + "id": "g1", + "timestamp": "2026-02-14T09:36:03.000Z", + "type": "gemini", + "content": [{"text":"done"}], + "model": "gemini-2.5-pro" + } + ] + }"#; + write(&path, json).unwrap(); + + let parsed = parse_json(&path).expect("parse gemini json"); + assert_eq!(parsed.session_id, "parts-123"); + assert!( + parsed + .events + .iter() + .any(|event| matches!(event.event_type, EventType::UserMessage)) + ); + assert!( + parsed + .events + .iter() + .any(|event| matches!(event.event_type, EventType::AgentMessage)) + ); + assert!(parsed.events.iter().all(|event| { + event + .attributes + .get("source.schema_version") + .and_then(|value| value.as_str()) + .is_some() + })); +} diff --git a/crates/parsers/src/lib.rs b/crates/parsers/src/lib.rs index 4e9f44a5..d3d46277 100644 --- a/crates/parsers/src/lib.rs +++ b/crates/parsers/src/lib.rs @@ -1,5 +1,4 @@ pub mod claude_code; -pub mod discover; pub mod incremental; mod amp; diff --git a/crates/parsers/src/opencode.rs b/crates/parsers/src/opencode.rs index 9280b11c..8c2dabc1 100644 --- a/crates/parsers/src/opencode.rs +++ b/crates/parsers/src/opencode.rs @@ -987,487 +987,4 @@ fn json_find_path(value: &serde_json::Value) -> Option { } #[cfg(test)] -mod tests { - use super::*; - use std::fs::{create_dir_all, write}; - use std::time::{SystemTime, UNIX_EPOCH}; - - #[test] - fn test_millis_to_datetime() { - let dt = millis_to_datetime(1753359830903); - assert!(dt.year() >= 2025); - } - - #[test] - fn test_classify_bash() { - let input = Some(serde_json::json!({"command": "ls -la"})); - let et = classify_opencode_tool("bash", &input); - match et { - 
EventType::ShellCommand { command, .. } => assert_eq!(command, "ls -la"), - _ => panic!("Expected ShellCommand"), - } - } - - #[test] - fn test_classify_read_with_camel_case_path() { - let input = Some(serde_json::json!({"filePath": "/tmp/demo.rs"})); - let et = classify_opencode_tool("read", &input); - match et { - EventType::FileRead { path } => assert_eq!(path, "/tmp/demo.rs"), - _ => panic!("Expected FileRead"), - } - } - - #[test] - fn test_tool_status_terminal_variants() { - assert!(is_terminal_tool_status("completed")); - assert!(is_terminal_tool_status("FAILED")); - assert!(is_terminal_tool_status("canceled")); - assert!(!is_terminal_tool_status("running")); - } - - #[test] - fn test_normalized_call_id_trims_whitespace() { - assert_eq!( - normalized_call_id(Some(" functions.edit:27 ")).as_deref(), - Some("functions.edit:27") - ); - assert_eq!(normalized_call_id(Some(" ")), None); - } - - #[test] - fn test_extract_tool_output_text_fallbacks() { - let state_output = ToolState { - status: Some("completed".to_string()), - input: None, - output: Some(serde_json::json!("done")), - error: Some(serde_json::json!("ignored error")), - metadata: None, - title: None, - time: None, - }; - assert_eq!( - extract_tool_output_text(Some(&state_output)).as_deref(), - Some("done") - ); - - let state_error = ToolState { - status: Some("error".to_string()), - input: None, - output: None, - error: Some(serde_json::json!("failed")), - metadata: Some(serde_json::json!({"output": "metadata output"})), - title: None, - time: None, - }; - assert_eq!( - extract_tool_output_text(Some(&state_error)).as_deref(), - Some("failed") - ); - - let state_meta = ToolState { - status: Some("error".to_string()), - input: None, - output: None, - error: None, - metadata: Some(serde_json::json!({"output": "metadata output"})), - title: None, - time: None, - }; - assert_eq!( - extract_tool_output_text(Some(&state_meta)).as_deref(), - Some("metadata output") - ); - } - - #[test] - fn 
test_session_info_deser() { - let json = r#"{"id":"ses_abc","version":"1.1.30","title":"Test session","projectID":"abc123","directory":"/tmp/proj","time":{"created":1753359830903,"updated":1753360246507}}"#; - let info: SessionInfo = serde_json::from_str(json).unwrap(); - assert_eq!(info.id, "ses_abc"); - assert_eq!(info.title, Some("Test session".to_string())); - assert_eq!(info.directory, Some("/tmp/proj".to_string())); - } - - #[test] - fn test_session_info_parent_id_deser() { - let json = r#"{"id":"ses_child","version":"1.1.30","parentID":"ses_parent","directory":"/tmp/proj","time":{"created":1753359830903,"updated":1753360246507}}"#; - let info: SessionInfo = serde_json::from_str(json).unwrap(); - assert_eq!(info.parent_id, Some("ses_parent".to_string())); - } - - #[test] - fn test_session_info_parent_id_alias_deser() { - let json = r#"{"id":"ses_child","version":"1.1.30","parentId":"ses_parent","directory":"/tmp/proj","time":{"created":1753359830903,"updated":1753360246507}}"#; - let info: SessionInfo = serde_json::from_str(json).unwrap(); - assert_eq!(info.parent_id, Some("ses_parent".to_string())); - } - - #[test] - fn test_session_info_parent_uuid_alias_deser() { - let json = r#"{"id":"ses_child","version":"1.1.30","parentUUID":"ses_parent","directory":"/tmp/proj","time":{"created":1753359830903,"updated":1753360246507}}"#; - let info: SessionInfo = serde_json::from_str(json).unwrap(); - assert_eq!(info.parent_id, Some("ses_parent".to_string())); - } - - #[test] - fn test_session_context_has_source_path() { - let temp_dir = std::env::temp_dir().join(format!( - "opensession-opencode-parser-source-path-{}", - std::time::SystemTime::now() - .duration_since(std::time::UNIX_EPOCH) - .expect("clock ok") - .as_nanos() - )); - std::fs::create_dir_all(&temp_dir).unwrap(); - let session_path = temp_dir.join("session.json"); - write( - &session_path, - 
r#"{"id":"ses_parent","version":"1.1.30","directory":"/tmp/proj","time":{"created":1753359830903,"updated":1753360246507}}"#, - ) - .unwrap(); - - let session = parse_opencode_session(&session_path).expect("parse session"); - assert_eq!( - session - .context - .attributes - .get("source_path") - .and_then(|value| value.as_str()), - Some(session_path.to_str().unwrap()) - ); - } - - #[test] - fn test_message_info_deser() { - let json = r#"{"id":"msg_abc","sessionID":"ses_abc","role":"user","model":{"providerID":"openai","modelID":"gpt-5.2-codex"},"time":{"created":1753359830903}}"#; - let msg: MessageInfo = serde_json::from_str(json).unwrap(); - assert_eq!(msg.id, "msg_abc"); - assert_eq!(msg.role, "user"); - let model = msg.model.unwrap(); - assert_eq!(model.provider_id, Some("openai".to_string())); - assert_eq!(model.model_id, Some("gpt-5.2-codex".to_string())); - } - - #[test] - fn test_message_info_deser_top_level_model_fields() { - let json = r#"{"id":"msg_xyz","sessionID":"ses_abc","role":"assistant","providerID":"openai","modelID":"gpt-5.2-codex","time":{"created":1753359830903}}"#; - let msg: MessageInfo = serde_json::from_str(json).unwrap(); - assert_eq!(msg.id, "msg_xyz"); - assert_eq!(msg.provider_id.as_deref(), Some("openai")); - assert_eq!(msg.model_id.as_deref(), Some("gpt-5.2-codex")); - } - - #[test] - fn test_can_parse() { - let parser = OpenCodeParser; - assert!(parser.can_parse(Path::new( - "/Users/test/.local/share/opencode/storage/session/abc123/ses_xyz.json" - ))); - assert!(!parser.can_parse(Path::new( - "/Users/test/.local/share/opencode/storage/message/ses_xyz/msg_abc.json" - ))); - } - - fn tmp_test_root() -> std::path::PathBuf { - let since_epoch = SystemTime::now() - .duration_since(UNIX_EPOCH) - .expect("system time should be valid") - .as_nanos(); - let root = std::env::temp_dir().join(format!("opensession-opencode-parser-{since_epoch}")); - std::fs::create_dir_all(&root).expect("create temp dir"); - root - } - - #[test] - fn 
test_parse_relates_child_session_to_parent() { - let root = tmp_test_root(); - let project = root.join("proj-test"); - let session_dir = project.join("storage").join("session").join("example"); - let message_dir = project.join("storage").join("message").join("ses_child"); - let part_dir = project.join("storage").join("part").join("msg_001"); - create_dir_all(&session_dir).expect("create session dir"); - create_dir_all(&message_dir).expect("create message dir"); - create_dir_all(&part_dir).expect("create part dir"); - - write( - session_dir.join("ses_child.json"), - r#"{"id":"ses_child","version":"1.1.30","parentID":"ses_parent","directory":"/tmp/proj","time":{"created":1753359830903,"updated":1753360246507}}"#, - ) - .expect("write session file"); - write( - message_dir.join("msg_001.json"), - r#"{"id":"msg_001","sessionID":"ses_child","role":"user","time":{"created":1753359831000}}"#, - ) - .expect("write message file"); - write( - part_dir.join("part_001.json"), - r#"{"id":"part_001","messageID":"msg_001","type":"text","text":"hello","time":{"start":1753359831000,"end":1753359831000}}"#, - ) - .expect("write part file"); - - let session = - parse_opencode_session(&session_dir.join("ses_child.json")).expect("parse session"); - assert_eq!( - session.context.related_session_ids, - vec!["ses_parent".to_string()] - ); - assert_eq!( - session - .context - .attributes - .get("parent_session_id") - .and_then(|v| v.as_str()), - Some("ses_parent") - ); - assert_eq!( - session - .context - .attributes - .get("session_role") - .and_then(|v| v.as_str()), - Some("auxiliary") - ); - assert_eq!(session.stats.event_count, 1); - } - - #[test] - fn test_parse_part_dir_prefixed_msg_fallback() { - let root = tmp_test_root(); - let project = root.join("proj-prefixed"); - let session_dir = project.join("storage").join("session").join("example"); - let message_dir = project.join("storage").join("message").join("ses_fallback"); - let part_dir = 
project.join("storage").join("part").join("msg_abc123"); - create_dir_all(&session_dir).expect("create session dir"); - create_dir_all(&message_dir).expect("create message dir"); - create_dir_all(&part_dir).expect("create part dir"); - - write( - session_dir.join("ses_fallback.json"), - r#"{"id":"ses_fallback","version":"1.1.30","directory":"/tmp/proj","time":{"created":1753359830903,"updated":1753360246507}}"#, - ) - .expect("write session file"); - write( - message_dir.join("abc123.json"), - r#"{"id":"abc123","sessionID":"ses_fallback","role":"assistant","providerID":"openai","modelID":"gpt-5.2-codex","time":{"created":1753359831000}}"#, - ) - .expect("write message file"); - write( - part_dir.join("part_001.json"), - r#"{"id":"part_001","messageID":"abc123","type":"text","text":"assistant reply","time":{"start":1753359831000,"end":1753359831200}}"#, - ) - .expect("write part file"); - - let session = - parse_opencode_session(&session_dir.join("ses_fallback.json")).expect("parse session"); - assert!( - session - .events - .iter() - .any(|event| matches!(event.event_type, EventType::AgentMessage)) - ); - assert_eq!(session.agent.provider, "openai"); - assert_eq!(session.agent.model, "gpt-5.2-codex"); - assert_eq!( - session - .context - .attributes - .get("session_role") - .and_then(|v| v.as_str()), - Some("primary") - ); - } - - #[test] - fn test_parse_reasoning_and_call_id_normalization() { - let root = tmp_test_root(); - let project = root.join("proj-company"); - let session_dir = project.join("storage").join("session").join("example"); - let message_dir = project.join("storage").join("message").join("ses_company"); - let user_part_dir = project.join("storage").join("part").join("msg_user"); - let assistant_part_dir = project.join("storage").join("part").join("msg_assistant"); - create_dir_all(&session_dir).expect("create session dir"); - create_dir_all(&message_dir).expect("create message dir"); - create_dir_all(&user_part_dir).expect("create user part dir"); 
- create_dir_all(&assistant_part_dir).expect("create assistant part dir"); - - write( - session_dir.join("ses_company.json"), - r#"{"id":"ses_company","version":"1.2.0","directory":"/tmp/proj","time":{"created":1753359830903,"updated":1753360246507}}"#, - ) - .expect("write session"); - write( - message_dir.join("msg_user.json"), - r#"{"id":"msg_user","sessionID":"ses_company","role":"user","model":{"providerID":"openai","modelID":"gpt-5.2-codex"},"time":{"created":1753359831000}}"#, - ) - .expect("write user message"); - write( - message_dir.join("msg_assistant.json"), - r#"{"id":"msg_assistant","sessionID":"ses_company","role":"assistant","providerID":"openai","modelID":"gpt-5.2-codex","time":{"created":1753359832000,"completed":1753359835000}}"#, - ) - .expect("write assistant message"); - - write( - user_part_dir.join("part_user_text.json"), - r#"{"id":"part_user_text","messageID":"msg_user","type":"text","text":"run diagnostics","time":{"start":1753359831000,"end":1753359831100}}"#, - ) - .expect("write user text part"); - write( - user_part_dir.join("part_user_file.json"), - r#"{"id":"part_user_file","messageID":"msg_user","type":"file","filename":"notes.md","url":"file:///tmp/proj/notes.md","time":{"start":1753359831050,"end":1753359831050}}"#, - ) - .expect("write user file part"); - write( - assistant_part_dir.join("part_reasoning.json"), - r#"{"id":"part_reasoning","messageID":"msg_assistant","type":"reasoning","text":"","metadata":{"openai":{"reasoningEncryptedContent":"abc"}},"time":{"start":1753359832100,"end":1753359832200}}"#, - ) - .expect("write reasoning part"); - write( - assistant_part_dir.join("part_tool_done.json"), - r#"{"id":"part_tool_done","messageID":"msg_assistant","type":"tool","callID":"call_abc","tool":"grep","state":{"status":"Completed","input":{"pattern":"todo","path":"/tmp/proj"},"output":"Found 1 match","title":"todo","time":{"start":1753359832300,"end":1753359832400}}}"#, - ) - .expect("write completed tool part"); - write( - 
assistant_part_dir.join("part_tool_running.json"), - r#"{"id":"part_tool_running","messageID":"msg_assistant","type":"tool","callID":" functions.edit:27 ","tool":"edit","state":{"status":"running","input":{"filePath":"/tmp/proj/main.rs"},"time":{"start":1753359832500}}}"#, - ) - .expect("write running tool part"); - write( - assistant_part_dir.join("part_patch.json"), - r#"{"id":"part_patch","messageID":"msg_assistant","type":"patch","hash":"abc123","files":["/tmp/proj/main.rs","/tmp/proj/lib.rs"],"time":{"start":1753359832450,"end":1753359832450}}"#, - ) - .expect("write patch part"); - - let session = - parse_opencode_session(&session_dir.join("ses_company.json")).expect("parse session"); - - let thinking = session - .events - .iter() - .find(|event| matches!(event.event_type, EventType::Thinking)) - .expect("thinking event"); - assert_eq!( - thinking - .content - .blocks - .first() - .and_then(|block| match block { - opensession_core::trace::ContentBlock::Text { text } => Some(text.as_str()), - _ => None, - }), - Some("Encrypted reasoning") - ); - - assert!(session.events.iter().any(|event| { - matches!( - &event.event_type, - EventType::ToolResult { name, call_id, .. } - if name == "grep" && call_id.as_deref() == Some("call_abc") - ) - })); - - assert!(session.events.iter().any(|event| { - matches!(event.event_type, EventType::UserMessage) - && event.content.blocks.iter().any(|block| { - matches!( - block, - opensession_core::trace::ContentBlock::Text { text } - if text == "Attached file: notes.md" - ) - }) - })); - - assert!(session.events.iter().any(|event| { - matches!( - &event.event_type, - EventType::FileEdit { path, .. } if path == "/tmp/proj/lib.rs" - ) && event - .attributes - .get("source.raw_type") - .and_then(|v| v.as_str()) - == Some("part:patch:file") - })); - - assert!(session.events.iter().any(|event| { - matches!(&event.event_type, EventType::FileEdit { .. 
}) - && event - .attributes - .get("semantic.call_id") - .and_then(|v| v.as_str()) - == Some("functions.edit:27") - })); - - assert!(session.events.iter().any(|event| { - matches!(&event.event_type, EventType::TaskEnd { .. }) - && event.task_id.as_deref() == Some("functions.edit:27") - && event - .attributes - .get("source.raw_type") - .and_then(|v| v.as_str()) - == Some("synthetic:task-end") - })); - } - - #[test] - fn test_patch_with_many_files_emits_summary_event() { - let root = tmp_test_root(); - let project = root.join("proj-patch-summary"); - let session_dir = project.join("storage").join("session").join("example"); - let message_dir = project - .join("storage") - .join("message") - .join("ses_patch_summary"); - let part_dir = project.join("storage").join("part").join("msg_assistant"); - create_dir_all(&session_dir).expect("create session dir"); - create_dir_all(&message_dir).expect("create message dir"); - create_dir_all(&part_dir).expect("create part dir"); - - write( - session_dir.join("ses_patch_summary.json"), - r#"{"id":"ses_patch_summary","version":"1.2.0","directory":"/tmp/proj","time":{"created":1753359830903,"updated":1753360246507}}"#, - ) - .expect("write session"); - write( - message_dir.join("msg_assistant.json"), - r#"{"id":"msg_assistant","sessionID":"ses_patch_summary","role":"assistant","providerID":"openai","modelID":"gpt-5.2-codex","time":{"created":1753359832000,"completed":1753359835000}}"#, - ) - .expect("write assistant message"); - write( - part_dir.join("part_patch_many.json"), - r#"{"id":"part_patch_many","messageID":"msg_assistant","type":"patch","hash":"manyhash","files":["/tmp/proj/f1.rs","/tmp/proj/f2.rs","/tmp/proj/f3.rs","/tmp/proj/f4.rs","/tmp/proj/f5.rs","/tmp/proj/f6.rs","/tmp/proj/f7.rs","/tmp/proj/f8.rs","/tmp/proj/f9.rs"],"time":{"start":1753359832100,"end":1753359832200}}"#, - ) - .expect("write patch part"); - - let session = parse_opencode_session(&session_dir.join("ses_patch_summary.json")) - .expect("parse patch 
summary session"); - - assert!( - !session - .events - .iter() - .any(|event| matches!(&event.event_type, EventType::FileEdit { .. })) - ); - assert!(session.events.iter().any(|event| { - matches!( - &event.event_type, - EventType::Custom { kind } if kind == "patch" - ) && event - .attributes - .get("source.raw_type") - .and_then(|v| v.as_str()) - == Some("part:patch:summary") - && event.content.blocks.iter().any(|block| { - matches!( - block, - opensession_core::trace::ContentBlock::Json { data } - if data - .get("file_count") - .and_then(|v| v.as_u64()) - == Some(9) - ) - }) - })); - } - - use chrono::Datelike; -} +mod tests; diff --git a/crates/parsers/src/opencode/tests.rs b/crates/parsers/src/opencode/tests.rs new file mode 100644 index 00000000..b14a0d00 --- /dev/null +++ b/crates/parsers/src/opencode/tests.rs @@ -0,0 +1,473 @@ +use super::*; +use chrono::Datelike; +use std::fs::{create_dir_all, write}; +use std::time::{SystemTime, UNIX_EPOCH}; + +#[test] +fn test_millis_to_datetime() { + let dt = millis_to_datetime(1753359830903); + assert!(dt.year() >= 2025); +} + +#[test] +fn test_classify_bash() { + let input = Some(serde_json::json!({"command": "ls -la"})); + let et = classify_opencode_tool("bash", &input); + match et { + EventType::ShellCommand { command, .. 
} => assert_eq!(command, "ls -la"), + _ => panic!("Expected ShellCommand"), + } +} + +#[test] +fn test_classify_read_with_camel_case_path() { + let input = Some(serde_json::json!({"filePath": "/tmp/demo.rs"})); + let et = classify_opencode_tool("read", &input); + match et { + EventType::FileRead { path } => assert_eq!(path, "/tmp/demo.rs"), + _ => panic!("Expected FileRead"), + } +} + +#[test] +fn test_tool_status_terminal_variants() { + assert!(is_terminal_tool_status("completed")); + assert!(is_terminal_tool_status("FAILED")); + assert!(is_terminal_tool_status("canceled")); + assert!(!is_terminal_tool_status("running")); +} + +#[test] +fn test_normalized_call_id_trims_whitespace() { + assert_eq!( + normalized_call_id(Some(" functions.edit:27 ")).as_deref(), + Some("functions.edit:27") + ); + assert_eq!(normalized_call_id(Some(" ")), None); +} + +#[test] +fn test_extract_tool_output_text_fallbacks() { + let state_output = ToolState { + status: Some("completed".to_string()), + input: None, + output: Some(serde_json::json!("done")), + error: Some(serde_json::json!("ignored error")), + metadata: None, + title: None, + time: None, + }; + assert_eq!( + extract_tool_output_text(Some(&state_output)).as_deref(), + Some("done") + ); + + let state_error = ToolState { + status: Some("error".to_string()), + input: None, + output: None, + error: Some(serde_json::json!("failed")), + metadata: Some(serde_json::json!({"output": "metadata output"})), + title: None, + time: None, + }; + assert_eq!( + extract_tool_output_text(Some(&state_error)).as_deref(), + Some("failed") + ); + + let state_meta = ToolState { + status: Some("error".to_string()), + input: None, + output: None, + error: None, + metadata: Some(serde_json::json!({"output": "metadata output"})), + title: None, + time: None, + }; + assert_eq!( + extract_tool_output_text(Some(&state_meta)).as_deref(), + Some("metadata output") + ); +} + +#[test] +fn test_session_info_deser() { + let json = 
r#"{"id":"ses_abc","version":"1.1.30","title":"Test session","projectID":"abc123","directory":"/tmp/proj","time":{"created":1753359830903,"updated":1753360246507}}"#; + let info: SessionInfo = serde_json::from_str(json).unwrap(); + assert_eq!(info.id, "ses_abc"); + assert_eq!(info.title, Some("Test session".to_string())); + assert_eq!(info.directory, Some("/tmp/proj".to_string())); +} + +#[test] +fn test_session_info_parent_id_deser() { + let json = r#"{"id":"ses_child","version":"1.1.30","parentID":"ses_parent","directory":"/tmp/proj","time":{"created":1753359830903,"updated":1753360246507}}"#; + let info: SessionInfo = serde_json::from_str(json).unwrap(); + assert_eq!(info.parent_id, Some("ses_parent".to_string())); +} + +#[test] +fn test_session_info_parent_id_alias_deser() { + let json = r#"{"id":"ses_child","version":"1.1.30","parentId":"ses_parent","directory":"/tmp/proj","time":{"created":1753359830903,"updated":1753360246507}}"#; + let info: SessionInfo = serde_json::from_str(json).unwrap(); + assert_eq!(info.parent_id, Some("ses_parent".to_string())); +} + +#[test] +fn test_session_info_parent_uuid_alias_deser() { + let json = r#"{"id":"ses_child","version":"1.1.30","parentUUID":"ses_parent","directory":"/tmp/proj","time":{"created":1753359830903,"updated":1753360246507}}"#; + let info: SessionInfo = serde_json::from_str(json).unwrap(); + assert_eq!(info.parent_id, Some("ses_parent".to_string())); +} + +#[test] +fn test_session_context_has_source_path() { + let temp_dir = std::env::temp_dir().join(format!( + "opensession-opencode-parser-source-path-{}", + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .expect("clock ok") + .as_nanos() + )); + std::fs::create_dir_all(&temp_dir).unwrap(); + let session_path = temp_dir.join("session.json"); + write( + &session_path, + r#"{"id":"ses_parent","version":"1.1.30","directory":"/tmp/proj","time":{"created":1753359830903,"updated":1753360246507}}"#, + ) + .unwrap(); + + let session = 
parse_opencode_session(&session_path).expect("parse session"); + assert_eq!( + session + .context + .attributes + .get("source_path") + .and_then(|value| value.as_str()), + Some(session_path.to_str().expect("path to str")) + ); +} + +#[test] +fn test_message_info_deser() { + let json = r#"{"id":"msg_abc","sessionID":"ses_abc","role":"user","model":{"providerID":"openai","modelID":"gpt-5.2-codex"},"time":{"created":1753359830903}}"#; + let msg: MessageInfo = serde_json::from_str(json).unwrap(); + assert_eq!(msg.id, "msg_abc"); + assert_eq!(msg.role, "user"); + let model = msg.model.expect("model ref"); + assert_eq!(model.provider_id, Some("openai".to_string())); + assert_eq!(model.model_id, Some("gpt-5.2-codex".to_string())); +} + +#[test] +fn test_message_info_deser_top_level_model_fields() { + let json = r#"{"id":"msg_xyz","sessionID":"ses_abc","role":"assistant","providerID":"openai","modelID":"gpt-5.2-codex","time":{"created":1753359830903}}"#; + let msg: MessageInfo = serde_json::from_str(json).unwrap(); + assert_eq!(msg.id, "msg_xyz"); + assert_eq!(msg.provider_id.as_deref(), Some("openai")); + assert_eq!(msg.model_id.as_deref(), Some("gpt-5.2-codex")); +} + +#[test] +fn test_can_parse() { + let parser = OpenCodeParser; + assert!(parser.can_parse(Path::new( + "/Users/test/.local/share/opencode/storage/session/abc123/ses_xyz.json" + ))); + assert!(!parser.can_parse(Path::new( + "/Users/test/.local/share/opencode/storage/message/ses_xyz/msg_abc.json" + ))); +} + +fn tmp_test_root() -> std::path::PathBuf { + let since_epoch = SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("system time should be valid") + .as_nanos(); + let root = std::env::temp_dir().join(format!("opensession-opencode-parser-{since_epoch}")); + std::fs::create_dir_all(&root).expect("create temp dir"); + root +} + +#[test] +fn test_parse_relates_child_session_to_parent() { + let root = tmp_test_root(); + let project = root.join("proj-test"); + let session_dir = 
project.join("storage").join("session").join("example"); + let message_dir = project.join("storage").join("message").join("ses_child"); + let part_dir = project.join("storage").join("part").join("msg_001"); + create_dir_all(&session_dir).expect("create session dir"); + create_dir_all(&message_dir).expect("create message dir"); + create_dir_all(&part_dir).expect("create part dir"); + + write( + session_dir.join("ses_child.json"), + r#"{"id":"ses_child","version":"1.1.30","parentID":"ses_parent","directory":"/tmp/proj","time":{"created":1753359830903,"updated":1753360246507}}"#, + ) + .expect("write session file"); + write( + message_dir.join("msg_001.json"), + r#"{"id":"msg_001","sessionID":"ses_child","role":"user","time":{"created":1753359831000}}"#, + ) + .expect("write message file"); + write( + part_dir.join("part_001.json"), + r#"{"id":"part_001","messageID":"msg_001","type":"text","text":"hello","time":{"start":1753359831000,"end":1753359831000}}"#, + ) + .expect("write part file"); + + let session = + parse_opencode_session(&session_dir.join("ses_child.json")).expect("parse session"); + assert_eq!( + session.context.related_session_ids, + vec!["ses_parent".to_string()] + ); + assert_eq!( + session + .context + .attributes + .get("parent_session_id") + .and_then(|value| value.as_str()), + Some("ses_parent") + ); + assert_eq!( + session + .context + .attributes + .get("session_role") + .and_then(|value| value.as_str()), + Some("auxiliary") + ); + assert_eq!(session.stats.event_count, 1); +} + +#[test] +fn test_parse_part_dir_prefixed_msg_fallback() { + let root = tmp_test_root(); + let project = root.join("proj-prefixed"); + let session_dir = project.join("storage").join("session").join("example"); + let message_dir = project.join("storage").join("message").join("ses_fallback"); + let part_dir = project.join("storage").join("part").join("msg_abc123"); + create_dir_all(&session_dir).expect("create session dir"); + create_dir_all(&message_dir).expect("create 
message dir"); + create_dir_all(&part_dir).expect("create part dir"); + + write( + session_dir.join("ses_fallback.json"), + r#"{"id":"ses_fallback","version":"1.1.30","directory":"/tmp/proj","time":{"created":1753359830903,"updated":1753360246507}}"#, + ) + .expect("write session file"); + write( + message_dir.join("abc123.json"), + r#"{"id":"abc123","sessionID":"ses_fallback","role":"assistant","providerID":"openai","modelID":"gpt-5.2-codex","time":{"created":1753359831000}}"#, + ) + .expect("write message file"); + write( + part_dir.join("part_001.json"), + r#"{"id":"part_001","messageID":"abc123","type":"text","text":"assistant reply","time":{"start":1753359831000,"end":1753359831200}}"#, + ) + .expect("write part file"); + + let session = + parse_opencode_session(&session_dir.join("ses_fallback.json")).expect("parse session"); + assert!( + session + .events + .iter() + .any(|event| matches!(event.event_type, EventType::AgentMessage)) + ); + assert_eq!(session.agent.provider, "openai"); + assert_eq!(session.agent.model, "gpt-5.2-codex"); + assert_eq!( + session + .context + .attributes + .get("session_role") + .and_then(|value| value.as_str()), + Some("primary") + ); +} + +#[test] +fn test_parse_reasoning_and_call_id_normalization() { + let root = tmp_test_root(); + let project = root.join("proj-company"); + let session_dir = project.join("storage").join("session").join("example"); + let message_dir = project.join("storage").join("message").join("ses_company"); + let user_part_dir = project.join("storage").join("part").join("msg_user"); + let assistant_part_dir = project.join("storage").join("part").join("msg_assistant"); + create_dir_all(&session_dir).expect("create session dir"); + create_dir_all(&message_dir).expect("create message dir"); + create_dir_all(&user_part_dir).expect("create user part dir"); + create_dir_all(&assistant_part_dir).expect("create assistant part dir"); + + write( + session_dir.join("ses_company.json"), + 
r#"{"id":"ses_company","version":"1.2.0","directory":"/tmp/proj","time":{"created":1753359830903,"updated":1753360246507}}"#, + ) + .expect("write session"); + write( + message_dir.join("msg_user.json"), + r#"{"id":"msg_user","sessionID":"ses_company","role":"user","model":{"providerID":"openai","modelID":"gpt-5.2-codex"},"time":{"created":1753359831000}}"#, + ) + .expect("write user message"); + write( + message_dir.join("msg_assistant.json"), + r#"{"id":"msg_assistant","sessionID":"ses_company","role":"assistant","providerID":"openai","modelID":"gpt-5.2-codex","time":{"created":1753359832000,"completed":1753359835000}}"#, + ) + .expect("write assistant message"); + + write( + user_part_dir.join("part_user_text.json"), + r#"{"id":"part_user_text","messageID":"msg_user","type":"text","text":"run diagnostics","time":{"start":1753359831000,"end":1753359831100}}"#, + ) + .expect("write user text part"); + write( + user_part_dir.join("part_user_file.json"), + r#"{"id":"part_user_file","messageID":"msg_user","type":"file","filename":"notes.md","url":"file:///tmp/proj/notes.md","time":{"start":1753359831050,"end":1753359831050}}"#, + ) + .expect("write user file part"); + write( + assistant_part_dir.join("part_reasoning.json"), + r#"{"id":"part_reasoning","messageID":"msg_assistant","type":"reasoning","text":"","metadata":{"openai":{"reasoningEncryptedContent":"abc"}},"time":{"start":1753359832100,"end":1753359832200}}"#, + ) + .expect("write reasoning part"); + write( + assistant_part_dir.join("part_tool_done.json"), + r#"{"id":"part_tool_done","messageID":"msg_assistant","type":"tool","callID":"call_abc","tool":"grep","state":{"status":"Completed","input":{"pattern":"todo","path":"/tmp/proj"},"output":"Found 1 match","title":"todo","time":{"start":1753359832300,"end":1753359832400}}}"#, + ) + .expect("write completed tool part"); + write( + assistant_part_dir.join("part_tool_running.json"), + 
r#"{"id":"part_tool_running","messageID":"msg_assistant","type":"tool","callID":" functions.edit:27 ","tool":"edit","state":{"status":"running","input":{"filePath":"/tmp/proj/main.rs"},"time":{"start":1753359832500}}}"#, + ) + .expect("write running tool part"); + write( + assistant_part_dir.join("part_patch.json"), + r#"{"id":"part_patch","messageID":"msg_assistant","type":"patch","hash":"abc123","files":["/tmp/proj/main.rs","/tmp/proj/lib.rs"],"time":{"start":1753359832450,"end":1753359832450}}"#, + ) + .expect("write patch part"); + + let session = + parse_opencode_session(&session_dir.join("ses_company.json")).expect("parse session"); + + let thinking = session + .events + .iter() + .find(|event| matches!(event.event_type, EventType::Thinking)) + .expect("thinking event"); + assert_eq!( + thinking + .content + .blocks + .first() + .and_then(|block| match block { + opensession_core::trace::ContentBlock::Text { text } => Some(text.as_str()), + _ => None, + }), + Some("Encrypted reasoning") + ); + + assert!(session.events.iter().any(|event| { + matches!( + &event.event_type, + EventType::ToolResult { name, call_id, .. } + if name == "grep" && call_id.as_deref() == Some("call_abc") + ) + })); + + assert!(session.events.iter().any(|event| { + matches!(event.event_type, EventType::UserMessage) + && event.content.blocks.iter().any(|block| { + matches!( + block, + opensession_core::trace::ContentBlock::Text { text } if text == "Attached file: notes.md" + ) + }) + })); + + assert!(session.events.iter().any(|event| { + matches!(&event.event_type, EventType::FileEdit { path, .. } if path == "/tmp/proj/lib.rs") + && event + .attributes + .get("source.raw_type") + .and_then(|value| value.as_str()) + == Some("part:patch:file") + })); + + assert!(session.events.iter().any(|event| { + matches!(&event.event_type, EventType::FileEdit { .. 
}) + && event + .attributes + .get("semantic.call_id") + .and_then(|value| value.as_str()) + == Some("functions.edit:27") + })); + + assert!(session.events.iter().any(|event| { + matches!(&event.event_type, EventType::TaskEnd { .. }) + && event.task_id.as_deref() == Some("functions.edit:27") + && event + .attributes + .get("source.raw_type") + .and_then(|value| value.as_str()) + == Some("synthetic:task-end") + })); +} + +#[test] +fn test_patch_with_many_files_emits_summary_event() { + let root = tmp_test_root(); + let project = root.join("proj-patch-summary"); + let session_dir = project.join("storage").join("session").join("example"); + let message_dir = project + .join("storage") + .join("message") + .join("ses_patch_summary"); + let part_dir = project.join("storage").join("part").join("msg_assistant"); + create_dir_all(&session_dir).expect("create session dir"); + create_dir_all(&message_dir).expect("create message dir"); + create_dir_all(&part_dir).expect("create part dir"); + + write( + session_dir.join("ses_patch_summary.json"), + r#"{"id":"ses_patch_summary","version":"1.2.0","directory":"/tmp/proj","time":{"created":1753359830903,"updated":1753360246507}}"#, + ) + .expect("write session"); + write( + message_dir.join("msg_assistant.json"), + r#"{"id":"msg_assistant","sessionID":"ses_patch_summary","role":"assistant","providerID":"openai","modelID":"gpt-5.2-codex","time":{"created":1753359832000,"completed":1753359835000}}"#, + ) + .expect("write assistant message"); + write( + part_dir.join("part_patch_many.json"), + r#"{"id":"part_patch_many","messageID":"msg_assistant","type":"patch","hash":"manyhash","files":["/tmp/proj/f1.rs","/tmp/proj/f2.rs","/tmp/proj/f3.rs","/tmp/proj/f4.rs","/tmp/proj/f5.rs","/tmp/proj/f6.rs","/tmp/proj/f7.rs","/tmp/proj/f8.rs","/tmp/proj/f9.rs"],"time":{"start":1753359832100,"end":1753359832200}}"#, + ) + .expect("write patch part"); + + let session = parse_opencode_session(&session_dir.join("ses_patch_summary.json")) + 
.expect("parse patch summary session"); + + assert!( + !session + .events + .iter() + .any(|event| matches!(&event.event_type, EventType::FileEdit { .. })) + ); + assert!(session.events.iter().any(|event| { + matches!(&event.event_type, EventType::Custom { kind } if kind == "patch") + && event + .attributes + .get("source.raw_type") + .and_then(|value| value.as_str()) + == Some("part:patch:summary") + && event.content.blocks.iter().any(|block| { + matches!( + block, + opensession_core::trace::ContentBlock::Json { data } + if data.get("file_count").and_then(|value| value.as_u64()) == Some(9) + ) + }) + })); +} diff --git a/crates/parsers/tests/real_data.rs b/crates/parsers/tests/real_data.rs index b53da3c4..c9d4a457 100644 --- a/crates/parsers/tests/real_data.rs +++ b/crates/parsers/tests/real_data.rs @@ -5,7 +5,7 @@ #[ignore = "requires real Codex session files"] fn parse_real_codex_session() { let registry = opensession_parsers::ParserRegistry::default(); - let paths = opensession_parsers::discover::discover_for_tool("codex"); + let paths = opensession_parser_discovery::discover_for_tool("codex"); assert!(!paths.is_empty(), "No Codex session files found"); for path in &paths { @@ -38,7 +38,7 @@ fn parse_real_codex_session() { #[ignore = "requires real Gemini session files"] fn parse_real_gemini_session() { let registry = opensession_parsers::ParserRegistry::default(); - let paths = opensession_parsers::discover::discover_for_tool("gemini"); + let paths = opensession_parser_discovery::discover_for_tool("gemini"); assert!(!paths.is_empty(), "No Gemini session files found"); for path in &paths { @@ -70,7 +70,7 @@ fn parse_real_gemini_session() { #[ignore = "requires real OpenCode session files"] fn parse_real_opencode_session() { let registry = opensession_parsers::ParserRegistry::default(); - let paths = opensession_parsers::discover::discover_for_tool("opencode"); + let paths = opensession_parser_discovery::discover_for_tool("opencode"); assert!(!paths.is_empty(), 
"No OpenCode session files found"); for path in &paths { diff --git a/crates/paths/Cargo.toml b/crates/paths/Cargo.toml new file mode 100644 index 00000000..a8c14ca0 --- /dev/null +++ b/crates/paths/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "opensession-paths" +version.workspace = true +edition.workspace = true +rust-version.workspace = true +license.workspace = true +repository.workspace = true +homepage.workspace = true +description = "Centralized OpenSession runtime path helpers" +include = ["src/**/*.rs", "Cargo.toml", "LICENSE", "README.md"] + +[lib] +doctest = false + +[lints] +workspace = true + +[dependencies] +directories = { workspace = true } +opensession-runtime-config = { workspace = true } +thiserror = { workspace = true } diff --git a/crates/paths/src/lib.rs b/crates/paths/src/lib.rs new file mode 100644 index 00000000..11328c4b --- /dev/null +++ b/crates/paths/src/lib.rs @@ -0,0 +1,161 @@ +use directories::BaseDirs; +use opensession_runtime_config::CONFIG_FILE_NAME; +use std::path::{Path, PathBuf}; + +#[derive(Debug, thiserror::Error)] +pub enum PathError { + #[error("could not determine home directory")] + HomeUnavailable, +} + +fn base_dirs() -> Result { + BaseDirs::new().ok_or(PathError::HomeUnavailable) +} + +fn join_segments(base: &Path, segments: &[&str]) -> PathBuf { + segments + .iter() + .fold(base.to_path_buf(), |path, segment| path.join(segment)) +} + +pub fn home_dir() -> Result { + Ok(base_dirs()?.home_dir().to_path_buf()) +} + +pub fn config_dir() -> Result { + Ok(join_segments(&home_dir()?, &[".config", "opensession"])) +} + +pub fn data_dir() -> Result { + Ok(join_segments( + &home_dir()?, + &[".local", "share", "opensession"], + )) +} + +pub fn runtime_config_path() -> Result { + Ok(config_dir()?.join(CONFIG_FILE_NAME)) +} + +pub fn local_db_path() -> Result { + if let Some(path) = std::env::var_os("OPENSESSION_LOCAL_DB_PATH") + .map(PathBuf::from) + .filter(|path| !path.as_os_str().is_empty()) + { + return Ok(path); + } + 
Ok(data_dir()?.join("local.db")) +} + +pub fn local_store_root() -> Result { + Ok(data_dir()?.join("objects")) +} + +#[cfg(test)] +mod tests { + use super::{ + config_dir, data_dir, home_dir, local_db_path, local_store_root, runtime_config_path, + }; + use opensession_runtime_config::CONFIG_FILE_NAME; + use std::path::PathBuf; + use std::sync::{Mutex, OnceLock}; + + fn env_test_lock() -> &'static Mutex<()> { + static LOCK: OnceLock> = OnceLock::new(); + LOCK.get_or_init(|| Mutex::new(())) + } + + struct EnvVarGuard { + key: &'static str, + previous: Option, + } + + impl EnvVarGuard { + fn set(key: &'static str, value: &str) -> Self { + let previous = std::env::var_os(key); + // SAFETY: tests serialize environment mutation with `env_test_lock`, so no + // concurrent readers/writers observe partially updated process env state. + unsafe { std::env::set_var(key, value) }; + Self { key, previous } + } + + fn clear(key: &'static str) -> Self { + let previous = std::env::var_os(key); + // SAFETY: tests serialize environment mutation with `env_test_lock`, so no + // concurrent readers/writers observe partially updated process env state. + unsafe { std::env::remove_var(key) }; + Self { key, previous } + } + } + + impl Drop for EnvVarGuard { + fn drop(&mut self) { + if let Some(value) = &self.previous { + // SAFETY: tests serialize environment mutation with `env_test_lock`. + unsafe { std::env::set_var(self.key, value) }; + } else { + // SAFETY: tests serialize environment mutation with `env_test_lock`. 
+ unsafe { std::env::remove_var(self.key) }; + } + } + } + + #[test] + fn config_path_uses_opensession_suffix() { + let path = config_dir().expect("config dir"); + assert_eq!( + path, + home_dir() + .expect("home dir") + .join(".config") + .join("opensession") + ); + assert_eq!( + runtime_config_path() + .expect("runtime config path") + .file_name() + .expect("runtime config filename") + .to_string_lossy(), + CONFIG_FILE_NAME + ); + } + + #[test] + fn data_paths_use_opensession_suffix() { + let _lock = env_test_lock().lock().expect("env lock"); + let _guard = EnvVarGuard::clear("OPENSESSION_LOCAL_DB_PATH"); + let path = data_dir().expect("data dir"); + assert_eq!( + path, + home_dir() + .expect("home dir") + .join(".local") + .join("share") + .join("opensession") + ); + assert_eq!( + local_db_path() + .expect("local db path") + .parent() + .expect("local db parent"), + path.as_path() + ); + assert_eq!( + local_store_root() + .expect("local store root") + .parent() + .expect("local store parent"), + path.as_path() + ); + } + + #[test] + fn local_db_path_prefers_env_override() { + let _lock = env_test_lock().lock().expect("env lock"); + let _guard = EnvVarGuard::set("OPENSESSION_LOCAL_DB_PATH", "/tmp/opensession-test.db"); + assert_eq!( + local_db_path().expect("local db path"), + PathBuf::from("/tmp/opensession-test.db") + ); + } +} diff --git a/crates/server/src/routes/admin.rs b/crates/server/src/routes/admin.rs index d062c9f9..f804fa80 100644 --- a/crates/server/src/routes/admin.rs +++ b/crates/server/src/routes/admin.rs @@ -3,11 +3,11 @@ use axum::{ extract::{Path, State}, http::HeaderMap, }; -use opensession_api::{OkResponse, SessionSummary, db}; +use opensession_api::OkResponse; use crate::AppConfig; use crate::error::ApiErr; -use crate::storage::{Db, session_from_row, sq_execute, sq_query_row}; +use crate::storage::Db; /// DELETE /api/admin/sessions/:id — delete a session (admin key required). 
pub async fn delete_session( @@ -26,14 +26,12 @@ pub async fn delete_session( return Err(ApiErr::unauthorized("invalid admin key")); } - let conn = db.conn(); - let _summary: SessionSummary = - sq_query_row(&conn, db::sessions::get_by_id(&id), session_from_row) - .map_err(|_| ApiErr::not_found("session not found"))?; - - sq_execute(&conn, db::sessions::delete_links(&id)).map_err(ApiErr::from_db("delete links"))?; - sq_execute(&conn, db::sessions::delete(&id)).map_err(ApiErr::from_db("delete session"))?; - - let _ = sq_execute(&conn, db::sessions::delete_fts(&id)); + let deleted = db + .delete_session(&id) + .await + .map_err(ApiErr::from_db("delete session"))?; + if !deleted { + return Err(ApiErr::not_found("session not found")); + } Ok(Json(OkResponse { ok: true })) } diff --git a/crates/server/src/routes/auth.rs b/crates/server/src/routes/auth.rs index 4ff3d368..9e18d379 100644 --- a/crates/server/src/routes/auth.rs +++ b/crates/server/src/routes/auth.rs @@ -11,13 +11,13 @@ use uuid::Uuid; use opensession_api::{ AuthRegisterRequest, AuthTokenResponse, ChangePasswordRequest, CreateGitCredentialRequest, GitCredentialSummary, IssueApiKeyResponse, ListGitCredentialsResponse, LoginRequest, - OkResponse, RefreshRequest, UserSettingsResponse, VerifyResponse, crypto, db as dbq, oauth, - service, service::AuthToken, + OkResponse, RefreshRequest, UserSettingsResponse, VerifyResponse, crypto, service, + service::AuthToken, }; use crate::AppConfig; use crate::error::ApiErr; -use crate::storage::{Db, sq_execute, sq_query_map, sq_query_row}; +use crate::storage::{Db, NewGitCredentialRecord}; const ACCESS_COOKIE_NAME: &str = "opensession_access_token"; const REFRESH_COOKIE_NAME: &str = "opensession_refresh_token"; @@ -41,7 +41,7 @@ pub struct AuthUser { pub email: Option, } -fn resolve_auth_user( +async fn resolve_auth_user( token: &str, db: &Db, config: &AppConfig, @@ -51,33 +51,32 @@ fn resolve_auth_user( let resolved = service::resolve_auth_token(token, &config.jwt_secret, 
now) .map_err(|e| ApiErr::unauthorized(e.message()))?; - let conn = db.conn(); match resolved { AuthToken::ApiKey(key) => { let key_hash = service::hash_api_key(&key); - sq_query_row( - &conn, - dbq::api_keys::get_user_by_valid_key_hash(&key_hash), - |row| { - Ok(AuthUser { - user_id: row.get(0)?, - nickname: row.get(1)?, - auth_via_cookie, - email: row.get(2)?, - }) - }, - ) - .map_err(|_| ApiErr::unauthorized("invalid API key")) + let user = db + .get_auth_user_by_api_key_hash(&key_hash) + .await + .map_err(|_| ApiErr::unauthorized("invalid API key"))?; + Ok(AuthUser { + user_id: user.user_id, + nickname: user.nickname, + auth_via_cookie, + email: user.email, + }) } - AuthToken::Jwt(user_id) => sq_query_row(&conn, dbq::users::get_by_id(&user_id), |row| { + AuthToken::Jwt(user_id) => { + let user = db + .get_auth_user_by_id(&user_id) + .await + .map_err(|_| ApiErr::unauthorized("user not found"))?; Ok(AuthUser { - user_id: row.get(0)?, - nickname: row.get(1)?, + user_id: user.user_id, + nickname: user.nickname, auth_via_cookie, - email: row.get(2)?, + email: user.email, }) - }) - .map_err(|_| ApiErr::unauthorized("user not found")), + } } } @@ -107,16 +106,16 @@ fn header_bearer_token(headers: &HeaderMap) -> Option { .map(ToOwned::to_owned) } -pub fn try_auth_from_headers( +pub async fn try_auth_from_headers( headers: &HeaderMap, db: &Db, config: &AppConfig, ) -> Result, ApiErr> { if let Some(token) = header_bearer_token(headers) { - return resolve_auth_user(&token, db, config, false).map(Some); + return resolve_auth_user(&token, db, config, false).await.map(Some); } if let Some(token) = parse_cookie_value(headers, ACCESS_COOKIE_NAME) { - return resolve_auth_user(&token, db, config, true).map(Some); + return resolve_auth_user(&token, db, config, true).await.map(Some); } Ok(None) } @@ -133,9 +132,11 @@ where let db = Db::from_ref(state); let config = AppConfig::from_ref(state); - try_auth_from_headers(&parts.headers, &db, &config)?.ok_or(ApiErr::unauthorized( - 
"missing or invalid authentication token", - )) + try_auth_from_headers(&parts.headers, &db, &config) + .await? + .ok_or(ApiErr::unauthorized( + "missing or invalid authentication token", + )) } } @@ -283,16 +284,16 @@ pub(crate) fn enforce_csrf_if_cookie_auth( } /// Public wrapper for oauth module to issue tokens. -pub fn issue_tokens_pub( +pub async fn issue_tokens_pub( db: &Db, jwt_secret: &str, user_id: &str, nickname: &str, ) -> Result { - issue_tokens(db, jwt_secret, user_id, nickname) + issue_tokens(db, jwt_secret, user_id, nickname).await } -fn issue_tokens( +async fn issue_tokens( db: &Db, jwt_secret: &str, user_id: &str, @@ -302,16 +303,13 @@ fn issue_tokens( let bundle = service::prepare_token_bundle(jwt_secret, user_id, nickname, now).map_err(ApiErr::from)?; - let conn = db.conn(); - sq_execute( - &conn, - dbq::users::insert_refresh_token( - &bundle.token_id, - user_id, - &bundle.token_hash, - &bundle.expires_at, - ), + db.insert_refresh_token( + &bundle.token_id, + user_id, + &bundle.token_hash, + &bundle.expires_at, ) + .await .map_err(ApiErr::from_db("issue_tokens"))?; Ok(bundle.response) @@ -361,9 +359,7 @@ pub async fn auth_register( // Check email uniqueness { - let conn = db.conn(); - let exists: bool = sq_query_row(&conn, dbq::users::email_exists(&email), |row| row.get(0)) - .unwrap_or(false); + let exists = db.email_exists(&email).await.unwrap_or(false); if exists { return Err(ApiErr::conflict("email already registered")); } @@ -374,32 +370,19 @@ pub async fn auth_register( crypto::hash_password(&req.password).map_err(ApiErr::from)?; { - let conn = db.conn(); - let result = sq_execute( - &conn, - dbq::users::insert_with_email( - &user_id, - &nickname, - &email, - &password_hash, - &password_salt, - ), - ); - match result { - Ok(_) => {} - Err(rusqlite::Error::SqliteFailure(err, _)) - if err.code == rusqlite::ErrorCode::ConstraintViolation => - { + if let Err(err) = db + .insert_user_with_email(&user_id, &nickname, &email, &password_hash, 
&password_salt) + .await + { + if err.is_constraint_violation() { return Err(ApiErr::conflict("nickname already taken")); } - Err(e) => { - tracing::error!("auth_register error: {e}"); - return Err(ApiErr::internal("internal server error")); - } + tracing::error!("auth_register error: {err}"); + return Err(ApiErr::internal("internal server error")); } } - let tokens = issue_tokens(&db, &config.jwt_secret, &user_id, &nickname)?; + let tokens = issue_tokens(&db, &config.jwt_secret, &user_id, &nickname).await?; let cookies = set_cookie_headers_for_auth(&tokens, &headers, &config)?; response_with_cookies(StatusCode::CREATED, &tokens, &cookies) } @@ -417,19 +400,12 @@ pub async fn login( let email = service::validate_email(&req.email).map_err(ApiErr::from)?; - let conn = db.conn(); - let user = sq_query_row(&conn, dbq::users::get_by_email_for_login(&email), |row| { - Ok(( - row.get::<_, String>(0)?, - row.get::<_, String>(1)?, - row.get::<_, Option>(2)?, - row.get::<_, Option>(3)?, - )) - }) - .map_err(|_| ApiErr::unauthorized("invalid email or password"))?; + let user = db + .get_login_user(&email) + .await + .map_err(|_| ApiErr::unauthorized("invalid email or password"))?; - let (user_id, nickname, hash, salt) = user; - let (hash, salt) = match (hash, salt) { + let (hash, salt) = match (user.password_hash, user.password_salt) { (Some(h), Some(s)) => (h, s), _ => { return Err(ApiErr::unauthorized( @@ -441,9 +417,8 @@ pub async fn login( if !crypto::verify_password(&req.password, &hash, &salt) { return Err(ApiErr::unauthorized("invalid email or password")); } - drop(conn); - let tokens = issue_tokens(&db, &config.jwt_secret, &user_id, &nickname)?; + let tokens = issue_tokens(&db, &config.jwt_secret, &user.user_id, &user.nickname).await?; let cookies = set_cookie_headers_for_auth(&tokens, &headers, &config)?; response_with_cookies(StatusCode::OK, &tokens, &cookies) } @@ -463,34 +438,21 @@ pub async fn refresh( enforce_csrf_if_cookie_auth(&headers, &config, 
using_cookie_refresh)?; let token_hash = crypto::hash_token(&refresh_token); - let conn = db.conn(); - let row = sq_query_row( - &conn, - dbq::users::lookup_refresh_token(&token_hash), - |row| { - Ok(( - row.get::<_, String>(0)?, - row.get::<_, String>(1)?, - row.get::<_, String>(2)?, - row.get::<_, String>(3)?, - )) - }, - ) - .map_err(|_| ApiErr::unauthorized("invalid refresh token"))?; - - let (rt_id, user_id, expires_at, nickname) = row; + let row = db + .lookup_refresh_token(&token_hash) + .await + .map_err(|_| ApiErr::unauthorized("invalid refresh token"))?; let now = chrono::Utc::now().format("%Y-%m-%d %H:%M:%S").to_string(); - if expires_at < now { - sq_execute(&conn, dbq::users::delete_refresh_token_by_id(&rt_id)).ok(); + if row.expires_at < now { + db.delete_refresh_token_by_id(&row.token_id).await.ok(); return Err(ApiErr::unauthorized("refresh token expired")); } // Rotate: delete old, issue new - sq_execute(&conn, dbq::users::delete_refresh_token(&token_hash)).ok(); - drop(conn); + db.delete_refresh_token(&token_hash).await.ok(); - let tokens = issue_tokens(&db, &config.jwt_secret, &user_id, &nickname)?; + let tokens = issue_tokens(&db, &config.jwt_secret, &row.user_id, &row.nickname).await?; let cookies = set_cookie_headers_for_auth(&tokens, &headers, &config)?; response_with_cookies(StatusCode::OK, &tokens, &cookies) } @@ -505,8 +467,7 @@ pub async fn logout( if let Ok((refresh_token, using_cookie_refresh)) = resolve_refresh_token(&headers, &body) { enforce_csrf_if_cookie_auth(&headers, &config, using_cookie_refresh)?; let token_hash = crypto::hash_token(&refresh_token); - let conn = db.conn(); - sq_execute(&conn, dbq::users::delete_refresh_token(&token_hash)).ok(); + db.delete_refresh_token(&token_hash).await.ok(); } let payload = OkResponse { ok: true }; let cookies = clear_cookie_headers(&headers, &config); @@ -522,15 +483,12 @@ pub async fn change_password( Json(req): Json, ) -> Result, ApiErr> { enforce_csrf_if_cookie_auth(&headers, &config, 
user.auth_via_cookie)?; - let conn = db.conn(); - let (hash, salt): (Option, Option) = sq_query_row( - &conn, - dbq::users::get_password_fields(&user.user_id), - |row| Ok((row.get(0)?, row.get(1)?)), - ) - .map_err(ApiErr::from_db("change_password lookup"))?; + let fields = db + .get_password_fields(&user.user_id) + .await + .map_err(ApiErr::from_db("change_password lookup"))?; - let (hash, salt) = match (hash, salt) { + let (hash, salt) = match (fields.password_hash, fields.password_salt) { (Some(h), Some(s)) => (h, s), _ => { return Err(ApiErr::bad_request( @@ -546,11 +504,9 @@ pub async fn change_password( service::validate_password(&req.new_password).map_err(ApiErr::from)?; let (new_hash, new_salt) = crypto::hash_password(&req.new_password).map_err(ApiErr::from)?; - sq_execute( - &conn, - dbq::users::update_password(&user.user_id, &new_hash, &new_salt), - ) - .map_err(ApiErr::from_db("change_password update"))?; + db.update_password(&user.user_id, &new_hash, &new_salt) + .await + .map_err(ApiErr::from_db("change_password update"))?; Ok(Json(OkResponse { ok: true })) } @@ -576,42 +532,18 @@ pub async fn me( State(db): State, user: AuthUser, ) -> Result, ApiErr> { - let conn = db.conn(); - let (email, avatar_url): (Option, Option) = - sq_query_row(&conn, dbq::users::get_email_avatar(&user.user_id), |row| { - Ok((row.get(0)?, row.get(1)?)) - }) + let settings = db + .get_user_settings_data(&user.user_id) + .await .map_err(ApiErr::from_db("me error"))?; - // Load linked OAuth providers - let providers: Vec = - sq_query_map(&conn, dbq::oauth::find_by_user(&user.user_id), |row| { - Ok(oauth::LinkedProvider { - provider: row.get(1)?, - provider_username: row.get::<_, Option>(3)?.unwrap_or_default(), - display_name: match row.get::<_, String>(1)?.as_str() { - "github" => "GitHub".to_string(), - "gitlab" => "GitLab".to_string(), - other => other.to_string(), - }, - }) - }) - .map_err(ApiErr::from_db("me query oauth"))?; - - let created_at: String = sq_query_row( - 
&conn, - dbq::users::get_settings_fields(&user.user_id), - |row| row.get(0), - ) - .unwrap_or_default(); - Ok(Json(UserSettingsResponse { user_id: user.user_id, nickname: user.nickname, - created_at, - email, - avatar_url, - oauth_providers: providers, + created_at: settings.created_at, + email: settings.email, + avatar_url: settings.avatar_url, + oauth_providers: settings.oauth_providers, })) } @@ -637,17 +569,12 @@ pub async fn issue_api_key( let key_prefix = service::key_prefix(&new_key); let key_id = Uuid::new_v4().to_string(); - let conn = db.conn(); - sq_execute( - &conn, - dbq::api_keys::move_active_to_grace(&user.user_id, &grace_until), - ) - .map_err(ApiErr::from_db("issue api key move old keys"))?; - sq_execute( - &conn, - dbq::api_keys::insert_active(&key_id, &user.user_id, &key_hash, &key_prefix), - ) - .map_err(ApiErr::from_db("issue api key insert"))?; + db.move_active_api_keys_to_grace(&user.user_id, &grace_until) + .await + .map_err(ApiErr::from_db("issue api key move old keys"))?; + db.insert_active_api_key(&key_id, &user.user_id, &key_hash, &key_prefix) + .await + .map_err(ApiErr::from_db("issue api key insert"))?; Ok(Json(IssueApiKeyResponse { api_key: new_key })) } @@ -741,24 +668,10 @@ pub async fn list_git_credentials( State(db): State, user: AuthUser, ) -> Result, ApiErr> { - let conn = db.conn(); - let credentials = sq_query_map( - &conn, - dbq::git_credentials::list_by_user(&user.user_id), - |row| { - Ok(GitCredentialSummary { - id: row.get(0)?, - label: row.get(1)?, - host: row.get(2)?, - path_prefix: row.get(3)?, - header_name: row.get(4)?, - created_at: row.get(5)?, - updated_at: row.get(6)?, - last_used_at: row.get(7)?, - }) - }, - ) - .map_err(ApiErr::from_db("list git credentials"))?; + let credentials = db + .list_git_credentials(&user.user_id) + .await + .map_err(ApiErr::from_db("list git credentials"))?; Ok(Json(ListGitCredentialsResponse { credentials })) } @@ -794,39 +707,22 @@ pub async fn create_git_credential( let 
header_value_enc = keyring.encrypt(header_value).map_err(ApiErr::from)?; let id = Uuid::new_v4().to_string(); - let conn = db.conn(); - sq_execute( - &conn, - dbq::git_credentials::insert( - &id, - &user.user_id, - &label, - &host, - &path_prefix, - &header_name, - &header_value_enc, - ), - ) + db.insert_git_credential(NewGitCredentialRecord { + id: id.clone(), + user_id: user.user_id.clone(), + label, + host, + path_prefix, + header_name, + header_value_enc, + }) + .await .map_err(ApiErr::from_db("create git credential"))?; - let created = sq_query_row( - &conn, - dbq::git_credentials::get_by_id_and_user(&id, &user.user_id), - |row| { - let current_id: String = row.get(0)?; - Ok(GitCredentialSummary { - id: current_id, - label: row.get(1)?, - host: row.get(2)?, - path_prefix: row.get(3)?, - header_name: row.get(4)?, - created_at: row.get(5)?, - updated_at: row.get(6)?, - last_used_at: row.get(7)?, - }) - }, - ) - .map_err(ApiErr::from_db("reload git credential"))?; + let created = db + .get_git_credential_by_id_and_user(&id, &user.user_id) + .await + .map_err(ApiErr::from_db("reload git credential"))?; Ok((StatusCode::CREATED, Json(created))) } @@ -840,12 +736,10 @@ pub async fn delete_git_credential( user: AuthUser, ) -> Result, ApiErr> { enforce_csrf_if_cookie_auth(&headers, &config, user.auth_via_cookie)?; - let conn = db.conn(); - let affected = sq_execute( - &conn, - dbq::git_credentials::delete_by_id_and_user(id.as_str(), &user.user_id), - ) - .map_err(ApiErr::from_db("delete git credential"))?; + let affected = db + .delete_git_credential_by_id_and_user(id.as_str(), &user.user_id) + .await + .map_err(ApiErr::from_db("delete git credential"))?; if affected == 0 { return Err(ApiErr::not_found("credential not found")); diff --git a/crates/server/src/routes/ingest/auth.rs b/crates/server/src/routes/ingest/auth.rs index 46564cee..a75ef2d7 100644 --- a/crates/server/src/routes/ingest/auth.rs +++ b/crates/server/src/routes/ingest/auth.rs @@ -1,7 +1,7 @@ -use 
opensession_api::{db as dbq, parse_preview_source::GitSource}; +use opensession_api::parse_preview_source::GitSource; use crate::AppConfig; -use crate::storage::{Db, sq_query_map, sq_query_row}; +use crate::storage::Db; use super::errors::PreviewRouteError; use super::remote::{ @@ -9,12 +9,13 @@ use super::remote::{ validate_remote_url, }; -pub(super) fn resolve_optional_user_id( +pub(super) async fn resolve_optional_user_id( headers: &axum::http::HeaderMap, db: &Db, config: &AppConfig, ) -> Result, PreviewRouteError> { super::super::auth::try_auth_from_headers(headers, db, config) + .await .map(|user| user.map(|row| row.user_id)) .map_err(|_| PreviewRouteError::unauthorized("invalid authorization token")) } @@ -35,7 +36,7 @@ pub(super) struct GitFetchAuthHeader { pub(super) source: GitCredentialSource, } -pub(super) fn resolve_fetch_auth_header( +pub(super) async fn resolve_fetch_auth_header( source: &GitSource, db: &Db, config: &AppConfig, @@ -54,16 +55,13 @@ pub(super) fn resolve_fetch_auth_header( .ok_or_else(|| PreviewRouteError::invalid_source("remote host is required"))? 
.to_ascii_lowercase(); let repo_path = repo_path_from_remote(&remote)?; - let conn = db.conn(); let gitlab_hosts = configured_gitlab_hosts(config); if let Some(provider) = provider_for_host(&host, &gitlab_hosts) { - let provider_token_enc: Option = sq_query_row( - &conn, - dbq::oauth_provider_tokens::get_by_user_provider_host(user_id, provider, &host), - |row| row.get(1), - ) - .ok(); + let provider_token_enc = db + .get_provider_token_enc(user_id, provider, &host) + .await + .map_err(|_| PreviewRouteError::fetch_failed("failed loading provider credential"))?; if let Some(enc) = provider_token_enc { let token = keyring.decrypt(&enc).map_err(|_| { PreviewRouteError::fetch_failed("failed to decrypt provider credential") @@ -76,32 +74,23 @@ pub(super) fn resolve_fetch_auth_header( } } - let manual_rows: Vec<(String, String, String, String)> = sq_query_map( - &conn, - dbq::git_credentials::list_for_host_with_secret(user_id, &host), - |row| { - Ok(( - row.get::<_, String>(0)?, - row.get::<_, String>(3)?, - row.get::<_, String>(4)?, - row.get::<_, String>(5)?, - )) - }, - ) - .map_err(|_| PreviewRouteError::fetch_failed("failed loading git credentials"))?; + let manual_rows = db + .list_git_credential_secrets_for_host(user_id, &host) + .await + .map_err(|_| PreviewRouteError::fetch_failed("failed loading git credentials"))?; - for (credential_id, path_prefix, header_name, secret_enc) in manual_rows { - if !path_prefix_matches(&repo_path, &path_prefix) { + for row in manual_rows { + if !path_prefix_matches(&repo_path, &row.path_prefix) { continue; } let secret = keyring - .decrypt(&secret_enc) + .decrypt(&row.header_value_enc) .map_err(|_| PreviewRouteError::fetch_failed("failed to decrypt git credential"))?; return Ok(Some(GitFetchAuthHeader { - header_name, + header_name: row.header_name, header_value: secret, source: GitCredentialSource::Manual { - credential_id, + credential_id: row.credential_id, user_id: user_id.to_string(), }, })); diff --git 
a/crates/server/src/routes/ingest/fetch.rs b/crates/server/src/routes/ingest/fetch.rs index c43daca3..cff85493 100644 --- a/crates/server/src/routes/ingest/fetch.rs +++ b/crates/server/src/routes/ingest/fetch.rs @@ -4,7 +4,7 @@ use opensession_api::parse_preview_source::GitSource; use reqwest::header::CONTENT_TYPE; use crate::AppConfig; -use crate::storage::{Db, sq_execute}; +use crate::storage::Db; use super::auth::{GitCredentialSource, GitFetchAuthHeader, resolve_fetch_auth_header}; use super::errors::PreviewRouteError; @@ -23,7 +23,7 @@ pub(super) async fn fetch_git_source( ensure_remote_resolves_public(&remote).await?; let gitlab_hosts = configured_gitlab_hosts(config); let url = build_git_raw_url(source, &gitlab_hosts)?; - let fetch_auth = resolve_fetch_auth_header(source, db, config, user_id)?; + let fetch_auth = resolve_fetch_auth_header(source, db, config, user_id).await?; let client = reqwest::Client::builder() .redirect(reqwest::redirect::Policy::none()) .timeout(Duration::from_secs(FETCH_TIMEOUT_SECS)) @@ -114,14 +114,9 @@ pub(super) async fn fetch_git_source( .. 
}) = fetch_auth { - let conn = db.conn(); - let _ = sq_execute( - &conn, - opensession_api::db::git_credentials::touch_last_used( - credential_id.as_str(), - user_id.as_str(), - ), - ); + let _ = db + .touch_git_credential_last_used(credential_id.as_str(), user_id.as_str()) + .await; } Ok(body.to_vec()) diff --git a/crates/server/src/routes/ingest/mod.rs b/crates/server/src/routes/ingest/mod.rs index 77b3df8f..a5d05085 100644 --- a/crates/server/src/routes/ingest/mod.rs +++ b/crates/server/src/routes/ingest/mod.rs @@ -37,8 +37,9 @@ pub async fn preview( headers: HeaderMap, Json(req): Json, ) -> Result, (StatusCode, Json)> { - let user_id = - resolve_optional_user_id(&headers, &db, &config).map_err(PreviewRouteError::into_http)?; + let user_id = resolve_optional_user_id(&headers, &db, &config) + .await + .map_err(PreviewRouteError::into_http)?; let input = prepare_parse_input_with_ctx(req.source, Some(&db), Some(&config), user_id.as_deref()) .await diff --git a/crates/server/src/routes/oauth.rs b/crates/server/src/routes/oauth.rs index 7018e3bd..6473cfe1 100644 --- a/crates/server/src/routes/oauth.rs +++ b/crates/server/src/routes/oauth.rs @@ -7,13 +7,13 @@ use axum::{ use uuid::Uuid; use opensession_api::{ - OAuthLinkResponse, crypto, db as dbq, + OAuthLinkResponse, crypto, oauth::{self, AuthProvidersResponse, OAuthProviderConfig, OAuthProviderInfo}, }; use super::auth::AuthUser; use crate::error::ApiErr; -use crate::storage::{Db, sq_execute, sq_query_row}; +use crate::storage::Db; use crate::{AppConfig, AppState}; // --------------------------------------------------------------------------- @@ -66,7 +66,7 @@ fn resolve_base_url(headers: &HeaderMap, fallback: &str, prefer_request_host: bo } } -fn maybe_store_provider_access_token( +async fn maybe_store_provider_access_token( db: &Db, config: &AppConfig, user_id: &str, @@ -79,18 +79,14 @@ fn maybe_store_provider_access_token( let provider_host = oauth_provider_host(provider)?; let encrypted = 
keyring.encrypt(access_token).map_err(ApiErr::from)?; let token_id = Uuid::new_v4().to_string(); - let conn = db.conn(); - sq_execute( - &conn, - dbq::oauth_provider_tokens::upsert_access_token( - &token_id, - user_id, - &provider.id, - &provider_host, - &encrypted, - None, - ), + db.upsert_oauth_provider_access_token( + &token_id, + user_id, + &provider.id, + &provider_host, + &encrypted, ) + .await .map_err(ApiErr::from_db("oauth provider token upsert"))?; Ok(()) } @@ -145,12 +141,9 @@ pub async fn redirect( .format("%Y-%m-%d %H:%M:%S") .to_string(); - let conn = db.conn(); - sq_execute( - &conn, - dbq::oauth::insert_state(&state, &provider_id, &expires_at, None), - ) - .map_err(ApiErr::from_db("oauth state insert"))?; + db.insert_oauth_state(&state, &provider_id, &expires_at, None) + .await + .map_err(ApiErr::from_db("oauth state insert"))?; let base_url = resolve_base_url(&headers, &config.base_url, config.oauth_use_request_host); let redirect_uri = format!("{}/api/auth/oauth/{}/callback", base_url, provider_id); @@ -183,32 +176,19 @@ pub async fn callback( .ok_or_else(|| ApiErr::bad_request("missing state parameter"))?; // Validate state (scope the MutexGuard so it's dropped before await) - let (_state_provider, linking_user_id) = { - let conn = db.conn(); - let state_row = sq_query_row(&conn, dbq::oauth::validate_state(state_param), |row| { - Ok(( - row.get::<_, String>(1)?, // provider - row.get::<_, String>(2)?, // expires_at - row.get::<_, Option>(3)?, // user_id - )) - }) + let state_row = db + .validate_oauth_state(state_param) + .await .map_err(|_| ApiErr::bad_request("invalid OAuth state"))?; - - let (sp, expires_at, lu) = state_row; - - if sp != provider_id { - return Err(ApiErr::bad_request("OAuth state provider mismatch")); - } - - let now = chrono::Utc::now().format("%Y-%m-%d %H:%M:%S").to_string(); - if expires_at < now { - return Err(ApiErr::bad_request("OAuth state expired")); - } - - // Delete used state - sq_execute(&conn, 
dbq::oauth::delete_state(state_param)).ok(); - (sp, lu) - }; // conn dropped here, before any .await + if state_row.provider != provider_id { + return Err(ApiErr::bad_request("OAuth state provider mismatch")); + } + let now = chrono::Utc::now().format("%Y-%m-%d %H:%M:%S").to_string(); + if state_row.expires_at < now { + return Err(ApiErr::bad_request("OAuth state expired")); + } + db.delete_oauth_state(state_param).await.ok(); + let linking_user_id = state_row.user_id; // Exchange code for access token let redirect_uri = format!("{}/api/auth/oauth/{}/callback", base_url, provider_id); @@ -280,17 +260,13 @@ pub async fn callback( let user_info = oauth::extract_user_info(provider, &userinfo, emails.as_deref()).map_err(ApiErr::from)?; - let conn = db.conn(); - // ── Linking mode ── if let Some(ref link_uid) = linking_user_id { // Check if this provider identity is already linked to another account - let existing_user: Option = sq_query_row( - &conn, - dbq::oauth::find_by_provider(&provider_id, &user_info.provider_user_id), - |row| row.get(0), - ) - .ok(); + let existing_user = db + .find_oauth_user_id_by_provider(&provider_id, &user_info.provider_user_id) + .await + .map_err(ApiErr::from_db("oauth link lookup"))?; if let Some(ref existing) = existing_user { if existing != link_uid { @@ -302,19 +278,16 @@ pub async fn callback( } } - sq_execute( - &conn, - dbq::oauth::upsert_identity( - link_uid, - &provider_id, - &user_info.provider_user_id, - Some(&user_info.username), - user_info.avatar_url.as_deref(), - None, - ), + db.upsert_oauth_identity( + link_uid, + &provider_id, + &user_info.provider_user_id, + Some(&user_info.username), + user_info.avatar_url.as_deref(), ) + .await .map_err(ApiErr::from_db("oauth link upsert"))?; - maybe_store_provider_access_token(&db, &config, link_uid, provider, &access_token)?; + maybe_store_provider_access_token(&db, &config, link_uid, provider, &access_token).await?; return Ok( 
Redirect::temporary(&format!("{}/settings?oauth_linked=true", base_url)) @@ -325,57 +298,45 @@ pub async fn callback( // ── Normal login/register flow ── // Check if OAuth identity already exists - let existing_user_id: Option = sq_query_row( - &conn, - dbq::oauth::find_by_provider(&provider_id, &user_info.provider_user_id), - |row| row.get(0), - ) - .ok(); + let existing_user_id = db + .find_oauth_user_id_by_provider(&provider_id, &user_info.provider_user_id) + .await + .map_err(ApiErr::from_db("oauth identity lookup"))?; let (user_id, nickname) = if let Some(uid) = existing_user_id { // Update provider info - sq_execute( - &conn, - dbq::oauth::upsert_identity( - &uid, - &provider_id, - &user_info.provider_user_id, - Some(&user_info.username), - user_info.avatar_url.as_deref(), - None, - ), + db.upsert_oauth_identity( + &uid, + &provider_id, + &user_info.provider_user_id, + Some(&user_info.username), + user_info.avatar_url.as_deref(), ) + .await .ok(); - let nick: String = sq_query_row( - &conn, - dbq::users::get_by_id(&uid), - |row| row.get(1), // col 1 = nickname - ) - .unwrap_or_else(|_| user_info.username.clone()); + let nick = db + .get_user_nickname(&uid) + .await + .unwrap_or_else(|_| user_info.username.clone()); (uid, nick) } else { // Check if email matches existing user (auto-link) - let by_email = user_info.email.as_ref().and_then(|email| { - sq_query_row(&conn, dbq::users::get_by_email_for_login(email), |row| { - Ok((row.get::<_, String>(0)?, row.get::<_, String>(1)?)) - }) - .ok() - }); + let by_email = match user_info.email.as_deref() { + Some(email) => db.get_user_id_and_nickname_by_email(email).await.ok(), + None => None, + }; if let Some((uid, nick)) = by_email { - sq_execute( - &conn, - dbq::oauth::upsert_identity( - &uid, - &provider_id, - &user_info.provider_user_id, - Some(&user_info.username), - user_info.avatar_url.as_deref(), - None, - ), + db.upsert_oauth_identity( + &uid, + &provider_id, + &user_info.provider_user_id, + 
Some(&user_info.username), + user_info.avatar_url.as_deref(), ) + .await .ok(); (uid, nick) } else { @@ -384,33 +345,28 @@ pub async fn callback( let username = user_info.username.clone(); // OAuth users have no password — insert with email but empty hash/salt - sq_execute( - &conn, - dbq::users::insert_oauth(&user_id, &username, user_info.email.as_deref()), - ) - .map_err(ApiErr::from_db("create user from oauth"))?; - - sq_execute( - &conn, - dbq::oauth::upsert_identity( - &user_id, - &provider_id, - &user_info.provider_user_id, - Some(&user_info.username), - user_info.avatar_url.as_deref(), - None, - ), + db.insert_oauth_user(&user_id, &username, user_info.email.as_deref()) + .await + .map_err(ApiErr::from_db("create user from oauth"))?; + + db.upsert_oauth_identity( + &user_id, + &provider_id, + &user_info.provider_user_id, + Some(&user_info.username), + user_info.avatar_url.as_deref(), ) + .await .map_err(ApiErr::from_db("oauth identity insert"))?; (user_id, username) } }; - drop(conn); - maybe_store_provider_access_token(&db, &config, &user_id, provider, &access_token)?; + maybe_store_provider_access_token(&db, &config, &user_id, provider, &access_token).await?; // Issue tokens - let tokens = super::auth::issue_tokens_pub(&db, &config.jwt_secret, &user_id, &nickname)?; + let tokens = + super::auth::issue_tokens_pub(&db, &config.jwt_secret, &user_id, &nickname).await?; // Redirect to frontend without exposing tokens in URL fragments. 
let redirect_url = format!("{}/auth/callback", base_url); @@ -444,14 +400,10 @@ pub async fn link( } // Check if already linked - let conn = db.conn(); - let count: i64 = sq_query_row( - &conn, - dbq::oauth::has_provider(&user.user_id, &provider_id), - |row| row.get(0), - ) - .unwrap_or(0); - let already = count > 0; + let already = db + .user_has_oauth_provider(&user.user_id, &provider_id) + .await + .unwrap_or(false); if already { return Err(ApiErr::conflict(format!( "{} account already linked", @@ -464,11 +416,9 @@ pub async fn link( .format("%Y-%m-%d %H:%M:%S") .to_string(); - sq_execute( - &conn, - dbq::oauth::insert_state(&state, &provider_id, &expires_at, Some(&user.user_id)), - ) - .map_err(ApiErr::from_db("oauth state insert for link"))?; + db.insert_oauth_state(&state, &provider_id, &expires_at, Some(&user.user_id)) + .await + .map_err(ApiErr::from_db("oauth state insert for link"))?; let base_url = resolve_base_url(&headers, &config.base_url, config.oauth_use_request_host); let redirect_uri = format!("{}/api/auth/oauth/{}/callback", base_url, provider_id); diff --git a/crates/server/src/routes/sessions.rs b/crates/server/src/routes/sessions.rs index fa077e97..07c3b03b 100644 --- a/crates/server/src/routes/sessions.rs +++ b/crates/server/src/routes/sessions.rs @@ -6,14 +6,13 @@ use axum::{ }; use opensession_api::{ - LinkType, SessionDetail, SessionLink, SessionListQuery, SessionListResponse, - SessionRepoListResponse, SessionSummary, db, + SessionDetail, SessionListQuery, SessionListResponse, SessionRepoListResponse, }; use crate::AppConfig; use crate::error::ApiErr; use crate::routes::auth::AuthUser; -use crate::storage::{Db, session_from_row, sq_query_map, sq_query_row}; +use crate::storage::Db; const PUBLIC_LIST_CACHE_CONTROL: &str = "public, max-age=30, stale-while-revalidate=60"; @@ -61,24 +60,12 @@ pub async fn list_sessions( )); } - let built = db::sessions::list(&q); - let conn = db.conn(); - - // Count total - let total: i64 = 
sq_query_row(&conn, built.count_query, |row| row.get(0)) - .map_err(ApiErr::from_db("count sessions"))?; - - // Fetch page - let sessions: Vec = sq_query_map(&conn, built.select_query, session_from_row) + let payload: SessionListResponse = db + .list_sessions(&q) + .await .map_err(ApiErr::from_db("list sessions"))?; - let mut resp = Json(SessionListResponse { - sessions, - total, - page: built.page, - per_page: built.per_page, - }) - .into_response(); + let mut resp = Json(payload).into_response(); let has_session_cookie = headers .get(header::COOKIE) @@ -111,8 +98,9 @@ pub async fn list_session_repos( )); } - let conn = db.conn(); - let repos: Vec = sq_query_map(&conn, db::sessions::list_repo_names(), |row| row.get(0)) + let repos = db + .list_session_repos() + .await .map_err(ApiErr::from_db("list session repos"))?; Ok(Json(SessionRepoListResponse { repos })) @@ -127,34 +115,12 @@ pub async fn get_session( State(db): State, Path(id): Path, ) -> Result, ApiErr> { - let conn = db.conn(); - - let summary: SessionSummary = - sq_query_row(&conn, db::sessions::get_by_id(&id), session_from_row) - .map_err(|_| ApiErr::not_found("session not found"))?; - - // Fetch linked sessions - let linked_sessions: Vec = - sq_query_map(&conn, db::sessions::links_by_session(&id), |row| { - let lt: String = row.get(2)?; - Ok(SessionLink { - session_id: row.get(0)?, - linked_session_id: row.get(1)?, - link_type: match lt.as_str() { - "related" => LinkType::Related, - "parent" => LinkType::Parent, - "child" => LinkType::Child, - _ => LinkType::Handoff, - }, - created_at: row.get(3)?, - }) - }) - .map_err(ApiErr::from_db("query session_links"))?; - - Ok(Json(SessionDetail { - summary, - linked_sessions, - })) + let detail: SessionDetail = db + .get_session_detail(&id) + .await + .map_err(|_| ApiErr::not_found("session not found"))?; + + Ok(Json(detail)) } // --------------------------------------------------------------------------- @@ -166,17 +132,12 @@ pub async fn get_session_raw( 
State(db): State, Path(id): Path, ) -> Result { - let conn = db.conn(); - - let (body_storage_key, body_url): (String, Option) = - sq_query_row(&conn, db::sessions::get_storage_info(&id), |row| { - Ok((row.get(0)?, row.get(1)?)) - }) + let info = db + .get_session_storage_info(&id) + .await .map_err(|_| ApiErr::not_found("session not found"))?; - drop(conn); - - match resolve_raw_body_source(body_storage_key, body_url)? { + match resolve_raw_body_source(info.body_storage_key, info.body_url)? { RawBodySource::RedirectUrl(url) => { let location = HeaderValue::from_str(&url) .map_err(|_| ApiErr::internal("invalid session body URL"))?; @@ -185,7 +146,7 @@ pub async fn get_session_raw( Ok(response) } RawBodySource::LocalStorage(storage_key) => { - let body = db.read_body(&storage_key).map_err(|e| { + let body = db.read_body(&storage_key).await.map_err(|e| { tracing::error!("read body: {e}"); ApiErr::internal("failed to read session body") })?; diff --git a/crates/server/src/storage.rs b/crates/server/src/storage.rs index 70ea926b..fddd0eca 100644 --- a/crates/server/src/storage.rs +++ b/crates/server/src/storage.rs @@ -1,52 +1,888 @@ use anyhow::{Context, Result}; use rusqlite::Connection; +use std::fmt; use std::path::{Path, PathBuf}; use std::sync::{Arc, Mutex}; -use opensession_api::SessionSummary; -use opensession_api::db; +use opensession_api::{ + GitCredentialSummary, LinkType, SessionDetail, SessionLink, SessionListQuery, + SessionListResponse, SessionSummary, db, oauth, +}; -/// Shared database state +/// Shared database state. 
#[derive(Clone)] pub struct Db { conn: Arc>, data_dir: PathBuf, } +#[derive(Debug)] +pub enum StorageError { + Sqlite(rusqlite::Error), + Join(tokio::task::JoinError), + Poisoned, +} + +impl StorageError { + pub fn is_constraint_violation(&self) -> bool { + matches!( + self, + Self::Sqlite(rusqlite::Error::SqliteFailure(err, _)) + if err.code == rusqlite::ErrorCode::ConstraintViolation + ) + } +} + +impl fmt::Display for StorageError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Sqlite(err) => write!(f, "{err}"), + Self::Join(err) => write!(f, "database worker join failed: {err}"), + Self::Poisoned => write!(f, "database mutex poisoned"), + } + } +} + +impl std::error::Error for StorageError { + fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { + match self { + Self::Sqlite(err) => Some(err), + Self::Join(err) => Some(err), + Self::Poisoned => None, + } + } +} + +impl From for StorageError { + fn from(value: rusqlite::Error) -> Self { + Self::Sqlite(value) + } +} + +#[derive(Debug, Clone)] +pub struct AuthUserRecord { + pub user_id: String, + pub nickname: String, + pub email: Option, +} + +#[derive(Debug, Clone)] +pub struct LoginUserRecord { + pub user_id: String, + pub nickname: String, + pub password_hash: Option, + pub password_salt: Option, +} + +#[derive(Debug, Clone)] +pub struct RefreshTokenRecord { + pub token_id: String, + pub user_id: String, + pub expires_at: String, + pub nickname: String, +} + +#[derive(Debug, Clone)] +pub struct PasswordFields { + pub password_hash: Option, + pub password_salt: Option, +} + +#[derive(Debug)] +pub struct UserSettingsData { + pub email: Option, + pub avatar_url: Option, + pub oauth_providers: Vec, + pub created_at: String, +} + +#[derive(Debug, Clone)] +pub struct SessionStorageInfo { + pub body_storage_key: String, + pub body_url: Option, +} + +#[derive(Debug, Clone)] +pub struct OAuthStateRecord { + pub provider: String, + pub expires_at: String, + pub user_id: 
Option, +} + +#[derive(Debug, Clone)] +pub struct GitCredentialSecretRecord { + pub credential_id: String, + pub path_prefix: String, + pub header_name: String, + pub header_value_enc: String, +} + +#[derive(Debug, Clone)] +pub struct NewGitCredentialRecord { + pub id: String, + pub user_id: String, + pub label: String, + pub host: String, + pub path_prefix: String, + pub header_name: String, + pub header_value_enc: String, +} + impl Db { - pub fn conn(&self) -> std::sync::MutexGuard<'_, Connection> { - self.conn.lock().expect("database mutex poisoned") + async fn with_conn(&self, op: F) -> std::result::Result + where + T: Send + 'static, + F: FnOnce(&Connection) -> rusqlite::Result + Send + 'static, + { + let conn = Arc::clone(&self.conn); + tokio::task::spawn_blocking(move || { + let conn = conn.lock().map_err(|_| StorageError::Poisoned)?; + op(&conn).map_err(StorageError::from) + }) + .await + .map_err(StorageError::Join)? } - /// Path to the session body storage directory - pub fn bodies_dir(&self) -> PathBuf { + fn bodies_dir(&self) -> PathBuf { self.data_dir.join("bodies") } - /// Write a session body as HAIL JSONL to disk, return the storage key - pub fn write_body(&self, session_id: &str, data: &[u8]) -> Result { + /// Write a session body as HAIL JSONL to disk, return the storage key. + pub async fn write_body(&self, session_id: &str, data: &[u8]) -> Result { let dir = self.bodies_dir(); - std::fs::create_dir_all(&dir)?; + tokio::fs::create_dir_all(&dir).await?; let key = format!("{session_id}.hail.jsonl"); let path = dir.join(&key); - std::fs::write(&path, data).context("writing session body")?; + tokio::fs::write(&path, data) + .await + .context("writing session body")?; Ok(key) } - /// Read a session body from disk - pub fn read_body(&self, storage_key: &str) -> Result> { + /// Read a session body from disk. 
+ pub async fn read_body(&self, storage_key: &str) -> Result> { let path = self.bodies_dir().join(storage_key); - std::fs::read(&path).context("reading session body") + tokio::fs::read(&path).await.context("reading session body") + } + + pub async fn list_sessions( + &self, + query: &SessionListQuery, + ) -> std::result::Result { + let query = SessionListQuery { + page: query.page, + per_page: query.per_page, + search: query.search.clone(), + tool: query.tool.clone(), + git_repo_name: query.git_repo_name.clone(), + sort: query.sort.clone(), + time_range: query.time_range.clone(), + }; + self.with_conn(move |conn| { + let built = db::sessions::list(&query); + let total: i64 = sq_query_row(conn, built.count_query, |row| row.get(0))?; + let sessions = sq_query_map(conn, built.select_query, session_from_row)?; + Ok(SessionListResponse { + sessions, + total, + page: built.page, + per_page: built.per_page, + }) + }) + .await + } + + pub async fn list_session_repos(&self) -> std::result::Result, StorageError> { + self.with_conn(move |conn| { + sq_query_map(conn, db::sessions::list_repo_names(), |row| row.get(0)) + }) + .await + } + + pub async fn get_session_detail( + &self, + id: &str, + ) -> std::result::Result { + let id = id.to_string(); + self.with_conn(move |conn| { + let summary = sq_query_row(conn, db::sessions::get_by_id(&id), session_from_row)?; + let linked_sessions = sq_query_map(conn, db::sessions::links_by_session(&id), |row| { + let link_type: String = row.get(2)?; + Ok(SessionLink { + session_id: row.get(0)?, + linked_session_id: row.get(1)?, + link_type: match link_type.as_str() { + "related" => LinkType::Related, + "parent" => LinkType::Parent, + "child" => LinkType::Child, + _ => LinkType::Handoff, + }, + created_at: row.get(3)?, + }) + })?; + Ok(SessionDetail { + summary, + linked_sessions, + }) + }) + .await + } + + pub async fn get_session_storage_info( + &self, + id: &str, + ) -> std::result::Result { + let id = id.to_string(); + self.with_conn(move 
|conn| { + sq_query_row(conn, db::sessions::get_storage_info(&id), |row| { + Ok(SessionStorageInfo { + body_storage_key: row.get(0)?, + body_url: row.get(1)?, + }) + }) + }) + .await + } + + pub async fn delete_session(&self, id: &str) -> std::result::Result { + let id = id.to_string(); + self.with_conn(move |conn| { + let exists = match sq_query_row(conn, db::sessions::get_by_id(&id), |_row| Ok(())) { + Ok(()) => true, + Err(rusqlite::Error::QueryReturnedNoRows) => false, + Err(err) => return Err(err), + }; + if !exists { + return Ok(false); + } + + sq_execute(conn, db::sessions::delete_links(&id))?; + sq_execute(conn, db::sessions::delete(&id))?; + let _ = sq_execute(conn, db::sessions::delete_fts(&id)); + Ok(true) + }) + .await + } + + pub async fn get_auth_user_by_api_key_hash( + &self, + key_hash: &str, + ) -> std::result::Result { + let key_hash = key_hash.to_string(); + self.with_conn(move |conn| { + sq_query_row( + conn, + db::api_keys::get_user_by_valid_key_hash(&key_hash), + |row| { + Ok(AuthUserRecord { + user_id: row.get(0)?, + nickname: row.get(1)?, + email: row.get(2)?, + }) + }, + ) + }) + .await + } + + pub async fn get_auth_user_by_id( + &self, + user_id: &str, + ) -> std::result::Result { + let user_id = user_id.to_string(); + self.with_conn(move |conn| { + sq_query_row(conn, db::users::get_by_id(&user_id), |row| { + Ok(AuthUserRecord { + user_id: row.get(0)?, + nickname: row.get(1)?, + email: row.get(2)?, + }) + }) + }) + .await + } + + pub async fn insert_refresh_token( + &self, + token_id: &str, + user_id: &str, + token_hash: &str, + expires_at: &str, + ) -> std::result::Result<(), StorageError> { + let token_id = token_id.to_string(); + let user_id = user_id.to_string(); + let token_hash = token_hash.to_string(); + let expires_at = expires_at.to_string(); + self.with_conn(move |conn| { + sq_execute( + conn, + db::users::insert_refresh_token(&token_id, &user_id, &token_hash, &expires_at), + )?; + Ok(()) + }) + .await + } + + pub async fn 
email_exists(&self, email: &str) -> std::result::Result { + let email = email.to_string(); + self.with_conn(move |conn| { + sq_query_row(conn, db::users::email_exists(&email), |row| row.get(0)) + }) + .await + } + + pub async fn insert_user_with_email( + &self, + user_id: &str, + nickname: &str, + email: &str, + password_hash: &str, + password_salt: &str, + ) -> std::result::Result<(), StorageError> { + let user_id = user_id.to_string(); + let nickname = nickname.to_string(); + let email = email.to_string(); + let password_hash = password_hash.to_string(); + let password_salt = password_salt.to_string(); + self.with_conn(move |conn| { + sq_execute( + conn, + db::users::insert_with_email( + &user_id, + &nickname, + &email, + &password_hash, + &password_salt, + ), + )?; + Ok(()) + }) + .await + } + + pub async fn get_login_user( + &self, + email: &str, + ) -> std::result::Result { + let email = email.to_string(); + self.with_conn(move |conn| { + sq_query_row(conn, db::users::get_by_email_for_login(&email), |row| { + Ok(LoginUserRecord { + user_id: row.get(0)?, + nickname: row.get(1)?, + password_hash: row.get(2)?, + password_salt: row.get(3)?, + }) + }) + }) + .await + } + + pub async fn get_user_id_and_nickname_by_email( + &self, + email: &str, + ) -> std::result::Result<(String, String), StorageError> { + let email = email.to_string(); + self.with_conn(move |conn| { + sq_query_row(conn, db::users::get_by_email_for_login(&email), |row| { + Ok((row.get(0)?, row.get(1)?)) + }) + }) + .await + } + + pub async fn lookup_refresh_token( + &self, + token_hash: &str, + ) -> std::result::Result { + let token_hash = token_hash.to_string(); + self.with_conn(move |conn| { + sq_query_row(conn, db::users::lookup_refresh_token(&token_hash), |row| { + Ok(RefreshTokenRecord { + token_id: row.get(0)?, + user_id: row.get(1)?, + expires_at: row.get(2)?, + nickname: row.get(3)?, + }) + }) + }) + .await + } + + pub async fn delete_refresh_token_by_id( + &self, + token_id: &str, + ) -> 
std::result::Result<(), StorageError> { + let token_id = token_id.to_string(); + self.with_conn(move |conn| { + sq_execute(conn, db::users::delete_refresh_token_by_id(&token_id))?; + Ok(()) + }) + .await + } + + pub async fn delete_refresh_token( + &self, + token_hash: &str, + ) -> std::result::Result<(), StorageError> { + let token_hash = token_hash.to_string(); + self.with_conn(move |conn| { + sq_execute(conn, db::users::delete_refresh_token(&token_hash))?; + Ok(()) + }) + .await + } + + pub async fn get_password_fields( + &self, + user_id: &str, + ) -> std::result::Result { + let user_id = user_id.to_string(); + self.with_conn(move |conn| { + sq_query_row(conn, db::users::get_password_fields(&user_id), |row| { + Ok(PasswordFields { + password_hash: row.get(0)?, + password_salt: row.get(1)?, + }) + }) + }) + .await + } + + pub async fn update_password( + &self, + user_id: &str, + password_hash: &str, + password_salt: &str, + ) -> std::result::Result<(), StorageError> { + let user_id = user_id.to_string(); + let password_hash = password_hash.to_string(); + let password_salt = password_salt.to_string(); + self.with_conn(move |conn| { + sq_execute( + conn, + db::users::update_password(&user_id, &password_hash, &password_salt), + )?; + Ok(()) + }) + .await + } + + pub async fn get_user_settings_data( + &self, + user_id: &str, + ) -> std::result::Result { + let user_id = user_id.to_string(); + self.with_conn(move |conn| { + let (email, avatar_url) = + sq_query_row(conn, db::users::get_email_avatar(&user_id), |row| { + Ok((row.get(0)?, row.get(1)?)) + })?; + + let oauth_providers = sq_query_map(conn, db::oauth::find_by_user(&user_id), |row| { + let provider: String = row.get(1)?; + Ok(oauth::LinkedProvider { + provider: provider.clone(), + provider_username: row.get::<_, Option>(3)?.unwrap_or_default(), + display_name: match provider.as_str() { + "github" => "GitHub".to_string(), + "gitlab" => "GitLab".to_string(), + other => other.to_string(), + }, + }) + })?; + + let 
created_at = sq_query_row(conn, db::users::get_settings_fields(&user_id), |row| { + row.get(0) + }) + .unwrap_or_default(); + + Ok(UserSettingsData { + email, + avatar_url, + oauth_providers, + created_at, + }) + }) + .await + } + + pub async fn move_active_api_keys_to_grace( + &self, + user_id: &str, + grace_until: &str, + ) -> std::result::Result<(), StorageError> { + let user_id = user_id.to_string(); + let grace_until = grace_until.to_string(); + self.with_conn(move |conn| { + sq_execute( + conn, + db::api_keys::move_active_to_grace(&user_id, &grace_until), + )?; + Ok(()) + }) + .await + } + + pub async fn insert_active_api_key( + &self, + key_id: &str, + user_id: &str, + key_hash: &str, + key_prefix: &str, + ) -> std::result::Result<(), StorageError> { + let key_id = key_id.to_string(); + let user_id = user_id.to_string(); + let key_hash = key_hash.to_string(); + let key_prefix = key_prefix.to_string(); + self.with_conn(move |conn| { + sq_execute( + conn, + db::api_keys::insert_active(&key_id, &user_id, &key_hash, &key_prefix), + )?; + Ok(()) + }) + .await + } + + pub async fn list_git_credentials( + &self, + user_id: &str, + ) -> std::result::Result, StorageError> { + let user_id = user_id.to_string(); + self.with_conn(move |conn| { + sq_query_map(conn, db::git_credentials::list_by_user(&user_id), |row| { + Ok(GitCredentialSummary { + id: row.get(0)?, + label: row.get(1)?, + host: row.get(2)?, + path_prefix: row.get(3)?, + header_name: row.get(4)?, + created_at: row.get(5)?, + updated_at: row.get(6)?, + last_used_at: row.get(7)?, + }) + }) + }) + .await + } + + pub async fn insert_git_credential( + &self, + record: NewGitCredentialRecord, + ) -> std::result::Result<(), StorageError> { + self.with_conn(move |conn| { + sq_execute( + conn, + db::git_credentials::insert( + &record.id, + &record.user_id, + &record.label, + &record.host, + &record.path_prefix, + &record.header_name, + &record.header_value_enc, + ), + )?; + Ok(()) + }) + .await + } + + pub async fn 
get_git_credential_by_id_and_user( + &self, + id: &str, + user_id: &str, + ) -> std::result::Result { + let id = id.to_string(); + let user_id = user_id.to_string(); + self.with_conn(move |conn| { + sq_query_row( + conn, + db::git_credentials::get_by_id_and_user(&id, &user_id), + |row| { + Ok(GitCredentialSummary { + id: row.get(0)?, + label: row.get(1)?, + host: row.get(2)?, + path_prefix: row.get(3)?, + header_name: row.get(4)?, + created_at: row.get(5)?, + updated_at: row.get(6)?, + last_used_at: row.get(7)?, + }) + }, + ) + }) + .await + } + + pub async fn delete_git_credential_by_id_and_user( + &self, + id: &str, + user_id: &str, + ) -> std::result::Result { + let id = id.to_string(); + let user_id = user_id.to_string(); + self.with_conn(move |conn| { + sq_execute( + conn, + db::git_credentials::delete_by_id_and_user(&id, &user_id), + ) + }) + .await + } + + pub async fn upsert_oauth_provider_access_token( + &self, + token_id: &str, + user_id: &str, + provider: &str, + provider_host: &str, + encrypted_token: &str, + ) -> std::result::Result<(), StorageError> { + let token_id = token_id.to_string(); + let user_id = user_id.to_string(); + let provider = provider.to_string(); + let provider_host = provider_host.to_string(); + let encrypted_token = encrypted_token.to_string(); + self.with_conn(move |conn| { + sq_execute( + conn, + db::oauth_provider_tokens::upsert_access_token( + &token_id, + &user_id, + &provider, + &provider_host, + &encrypted_token, + None, + ), + )?; + Ok(()) + }) + .await + } + + pub async fn insert_oauth_state( + &self, + state: &str, + provider: &str, + expires_at: &str, + user_id: Option<&str>, + ) -> std::result::Result<(), StorageError> { + let state = state.to_string(); + let provider = provider.to_string(); + let expires_at = expires_at.to_string(); + let user_id = user_id.map(ToOwned::to_owned); + self.with_conn(move |conn| { + sq_execute( + conn, + db::oauth::insert_state(&state, &provider, &expires_at, user_id.as_deref()), + )?; + 
Ok(()) + }) + .await + } + + pub async fn validate_oauth_state( + &self, + state: &str, + ) -> std::result::Result { + let state = state.to_string(); + self.with_conn(move |conn| { + sq_query_row(conn, db::oauth::validate_state(&state), |row| { + Ok(OAuthStateRecord { + provider: row.get(1)?, + expires_at: row.get(2)?, + user_id: row.get(3)?, + }) + }) + }) + .await + } + + pub async fn delete_oauth_state(&self, state: &str) -> std::result::Result<(), StorageError> { + let state = state.to_string(); + self.with_conn(move |conn| { + sq_execute(conn, db::oauth::delete_state(&state))?; + Ok(()) + }) + .await + } + + pub async fn find_oauth_user_id_by_provider( + &self, + provider: &str, + provider_user_id: &str, + ) -> std::result::Result, StorageError> { + let provider = provider.to_string(); + let provider_user_id = provider_user_id.to_string(); + self.with_conn(move |conn| { + match sq_query_row( + conn, + db::oauth::find_by_provider(&provider, &provider_user_id), + |row| row.get(0), + ) { + Ok(user_id) => Ok(Some(user_id)), + Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None), + Err(err) => Err(err), + } + }) + .await + } + + pub async fn upsert_oauth_identity( + &self, + user_id: &str, + provider: &str, + provider_user_id: &str, + provider_username: Option<&str>, + avatar_url: Option<&str>, + ) -> std::result::Result<(), StorageError> { + let user_id = user_id.to_string(); + let provider = provider.to_string(); + let provider_user_id = provider_user_id.to_string(); + let provider_username = provider_username.map(ToOwned::to_owned); + let avatar_url = avatar_url.map(ToOwned::to_owned); + self.with_conn(move |conn| { + sq_execute( + conn, + db::oauth::upsert_identity( + &user_id, + &provider, + &provider_user_id, + provider_username.as_deref(), + avatar_url.as_deref(), + None, + ), + )?; + Ok(()) + }) + .await + } + + pub async fn get_user_nickname( + &self, + user_id: &str, + ) -> std::result::Result { + let user_id = user_id.to_string(); + self.with_conn(move 
|conn| { + sq_query_row(conn, db::users::get_nickname(&user_id), |row| row.get(0)) + }) + .await + } + + pub async fn insert_oauth_user( + &self, + user_id: &str, + nickname: &str, + email: Option<&str>, + ) -> std::result::Result<(), StorageError> { + let user_id = user_id.to_string(); + let nickname = nickname.to_string(); + let email = email.map(ToOwned::to_owned); + self.with_conn(move |conn| { + sq_execute( + conn, + db::users::insert_oauth(&user_id, &nickname, email.as_deref()), + )?; + Ok(()) + }) + .await + } + + pub async fn user_has_oauth_provider( + &self, + user_id: &str, + provider: &str, + ) -> std::result::Result { + let user_id = user_id.to_string(); + let provider = provider.to_string(); + self.with_conn(move |conn| { + let count: i64 = + sq_query_row(conn, db::oauth::has_provider(&user_id, &provider), |row| { + row.get(0) + })?; + Ok(count > 0) + }) + .await + } + + pub async fn get_provider_token_enc( + &self, + user_id: &str, + provider: &str, + provider_host: &str, + ) -> std::result::Result, StorageError> { + let user_id = user_id.to_string(); + let provider = provider.to_string(); + let provider_host = provider_host.to_string(); + self.with_conn(move |conn| { + match sq_query_row( + conn, + db::oauth_provider_tokens::get_by_user_provider_host( + &user_id, + &provider, + &provider_host, + ), + |row| row.get(1), + ) { + Ok(token) => Ok(Some(token)), + Err(rusqlite::Error::QueryReturnedNoRows) => Ok(None), + Err(err) => Err(err), + } + }) + .await + } + + pub async fn list_git_credential_secrets_for_host( + &self, + user_id: &str, + host: &str, + ) -> std::result::Result, StorageError> { + let user_id = user_id.to_string(); + let host = host.to_string(); + self.with_conn(move |conn| { + sq_query_map( + conn, + db::git_credentials::list_for_host_with_secret(&user_id, &host), + |row| { + Ok(GitCredentialSecretRecord { + credential_id: row.get(0)?, + path_prefix: row.get(3)?, + header_name: row.get(4)?, + header_value_enc: row.get(5)?, + }) + }, + 
) + }) + .await + } + + pub async fn touch_git_credential_last_used( + &self, + credential_id: &str, + user_id: &str, + ) -> std::result::Result<(), StorageError> { + let credential_id = credential_id.to_string(); + let user_id = user_id.to_string(); + self.with_conn(move |conn| { + sq_execute( + conn, + db::git_credentials::touch_last_used(&credential_id, &user_id), + )?; + Ok(()) + }) + .await } } // ── sea-query ↔ rusqlite helpers ────────────────────────────────────────── /// Built query: `(sql, sea_query::Values)`. -pub type Built = (String, sea_query::Values); +type Built = (String, sea_query::Values); -/// Convert `sea_query::Values` to boxed rusqlite params. -pub fn sq_params(values: &sea_query::Values) -> Vec> { +fn sq_params(values: &sea_query::Values) -> Vec> { values .0 .iter() @@ -71,29 +907,26 @@ pub fn sq_params(values: &sea_query::Values) -> Vec rusqlite::Result { +fn sq_execute(conn: &Connection, (sql, values): Built) -> rusqlite::Result { let params = sq_params(&values); let refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(|p| p.as_ref()).collect(); conn.execute(&sql, refs.as_slice()) } -/// Query a single row from a built query. -pub fn sq_query_row( +fn sq_query_row( conn: &Connection, (sql, values): Built, - f: impl FnOnce(&rusqlite::Row) -> rusqlite::Result, + f: impl FnOnce(&rusqlite::Row<'_>) -> rusqlite::Result, ) -> rusqlite::Result { let params = sq_params(&values); let refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(|p| p.as_ref()).collect(); conn.query_row(&sql, refs.as_slice(), f) } -/// Prepare + query_map from a built query, collecting into a Vec. 
-pub fn sq_query_map( +fn sq_query_map( conn: &Connection, (sql, values): Built, - f: impl FnMut(&rusqlite::Row) -> rusqlite::Result, + f: impl FnMut(&rusqlite::Row<'_>) -> rusqlite::Result, ) -> rusqlite::Result> { let params = sq_params(&values); let refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(|p| p.as_ref()).collect(); @@ -102,10 +935,7 @@ pub fn sq_query_map( rows.collect() } -// ── Row mappers ─────────────────────────────────────────────────────────── - -/// Map a `session_columns()` row into a `SessionSummary`. -pub fn session_from_row(row: &rusqlite::Row<'_>) -> rusqlite::Result { +fn session_from_row(row: &rusqlite::Row<'_>) -> rusqlite::Result { Ok(SessionSummary { id: row.get(0)?, user_id: row.get(1)?, @@ -144,13 +974,12 @@ pub fn session_from_row(row: &rusqlite::Row<'_>) -> rusqlite::Result Result { std::fs::create_dir_all(data_dir)?; let db_path = data_dir.join("opensession.db"); let conn = Connection::open(&db_path).context("opening SQLite database")?; - // Enable WAL mode for better concurrent read performance conn.execute_batch("PRAGMA journal_mode=WAL;")?; conn.execute_batch("PRAGMA foreign_keys=ON;")?; @@ -195,7 +1024,6 @@ fn run_migrations(conn: &Connection) -> Result<()> { rebuild_oauth_provider_tokens_table(conn)?; } - // Bootstrap schema is single-file; ensure newer tables also exist for already-initialized DBs. 
conn.execute_batch( r#" CREATE TABLE IF NOT EXISTS git_credentials ( @@ -271,3 +1099,114 @@ ON oauth_provider_tokens(user_id, provider, provider_host); ) .context("rebuild oauth_provider_tokens table") } + +#[cfg(test)] +mod tests { + use super::*; + + fn test_data_dir(name: &str) -> PathBuf { + let path = std::env::temp_dir().join(format!( + "opensession-server-{name}-{}", + uuid::Uuid::new_v4() + )); + std::fs::create_dir_all(&path).expect("create temp data dir"); + path + } + + fn cleanup_dir(path: &Path) { + std::fs::remove_dir_all(path).expect("remove temp data dir"); + } + + fn insert_test_user(db: &Db, user_id: &str, nickname: &str) { + let conn = db.conn.lock().expect("db conn"); + sq_execute( + &conn, + db::users::insert_oauth(user_id, nickname, Some("test@example.com")), + ) + .expect("insert test user"); + } + + fn insert_test_session(db: &Db, session_id: &str, user_id: &str, storage_key: &str) { + let conn = db.conn.lock().expect("db conn"); + let params = db::sessions::InsertParams { + id: session_id, + user_id, + team_id: "team-1", + tool: "codex", + agent_provider: "openai", + agent_model: "gpt-5", + title: "Test Session", + description: "Description", + tags: "test", + created_at: "2026-03-09 12:00:00", + message_count: 1, + task_count: 1, + event_count: 1, + duration_seconds: 1, + total_input_tokens: 1, + total_output_tokens: 1, + body_storage_key: storage_key, + body_url: None, + git_remote: Some("https://github.com/hwisu/opensession"), + git_branch: Some("main"), + git_commit: Some("abc123"), + git_repo_name: Some("opensession"), + pr_number: None, + pr_url: None, + working_directory: Some("/tmp"), + files_modified: Some("src/lib.rs"), + files_read: Some("src/main.rs"), + has_errors: false, + max_active_agents: 1, + session_score: 42, + score_plugin: "default", + }; + sq_execute(&conn, db::sessions::insert(¶ms)).expect("insert test session"); + } + + #[tokio::test] + async fn body_round_trip_uses_async_fs() { + let data_dir = 
test_data_dir("body-round-trip"); + let db = init_db(&data_dir).expect("init db"); + + let key = db + .write_body("session-1", b"{\"type\":\"message\"}\n") + .await + .expect("write body"); + let body = db.read_body(&key).await.expect("read body"); + + assert_eq!(key, "session-1.hail.jsonl"); + assert_eq!(body, b"{\"type\":\"message\"}\n"); + + cleanup_dir(&data_dir); + } + + #[tokio::test] + async fn concurrent_session_reads_are_serialized_inside_storage() { + let data_dir = test_data_dir("concurrent-session-reads"); + let db = init_db(&data_dir).expect("init db"); + insert_test_user(&db, "user-1", "tester"); + insert_test_session(&db, "session-1", "user-1", "session-1.hail.jsonl"); + + let query = SessionListQuery { + page: 1, + per_page: 20, + search: None, + tool: None, + git_repo_name: None, + sort: None, + time_range: None, + }; + + let (list_result, detail_result) = + tokio::join!(db.list_sessions(&query), db.get_session_detail("session-1")); + + let list_result = list_result.expect("list sessions"); + let detail_result = detail_result.expect("session detail"); + assert_eq!(list_result.total, 1); + assert_eq!(list_result.sessions.len(), 1); + assert_eq!(detail_result.summary.id, "session-1"); + + cleanup_dir(&data_dir); + } +} diff --git a/crates/summary-runtime/Cargo.toml b/crates/summary-runtime/Cargo.toml new file mode 100644 index 00000000..d66efd30 --- /dev/null +++ b/crates/summary-runtime/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "opensession-summary-runtime" +version.workspace = true +edition.workspace = true +rust-version.workspace = true +license.workspace = true +repository.workspace = true +homepage.workspace = true +description = "Runtime adapters for OpenSession semantic summaries" + +[lib] +doctest = false + +[lints] +workspace = true + +[dependencies] +opensession-summary = { workspace = true } +opensession-runtime-config = { workspace = true } +opensession-core = { workspace = true } +serde = { workspace = true } +reqwest = { workspace 
= true } + +[dev-dependencies] +tokio = { workspace = true, features = ["macros", "rt-multi-thread"] } diff --git a/crates/summary-runtime/src/lib.rs b/crates/summary-runtime/src/lib.rs new file mode 100644 index 00000000..39cfb892 --- /dev/null +++ b/crates/summary-runtime/src/lib.rs @@ -0,0 +1,74 @@ +mod provider; + +pub use provider::{ + LocalSummaryProfile, detect_local_summary_profile, generate_summary, generate_text, +}; + +use opensession_core::trace::Session; +use opensession_runtime_config::SummarySettings; +use opensession_summary::git::{GitSummaryContext, GitSummaryService, ShellGitCommandRunner}; +use opensession_summary::{ + GitSummaryRequest, SemanticSummaryArtifact, classify_and_summarize_git_context, + summarize_session_with_provider, +}; +use std::path::Path; + +fn runtime_generate_summary<'a>( + settings: &'a SummarySettings, + prompt: &'a str, +) -> opensession_summary::SummaryGenerateFuture<'a> { + Box::pin(provider::generate_summary(settings, prompt)) +} + +pub async fn summarize_session( + session: &Session, + settings: &SummarySettings, + git_request: Option<&GitSummaryRequest>, +) -> Result { + let git_context = if settings.allows_git_changes_fallback() { + git_request.and_then(collect_git_context) + } else { + None + }; + + summarize_session_with_provider(session, settings, git_context, runtime_generate_summary).await +} + +pub async fn summarize_git_commit( + repo_root: &Path, + commit: &str, + settings: &SummarySettings, +) -> Result { + let request = GitSummaryRequest::from_commit(repo_root.to_path_buf(), commit.to_string()); + let context = collect_git_context(&request) + .ok_or_else(|| format!("unable to collect git summary context for commit `{commit}`"))?; + classify_and_summarize_git_context(context, settings, runtime_generate_summary).await +} + +pub async fn summarize_git_working_tree( + repo_root: &Path, + settings: &SummarySettings, +) -> Result { + let request = GitSummaryRequest::working_tree(repo_root.to_path_buf()); + let 
context = collect_git_context(&request) + .ok_or_else(|| "unable to collect git summary context for working tree".to_string())?; + classify_and_summarize_git_context(context, settings, runtime_generate_summary).await +} + +fn collect_git_context(request: &GitSummaryRequest) -> Option { + let service = GitSummaryService::new(ShellGitCommandRunner); + if let Some(commit) = request.commit.as_deref() { + return service.collect_commit_context( + &request.repo_root, + commit, + opensession_summary::MAX_FILE_CHANGE_ENTRIES, + opensession_summary::classify_arch_layer, + ); + } + + service.collect_working_tree_context( + &request.repo_root, + opensession_summary::MAX_FILE_CHANGE_ENTRIES, + opensession_summary::classify_arch_layer, + ) +} diff --git a/crates/summary-runtime/src/provider.rs b/crates/summary-runtime/src/provider.rs new file mode 100644 index 00000000..13aff3a8 --- /dev/null +++ b/crates/summary-runtime/src/provider.rs @@ -0,0 +1,376 @@ +use opensession_runtime_config::{SummaryProvider, SummarySettings}; +use opensession_summary::{SemanticSummary, parse_semantic_summary_or_fallback}; +use serde::{Deserialize, Serialize}; +use std::fs; +use std::path::PathBuf; +use std::process::{Command, Output, Stdio}; +use std::thread; +use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH}; + +const DEFAULT_OLLAMA_ENDPOINT: &str = "http://127.0.0.1:11434"; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct LocalSummaryProfile { + pub provider: SummaryProvider, + pub endpoint: String, + pub model: String, +} + +pub fn detect_local_summary_profile() -> Option { + first_available_profile([ + detect_ollama_profile(), + detect_codex_exec_profile(), + detect_claude_cli_profile(), + ]) +} + +fn first_available_profile( + profiles: [Option; N], +) -> Option { + profiles.into_iter().flatten().next() +} + +fn detect_ollama_profile() -> Option { + let output = Command::new("ollama").arg("list").output().ok()?; + if !output.status.success() { + return None; + } + let stdout = 
String::from_utf8_lossy(&output.stdout); + let model = parse_ollama_list_output(&stdout).into_iter().next()?; + Some(LocalSummaryProfile { + provider: SummaryProvider::Ollama, + endpoint: DEFAULT_OLLAMA_ENDPOINT.to_string(), + model, + }) +} + +fn detect_codex_exec_profile() -> Option { + if !command_available("codex", &["exec", "--help"]) { + return None; + } + Some(LocalSummaryProfile { + provider: SummaryProvider::CodexExec, + endpoint: String::new(), + model: String::new(), + }) +} + +fn detect_claude_cli_profile() -> Option { + if !command_available("claude", &["--help"]) { + return None; + } + Some(LocalSummaryProfile { + provider: SummaryProvider::ClaudeCli, + endpoint: String::new(), + model: String::new(), + }) +} + +fn command_available(program: &str, args: &[&str]) -> bool { + Command::new(program) + .args(args) + .stdout(Stdio::null()) + .stderr(Stdio::null()) + .status() + .map(|status| status.success()) + .unwrap_or(false) +} + +fn parse_ollama_list_output(raw: &str) -> Vec { + let mut models = Vec::new(); + for line in raw.lines() { + let trimmed = line.trim(); + if trimmed.is_empty() { + continue; + } + let lowered = trimmed.to_ascii_lowercase(); + if lowered.starts_with("name ") + || lowered.starts_with("error") + || lowered.starts_with("failed") + { + continue; + } + let Some(token) = trimmed.split_whitespace().next() else { + continue; + }; + let candidate = token.trim().to_string(); + if candidate.is_empty() || models.contains(&candidate) { + continue; + } + models.push(candidate); + } + models +} + +#[derive(Debug, Serialize)] +struct OllamaGenerateRequest<'a> { + model: &'a str, + prompt: &'a str, + stream: bool, +} + +#[derive(Debug, Deserialize)] +struct OllamaGenerateResponse { + response: String, +} + +pub async fn generate_summary( + settings: &SummarySettings, + prompt: &str, +) -> Result { + let raw = generate_text(settings, prompt).await?; + Ok(parse_semantic_summary_or_fallback(&raw, settings)) +} + +pub async fn 
generate_text(settings: &SummarySettings, prompt: &str) -> Result { + if prompt.trim().is_empty() { + return Err("summary prompt is empty".to_string()); + } + if !settings.is_configured() { + return Err("local summary provider is not configured".to_string()); + } + + match settings.provider.id { + SummaryProvider::Disabled => Err("local summary provider is disabled".to_string()), + SummaryProvider::Ollama => generate_text_with_ollama(settings, prompt).await, + SummaryProvider::CodexExec => generate_text_with_codex_exec(settings, prompt).await, + SummaryProvider::ClaudeCli => generate_text_with_claude_cli(settings, prompt).await, + } +} + +async fn generate_text_with_ollama( + settings: &SummarySettings, + prompt: &str, +) -> Result { + let endpoint = if settings.provider.endpoint.trim().is_empty() { + DEFAULT_OLLAMA_ENDPOINT + } else { + settings.provider.endpoint.trim() + }; + let url = format!("{}/api/generate", endpoint.trim_end_matches('/')); + let model = settings.provider.model.trim(); + if model.is_empty() { + return Err("ollama model is empty".to_string()); + } + + let client = reqwest::Client::builder() + .connect_timeout(Duration::from_secs(2)) + .timeout(Duration::from_secs(20)) + .build() + .map_err(|err| format!("failed to build local summary HTTP client: {err}"))?; + + let response = client + .post(url) + .json(&OllamaGenerateRequest { + model, + prompt, + stream: false, + }) + .send() + .await + .map_err(|err| format!("failed to call ollama summary API: {err}"))?; + + if !response.status().is_success() { + let status = response.status().as_u16(); + let body = response.text().await.unwrap_or_default(); + return Err(format!( + "ollama summary API returned {status}: {}", + body.trim() + )); + } + + let payload: OllamaGenerateResponse = response + .json() + .await + .map_err(|err| format!("failed to decode ollama summary response: {err}"))?; + if payload.response.trim().is_empty() { + return Err("ollama summary response was empty".to_string()); + } + + 
Ok(payload.response) +} + +async fn generate_text_with_codex_exec( + settings: &SummarySettings, + prompt: &str, +) -> Result { + let output_path = temp_cli_output_path("codex-summary"); + + let mut command = Command::new("codex"); + command + .arg("exec") + .arg("--skip-git-repo-check") + .arg("--sandbox") + .arg("read-only") + .arg("--output-last-message") + .arg(output_path.to_string_lossy().to_string()); + let model = settings.provider.model.trim(); + if !model.is_empty() { + command.arg("--model").arg(model); + } + command.arg(prompt); + + let output = run_command_with_timeout(command, Duration::from_secs(60)) + .map_err(|err| format!("failed to run codex exec summary: {err}"))?; + + let response = read_output_or_stdout(&output_path, &output); + if response.trim().is_empty() { + return Err("codex exec summary response was empty".to_string()); + } + Ok(response) +} + +async fn generate_text_with_claude_cli( + settings: &SummarySettings, + prompt: &str, +) -> Result { + let model = settings.provider.model.trim().to_string(); + let timeout = Duration::from_secs(60); + + let mut command = Command::new("claude"); + command.arg("-c"); + if !model.is_empty() { + command.arg("--model").arg(&model); + } + command.arg(prompt); + + let output = match run_command_with_timeout(command, timeout) { + Ok(output) => output, + Err(primary_error) => { + let mut fallback = Command::new("claude"); + fallback + .arg("--print") + .arg("--output-format") + .arg("text") + .arg("--no-session-persistence") + .arg("--tools") + .arg(""); + if !model.is_empty() { + fallback.arg("--model").arg(&model); + } + fallback.arg(prompt); + + run_command_with_timeout(fallback, timeout).map_err(|fallback_error| { + format!( + "failed to run claude summary (`claude -c` => {primary_error}; fallback => {fallback_error})" + ) + })? 
+ } + }; + + let response = String::from_utf8_lossy(&output.stdout).to_string(); + if response.trim().is_empty() { + return Err("claude summary response was empty".to_string()); + } + Ok(response) +} + +fn read_output_or_stdout(path: &PathBuf, output: &Output) -> String { + let file_text = fs::read_to_string(path).unwrap_or_default(); + let _ = fs::remove_file(path); + if !file_text.trim().is_empty() { + return file_text; + } + String::from_utf8_lossy(&output.stdout).to_string() +} + +fn temp_cli_output_path(prefix: &str) -> PathBuf { + let pid = std::process::id(); + let timestamp = SystemTime::now() + .duration_since(UNIX_EPOCH) + .map(|duration| duration.as_nanos()) + .unwrap_or(0); + std::env::temp_dir().join(format!("{prefix}-{pid}-{timestamp}.txt")) +} + +fn run_command_with_timeout(mut command: Command, timeout: Duration) -> Result { + command.stdout(Stdio::piped()).stderr(Stdio::piped()); + + let program = command.get_program().to_string_lossy().to_string(); + let mut child = command + .spawn() + .map_err(|err| format!("failed to spawn `{program}`: {err}"))?; + let started = Instant::now(); + + loop { + match child.try_wait() { + Ok(Some(_status)) => { + let output = child + .wait_with_output() + .map_err(|err| format!("failed to collect `{program}` output: {err}"))?; + if output.status.success() { + return Ok(output); + } + let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string(); + let stdout = String::from_utf8_lossy(&output.stdout).trim().to_string(); + let detail = if !stderr.is_empty() { + stderr + } else if !stdout.is_empty() { + stdout + } else { + format!("exit status {}", output.status) + }; + return Err(format!("`{program}` failed: {detail}")); + } + Ok(None) => { + if started.elapsed() >= timeout { + let _ = child.kill(); + let _ = child.wait(); + return Err(format!( + "`{program}` timed out after {}s", + timeout.as_secs() + )); + } + thread::sleep(Duration::from_millis(50)); + } + Err(err) => { + return Err(format!("failed while 
waiting for `{program}`: {err}")); + } + } + } +} + +#[cfg(test)] +mod tests { + use super::{ + LocalSummaryProfile, SummaryProvider, first_available_profile, parse_ollama_list_output, + }; + + #[test] + fn parse_ollama_list_output_extracts_model_names() { + let output = r#" +NAME ID SIZE MODIFIED +llama3.2:3b a80c4f17acd5 2.0 GB 3 hours ago +qwen2.5-coder:7b 2b0496514337 4.7 GB 1 day ago +"#; + + let models = parse_ollama_list_output(output); + assert_eq!( + models, + vec!["llama3.2:3b".to_string(), "qwen2.5-coder:7b".to_string()] + ); + } + + #[test] + fn parse_ollama_list_output_ignores_errors_and_empty_lines() { + let output = "\nError: could not connect to ollama\n"; + assert!(parse_ollama_list_output(output).is_empty()); + } + + #[test] + fn first_available_profile_preserves_provider_priority() { + let claude = Some(LocalSummaryProfile { + provider: SummaryProvider::ClaudeCli, + endpoint: String::new(), + model: String::new(), + }); + let codex = Some(LocalSummaryProfile { + provider: SummaryProvider::CodexExec, + endpoint: String::new(), + model: String::new(), + }); + + let selected = first_available_profile([None, codex.clone(), claude]); + assert_eq!(selected, codex); + } +} diff --git a/crates/summary/Cargo.toml b/crates/summary/Cargo.toml index f9e53434..a561d204 100644 --- a/crates/summary/Cargo.toml +++ b/crates/summary/Cargo.toml @@ -19,7 +19,6 @@ opensession-core = { workspace = true } opensession-runtime-config = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } -reqwest = { workspace = true } sha2 = { workspace = true } hex = "0.4" diff --git a/crates/summary/src/lib.rs b/crates/summary/src/lib.rs index 1ed39c48..e3544a6b 100644 --- a/crates/summary/src/lib.rs +++ b/crates/summary/src/lib.rs @@ -3,14 +3,16 @@ pub mod prompt; pub mod provider; pub mod text; pub mod types; -pub use prompt::{DEFAULT_SUMMARY_PROMPT_TEMPLATE_V2, validate_summary_prompt_template}; +pub use prompt::{ + DEFAULT_SUMMARY_PROMPT_TEMPLATE_V2, 
classify_arch_layer, validate_summary_prompt_template, +}; +pub use provider::{LayerFileChange, SemanticSummary, parse_semantic_summary_or_fallback}; -use crate::git::{GitSummaryContext, GitSummaryService, ShellGitCommandRunner}; +use crate::git::GitSummaryContext; use crate::prompt::{ - SummaryPromptConfig, build_summary_prompt, classify_arch_layer, collect_file_changes, - collect_timeline_snippets, contains_auth_security_keyword, + SummaryPromptConfig, build_summary_prompt, collect_file_changes, collect_timeline_snippets, + contains_auth_security_keyword, }; -use crate::provider::{SemanticSummary, generate_summary}; use crate::text::compact_summary_snippet; use crate::types::HailCompactFileChange; use opensession_core::trace::{Agent, ContentBlock, Event, EventType, Session}; @@ -18,10 +20,12 @@ use opensession_runtime_config::{SummaryProvider, SummarySettings}; use serde::{Deserialize, Serialize}; use sha2::{Digest, Sha256}; use std::collections::{BTreeMap, HashMap}; -use std::path::{Path, PathBuf}; +use std::future::Future; +use std::path::PathBuf; +use std::pin::Pin; const MAX_TIMELINE_SNIPPETS: usize = 32; -const MAX_FILE_CHANGE_ENTRIES: usize = 200; +pub const MAX_FILE_CHANGE_ENTRIES: usize = 200; const MAX_DIFF_HUNKS_PER_FILE: usize = 10; const MAX_DIFF_LINES_PER_HUNK: usize = 40; const MAX_DIFF_FILES_PER_LAYER: usize = 80; @@ -113,15 +117,18 @@ impl GitSummaryRequest { } } -pub fn detect_summary_provider() -> Option { - provider::detect_local_summary_profile() -} +pub type SummaryGenerateFuture<'a> = + Pin> + Send + 'a>>; -pub async fn summarize_session( +pub async fn summarize_session_with_provider( session: &Session, settings: &SummarySettings, - git_request: Option<&GitSummaryRequest>, -) -> Result { + git_context: Option, + generate_summary: Generate, +) -> Result +where + Generate: for<'a> Fn(&'a SummarySettings, &'a str) -> SummaryGenerateFuture<'a>, +{ let timeline = collect_timeline_snippets(session, MAX_TIMELINE_SNIPPETS, default_event_snippet); 
let files = collect_file_changes(session, MAX_FILE_CHANGE_ENTRIES); @@ -135,35 +142,28 @@ pub async fn summarize_session( }; if signals.is_empty() && settings.allows_git_changes_fallback() { - if let Some(request) = git_request { - if let Some(git_ctx) = collect_git_context(request) { - signals = summary_signals_from_git(git_ctx)?; - } + if let Some(git_ctx) = git_context { + signals = summary_signals_from_git(git_ctx)?; } } - summarize_from_signals(signals, settings).await -} - -pub async fn summarize_git_commit( - repo_root: &Path, - commit: &str, - settings: &SummarySettings, -) -> Result { - let request = GitSummaryRequest::from_commit(repo_root.to_path_buf(), commit.to_string()); - let context = collect_git_context(&request) - .ok_or_else(|| format!("unable to collect git summary context for commit `{commit}`"))?; - summarize_from_signals(summary_signals_from_git(context)?, settings).await + summarize_from_signals(signals, settings, generate_summary).await } -pub async fn summarize_git_working_tree( - repo_root: &Path, +pub async fn classify_and_summarize_git_context( + context: GitSummaryContext, settings: &SummarySettings, -) -> Result { - let request = GitSummaryRequest::working_tree(repo_root.to_path_buf()); - let context = collect_git_context(&request) - .ok_or_else(|| "unable to collect git summary context for working tree".to_string())?; - summarize_from_signals(summary_signals_from_git(context)?, settings).await + generate_summary: Generate, +) -> Result +where + Generate: for<'a> Fn(&'a SummarySettings, &'a str) -> SummaryGenerateFuture<'a>, +{ + summarize_from_signals( + summary_signals_from_git(context)?, + settings, + generate_summary, + ) + .await } #[derive(Debug, Clone)] @@ -182,10 +182,14 @@ impl SummarySignals { } } -async fn summarize_from_signals( +async fn summarize_from_signals( signals: SummarySignals, settings: &SummarySettings, -) -> Result { + generate_summary: Generate, +) -> Result +where + Generate: for<'a> Fn(&'a SummarySettings, 
&'a str) -> SummaryGenerateFuture<'a>, +{ let prompt_template = if settings.prompt.template.trim().is_empty() { DEFAULT_SUMMARY_PROMPT_TEMPLATE_V2 } else { @@ -281,24 +285,6 @@ async fn summarize_from_signals( } } -fn collect_git_context(request: &GitSummaryRequest) -> Option { - let service = GitSummaryService::new(ShellGitCommandRunner); - if let Some(commit) = request.commit.as_deref() { - return service.collect_commit_context( - &request.repo_root, - commit, - MAX_FILE_CHANGE_ENTRIES, - classify_arch_layer, - ); - } - - service.collect_working_tree_context( - &request.repo_root, - MAX_FILE_CHANGE_ENTRIES, - classify_arch_layer, - ) -} - fn summary_signals_from_git(context: GitSummaryContext) -> Result { let mut session = Session::new( context @@ -596,10 +582,11 @@ fn parse_diff_hunks(diff: &str) -> Vec { #[cfg(test)] mod tests { use super::{ - DiffLayerNode, GitSummaryRequest, SummaryGenerationKind, SummarySourceKind, - build_diff_tree, default_event_snippet, heuristic_summary, parse_diff_hunks, - summarize_session, + DiffLayerNode, SummaryGenerationKind, SummarySourceKind, build_diff_tree, + default_event_snippet, heuristic_summary, parse_diff_hunks, + summarize_session_with_provider, }; + use crate::git::GitSummaryContext; use crate::types::HailCompactFileChange; use chrono::Utc; use opensession_core::trace::{Agent, Content, Event, EventType, Session}; @@ -712,9 +699,13 @@ mod tests { let session = session_with_file_edit("src/auth.rs", "@@ -1 +1 @@\n-a\n+b\n"); let settings = SummarySettings::default(); - let artifact = summarize_session(&session, &settings, None) - .await - .expect("summarize"); + let artifact = summarize_session_with_provider(&session, &settings, None, |_, _| { + Box::pin(async { + unreachable!("provider should not run when summary settings are disabled") + }) + }) + .await + .expect("summarize"); assert_eq!( artifact.generation_kind, SummaryGenerationKind::HeuristicFallback @@ -726,7 +717,7 @@ mod tests { } #[tokio::test] - async fn 
summarize_session_uses_git_fallback_when_session_has_low_signal() { + async fn summarize_session_uses_git_context_when_session_has_low_signal() { let mut session = Session::new( "s-empty".to_string(), Agent { @@ -740,19 +731,57 @@ mod tests { let mut settings = SummarySettings::default(); settings.source_mode = opensession_runtime_config::SummarySourceMode::SessionOrGitChanges; + settings.provider.id = SummaryProvider::CodexExec; - let artifact = summarize_session( + let artifact = summarize_session_with_provider( &session, &settings, - Some(&GitSummaryRequest::working_tree(std::env::temp_dir())), + Some(GitSummaryContext { + source: "git_working_tree".to_string(), + repo_root: std::env::temp_dir(), + commit: None, + timeline_signals: vec!["working_tree: 1 files changed".to_string()], + file_changes: vec![HailCompactFileChange { + path: "src/lib.rs".to_string(), + layer: "application".to_string(), + operation: "edit".to_string(), + lines_added: 3, + lines_removed: 1, + }], + }), + |_, _| { + Box::pin(async { + Ok(crate::provider::SemanticSummary { + changes: "Updated working tree".to_string(), + auth_security: "none detected".to_string(), + layer_file_changes: Vec::new(), + }) + }) + }, ) .await .expect("summarize"); - // temp dir is typically not a git repo, so fallback remains heuristic/session. 
- assert!(matches!( - artifact.source_kind, - SummarySourceKind::SessionSignals | SummarySourceKind::Heuristic - )); + assert_eq!(artifact.source_kind, SummarySourceKind::GitWorkingTree); + assert_eq!(artifact.generation_kind, SummaryGenerationKind::Provider); + } + + #[tokio::test] + async fn summarize_session_records_provider_errors_as_heuristic_fallback() { + let session = session_with_file_edit("src/auth.rs", "@@ -1 +1 @@\n-a\n+b\n"); + let mut settings = SummarySettings::default(); + settings.provider.id = SummaryProvider::CodexExec; + + let artifact = summarize_session_with_provider(&session, &settings, None, |_, _| { + Box::pin(async { Err("codex exec failed".to_string()) }) + }) + .await + .expect("summarize"); + + assert_eq!( + artifact.generation_kind, + SummaryGenerationKind::HeuristicFallback + ); + assert_eq!(artifact.error.as_deref(), Some("codex exec failed")); } } diff --git a/crates/summary/src/provider.rs b/crates/summary/src/provider.rs index 5a4db35b..6923409c 100644 --- a/crates/summary/src/provider.rs +++ b/crates/summary/src/provider.rs @@ -1,14 +1,6 @@ -use opensession_runtime_config::{ - SummaryOutputShape, SummaryProvider, SummaryResponseStyle, SummarySettings, -}; +use opensession_runtime_config::{SummaryOutputShape, SummaryResponseStyle, SummarySettings}; use serde::{Deserialize, Serialize}; -use std::fs; -use std::path::PathBuf; -use std::process::{Command, Output, Stdio}; -use std::thread; -use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH}; -const DEFAULT_OLLAMA_ENDPOINT: &str = "http://127.0.0.1:11434"; const DEFAULT_SUMMARY_CHAR_LIMIT: usize = 560; const DEFAULT_AUTH_SECURITY_CHAR_LIMIT: usize = 320; const DEFAULT_LAYER_SUMMARY_CHAR_LIMIT: usize = 260; @@ -48,13 +40,6 @@ fn summary_limits(settings: &SummarySettings) -> SummaryNormalizationLimits { } } -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct LocalSummaryProfile { - pub provider: SummaryProvider, - pub endpoint: String, - pub model: String, -} - #[derive(Debug, 
Clone, Serialize, Deserialize, PartialEq, Eq)] pub struct SemanticSummary { pub changes: String, @@ -123,248 +108,10 @@ impl SemanticSummary { } } -pub fn detect_local_summary_profile() -> Option { - detect_ollama_profile() - .or_else(detect_codex_exec_profile) - .or_else(detect_claude_cli_profile) -} - -fn detect_ollama_profile() -> Option { - let output = Command::new("ollama").arg("list").output().ok()?; - if !output.status.success() { - return None; - } - let stdout = String::from_utf8_lossy(&output.stdout); - let model = parse_ollama_list_output(&stdout).into_iter().next()?; - Some(LocalSummaryProfile { - provider: SummaryProvider::Ollama, - endpoint: DEFAULT_OLLAMA_ENDPOINT.to_string(), - model, - }) -} - -fn detect_codex_exec_profile() -> Option { - if !command_available("codex", &["exec", "--help"]) { - return None; - } - Some(LocalSummaryProfile { - provider: SummaryProvider::CodexExec, - endpoint: String::new(), - model: String::new(), - }) -} - -fn detect_claude_cli_profile() -> Option { - if !command_available("claude", &["--help"]) { - return None; - } - Some(LocalSummaryProfile { - provider: SummaryProvider::ClaudeCli, - endpoint: String::new(), - model: String::new(), - }) -} - -fn command_available(program: &str, args: &[&str]) -> bool { - Command::new(program) - .args(args) - .stdout(Stdio::null()) - .stderr(Stdio::null()) - .status() - .map(|status| status.success()) - .unwrap_or(false) -} - -fn parse_ollama_list_output(raw: &str) -> Vec { - let mut models = Vec::new(); - for line in raw.lines() { - let trimmed = line.trim(); - if trimmed.is_empty() { - continue; - } - let lowered = trimmed.to_ascii_lowercase(); - if lowered.starts_with("name ") - || lowered.starts_with("error") - || lowered.starts_with("failed") - { - continue; - } - let Some(token) = trimmed.split_whitespace().next() else { - continue; - }; - let candidate = token.trim().to_string(); - if candidate.is_empty() || models.contains(&candidate) { - continue; - } - 
models.push(candidate); - } - models -} - -#[derive(Debug, Serialize)] -struct OllamaGenerateRequest<'a> { - model: &'a str, - prompt: &'a str, - stream: bool, -} - -#[derive(Debug, Deserialize)] -struct OllamaGenerateResponse { - response: String, -} - -pub async fn generate_summary( +pub fn parse_semantic_summary_or_fallback( + raw: &str, settings: &SummarySettings, - prompt: &str, -) -> Result { - let raw = generate_text(settings, prompt).await?; - Ok(parse_semantic_summary_or_fallback(&raw, settings)) -} - -pub async fn generate_text(settings: &SummarySettings, prompt: &str) -> Result { - if prompt.trim().is_empty() { - return Err("summary prompt is empty".to_string()); - } - if !settings.is_configured() { - return Err("local summary provider is not configured".to_string()); - } - - match settings.provider.id { - SummaryProvider::Disabled => Err("local summary provider is disabled".to_string()), - SummaryProvider::Ollama => generate_text_with_ollama(settings, prompt).await, - SummaryProvider::CodexExec => generate_text_with_codex_exec(settings, prompt).await, - SummaryProvider::ClaudeCli => generate_text_with_claude_cli(settings, prompt).await, - } -} - -async fn generate_text_with_ollama( - settings: &SummarySettings, - prompt: &str, -) -> Result { - let endpoint = if settings.provider.endpoint.trim().is_empty() { - DEFAULT_OLLAMA_ENDPOINT - } else { - settings.provider.endpoint.trim() - }; - let url = format!("{}/api/generate", endpoint.trim_end_matches('/')); - let model = settings.provider.model.trim(); - if model.is_empty() { - return Err("ollama model is empty".to_string()); - } - - let client = reqwest::Client::builder() - .connect_timeout(Duration::from_secs(2)) - .timeout(Duration::from_secs(20)) - .build() - .map_err(|err| format!("failed to build local summary HTTP client: {err}"))?; - - let response = client - .post(url) - .json(&OllamaGenerateRequest { - model, - prompt, - stream: false, - }) - .send() - .await - .map_err(|err| format!("failed to 
call ollama summary API: {err}"))?; - - if !response.status().is_success() { - let status = response.status().as_u16(); - let body = response.text().await.unwrap_or_default(); - return Err(format!( - "ollama summary API returned {status}: {}", - body.trim() - )); - } - - let payload: OllamaGenerateResponse = response - .json() - .await - .map_err(|err| format!("failed to decode ollama summary response: {err}"))?; - if payload.response.trim().is_empty() { - return Err("ollama summary response was empty".to_string()); - } - - Ok(payload.response) -} - -async fn generate_text_with_codex_exec( - settings: &SummarySettings, - prompt: &str, -) -> Result { - let output_path = temp_cli_output_path("codex-summary"); - - let mut command = Command::new("codex"); - command - .arg("exec") - .arg("--skip-git-repo-check") - .arg("--sandbox") - .arg("read-only") - .arg("--output-last-message") - .arg(output_path.to_string_lossy().to_string()); - let model = settings.provider.model.trim(); - if !model.is_empty() { - command.arg("--model").arg(model); - } - command.arg(prompt); - - let output = run_command_with_timeout(command, Duration::from_secs(60)) - .map_err(|err| format!("failed to run codex exec summary: {err}"))?; - - let response = read_output_or_stdout(&output_path, &output); - if response.trim().is_empty() { - return Err("codex exec summary response was empty".to_string()); - } - Ok(response) -} - -async fn generate_text_with_claude_cli( - settings: &SummarySettings, - prompt: &str, -) -> Result { - let model = settings.provider.model.trim().to_string(); - let timeout = Duration::from_secs(60); - - let mut command = Command::new("claude"); - command.arg("-c"); - if !model.is_empty() { - command.arg("--model").arg(&model); - } - command.arg(prompt); - - let output = match run_command_with_timeout(command, timeout) { - Ok(output) => output, - Err(primary_error) => { - let mut fallback = Command::new("claude"); - fallback - .arg("--print") - .arg("--output-format") - 
.arg("text") - .arg("--no-session-persistence") - .arg("--tools") - .arg(""); - if !model.is_empty() { - fallback.arg("--model").arg(&model); - } - fallback.arg(prompt); - - run_command_with_timeout(fallback, timeout).map_err(|fallback_error| { - format!( - "failed to run claude summary (`claude -c` => {primary_error}; fallback => {fallback_error})" - ) - })? - } - }; - - let response = String::from_utf8_lossy(&output.stdout).to_string(); - if response.trim().is_empty() { - return Err("claude summary response was empty".to_string()); - } - Ok(response) -} - -fn parse_semantic_summary_or_fallback(raw: &str, settings: &SummarySettings) -> SemanticSummary { +) -> SemanticSummary { let limits = summary_limits(settings); match parse_semantic_summary(raw) { Ok(summary) => summary.normalize(limits), @@ -372,71 +119,6 @@ fn parse_semantic_summary_or_fallback(raw: &str, settings: &SummarySettings) -> } } -fn read_output_or_stdout(path: &PathBuf, output: &Output) -> String { - let file_text = fs::read_to_string(path).unwrap_or_default(); - let _ = fs::remove_file(path); - if !file_text.trim().is_empty() { - return file_text; - } - String::from_utf8_lossy(&output.stdout).to_string() -} - -fn temp_cli_output_path(prefix: &str) -> PathBuf { - let pid = std::process::id(); - let timestamp = SystemTime::now() - .duration_since(UNIX_EPOCH) - .map(|duration| duration.as_nanos()) - .unwrap_or(0); - std::env::temp_dir().join(format!("{prefix}-{pid}-{timestamp}.txt")) -} - -fn run_command_with_timeout(mut command: Command, timeout: Duration) -> Result { - command.stdout(Stdio::piped()).stderr(Stdio::piped()); - - let program = command.get_program().to_string_lossy().to_string(); - let mut child = command - .spawn() - .map_err(|err| format!("failed to spawn `{program}`: {err}"))?; - let started = Instant::now(); - - loop { - match child.try_wait() { - Ok(Some(_status)) => { - let output = child - .wait_with_output() - .map_err(|err| format!("failed to collect `{program}` output: 
{err}"))?; - if output.status.success() { - return Ok(output); - } - let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string(); - let stdout = String::from_utf8_lossy(&output.stdout).trim().to_string(); - let detail = if !stderr.is_empty() { - stderr - } else if !stdout.is_empty() { - stdout - } else { - format!("exit status {}", output.status) - }; - return Err(format!("`{program}` failed: {detail}")); - } - Ok(None) => { - if started.elapsed() >= timeout { - let _ = child.kill(); - let _ = child.wait(); - return Err(format!( - "`{program}` timed out after {}s", - timeout.as_secs() - )); - } - thread::sleep(Duration::from_millis(50)); - } - Err(err) => { - return Err(format!("failed while waiting for `{program}`: {err}")); - } - } - } -} - #[cfg(test)] fn normalize_summary_text(raw: &str) -> String { normalize_summary_text_with_limit(raw, DEFAULT_SUMMARY_CHAR_LIMIT) @@ -507,32 +189,11 @@ fn find_json_object_slice(raw: &str) -> Option<&str> { #[cfg(test)] mod tests { use super::{ - SemanticSummary, normalize_summary_text, parse_ollama_list_output, parse_semantic_summary, + SemanticSummary, normalize_summary_text, parse_semantic_summary, parse_semantic_summary_or_fallback, }; use opensession_runtime_config::{SummaryOutputShape, SummaryResponseStyle, SummarySettings}; - #[test] - fn parse_ollama_list_output_extracts_model_names() { - let output = r#" -NAME ID SIZE MODIFIED -llama3.2:3b a80c4f17acd5 2.0 GB 3 hours ago -qwen2.5-coder:7b 2b0496514337 4.7 GB 1 day ago -"#; - - let models = parse_ollama_list_output(output); - assert_eq!( - models, - vec!["llama3.2:3b".to_string(), "qwen2.5-coder:7b".to_string()] - ); - } - - #[test] - fn parse_ollama_list_output_ignores_errors_and_empty_lines() { - let output = "\nError: could not connect to ollama\n"; - assert!(parse_ollama_list_output(output).is_empty()); - } - #[test] fn normalize_summary_text_collapses_whitespace_and_limits_length() { let raw = " fixed setup flow\nand added summary cache "; diff --git 
a/desktop/src-tauri/Cargo.toml b/desktop/src-tauri/Cargo.toml index 53bad706..50d405c1 100644 --- a/desktop/src-tauri/Cargo.toml +++ b/desktop/src-tauri/Cargo.toml @@ -15,9 +15,12 @@ opensession-api = { path = "../../crates/api" } opensession-local-db = { path = "../../crates/local-db" } opensession-local-store = { path = "../../crates/local-store" } opensession-core = { path = "../../crates/core" } +opensession-paths = { path = "../../crates/paths" } opensession-parsers = { path = "../../crates/parsers" } +opensession-parser-discovery = { path = "../../crates/parser-discovery" } opensession-runtime-config = { path = "../../crates/runtime-config" } opensession-summary = { path = "../../crates/summary" } +opensession-summary-runtime = { path = "../../crates/summary-runtime" } opensession-git-native = { path = "../../crates/git-native" } serde = { version = "1", features = ["derive"] } serde_json = "1" diff --git a/desktop/src-tauri/src/app/change_reader.rs b/desktop/src-tauri/src/app/change_reader.rs index df5960dd..9c88095a 100644 --- a/desktop/src-tauri/src/app/change_reader.rs +++ b/desktop/src-tauri/src/app/change_reader.rs @@ -8,7 +8,7 @@ use opensession_api::{ use opensession_core::trace::{ContentBlock, Event, EventType, Session as HailSession}; use opensession_local_db::LocalDb; use opensession_runtime_config::{ChangeReaderVoiceProvider, DaemonConfig, SummaryProvider}; -use opensession_summary::provider::generate_text; +use opensession_summary_runtime::generate_text; use serde_json::json; use std::time::Duration; diff --git a/desktop/src-tauri/src/app/runtime_settings.rs b/desktop/src-tauri/src/app/runtime_settings.rs index f6a72586..fb0fe4e3 100644 --- a/desktop/src-tauri/src/app/runtime_settings.rs +++ b/desktop/src-tauri/src/app/runtime_settings.rs @@ -610,7 +610,7 @@ pub(crate) fn desktop_update_runtime_settings( #[tauri::command] pub(crate) fn desktop_detect_summary_provider() -> DesktopSummaryProviderDetectResponse { - if let Some(profile) = 
opensession_summary::detect_summary_provider() { + if let Some(profile) = opensession_summary_runtime::detect_local_summary_profile() { return DesktopSummaryProviderDetectResponse { detected: true, provider: Some(map_summary_provider_id_from_runtime(&profile.provider)), diff --git a/desktop/src-tauri/src/app/session_access.rs b/desktop/src-tauri/src/app/session_access.rs index bb68dd93..9196db47 100644 --- a/desktop/src-tauri/src/app/session_access.rs +++ b/desktop/src-tauri/src/app/session_access.rs @@ -9,7 +9,8 @@ use opensession_core::session::{is_auxiliary_session, working_directory}; use opensession_core::trace::Session as HailSession; use opensession_git_native::extract_git_context; use opensession_local_db::{LocalDb, LocalSessionLink, LocalSessionRow}; -use opensession_parsers::{ParserRegistry, discover::discover_for_tool}; +use opensession_parser_discovery::discover_for_tool; +use opensession_parsers::ParserRegistry; use serde_json::json; use std::collections::BTreeSet; use std::fs; diff --git a/desktop/src-tauri/src/app/session_summary.rs b/desktop/src-tauri/src/app/session_summary.rs index cc8f3b1c..c3abe60e 100644 --- a/desktop/src-tauri/src/app/session_summary.rs +++ b/desktop/src-tauri/src/app/session_summary.rs @@ -14,7 +14,8 @@ use opensession_local_db::{LocalDb, LocalSessionFilter, LocalSessionRow, Summary use opensession_runtime_config::{ DaemonConfig, SummaryBatchScope as RuntimeSummaryBatchScope, SummaryStorageBackend, }; -use opensession_summary::{GitSummaryRequest, SemanticSummaryArtifact, summarize_session}; +use opensession_summary::{GitSummaryRequest, SemanticSummaryArtifact}; +use opensession_summary_runtime::summarize_session; use serde_json::json; use std::collections::HashSet; use std::path::{Path, PathBuf}; diff --git a/desktop/src-tauri/src/app/vector.rs b/desktop/src-tauri/src/app/vector.rs index 149fbf41..224764a2 100644 --- a/desktop/src-tauri/src/app/vector.rs +++ b/desktop/src-tauri/src/app/vector.rs @@ -53,6 +53,21 @@ static 
VECTOR_INSTALL_STATE: LazyLock> = LazyLo static VECTOR_INDEX_REBUILD_RUNNING: LazyLock> = LazyLock::new(|| Mutex::new(false)); +fn run_vector_blocking_task(task: F) -> DesktopApiResult +where + T: Send + 'static, + F: FnOnce() -> DesktopApiResult + Send + 'static, +{ + std::thread::spawn(task).join().map_err(|_| { + desktop_error( + "desktop.vector_runtime_join_failed", + 500, + "vector task terminated unexpectedly", + None, + ) + })? +} + pub(crate) fn vector_embed_endpoint(runtime: &DaemonConfig) -> String { let configured = runtime.vector_search.endpoint.trim(); if !configured.is_empty() { @@ -1308,7 +1323,7 @@ fn install_status_response_from_state( #[tauri::command] pub(crate) fn desktop_vector_preflight() -> DesktopApiResult { let runtime = load_runtime_config()?; - Ok(vector_preflight_for_runtime(&runtime)) + run_vector_blocking_task(move || Ok(vector_preflight_for_runtime(&runtime))) } #[tauri::command] @@ -1412,7 +1427,9 @@ pub(crate) fn desktop_search_sessions_vector( cursor: Option, limit: Option, ) -> DesktopApiResult { - let db = open_local_db()?; - let runtime = load_runtime_config()?; - search_sessions_vector_internal(&db, &runtime, &query, cursor, limit, None) + run_vector_blocking_task(move || { + let db = open_local_db()?; + let runtime = load_runtime_config()?; + search_sessions_vector_internal(&db, &runtime, &query, cursor, limit, None) + }) } diff --git a/desktop/src-tauri/src/main.rs b/desktop/src-tauri/src/main.rs index c571687f..261222a7 100644 --- a/desktop/src-tauri/src/main.rs +++ b/desktop/src-tauri/src/main.rs @@ -113,20 +113,14 @@ fn open_local_db() -> DesktopApiResult { } fn runtime_config_path() -> DesktopApiResult { - let home = std::env::var("HOME") - .or_else(|_| std::env::var("USERPROFILE")) - .map_err(|error| { - desktop_error( - "desktop.runtime_config_home_unavailable", - 500, - "failed to resolve home directory for runtime config", - Some(json!({ "cause": error.to_string() })), - ) - })?; - Ok(PathBuf::from(home) - 
.join(".config") - .join("opensession") - .join(opensession_runtime_config::CONFIG_FILE_NAME)) + opensession_paths::runtime_config_path().map_err(|error| { + desktop_error( + "desktop.runtime_config_home_unavailable", + 500, + "failed to resolve home directory for runtime config", + Some(json!({ "cause": error.to_string() })), + ) + }) } fn load_runtime_config() -> DesktopApiResult { From 91015761dc44afdb36c4a69ade417534741cc7f2 Mon Sep 17 00:00:00 2001 From: hwisu Date: Mon, 9 Mar 2026 14:15:38 +0900 Subject: [PATCH 21/30] chore(platform): drop generated desktop and worker artifacts --- .gitignore | 2 + desktop/src-tauri/Cargo.lock | 138 ++++++++++++++++++++++++++++++++++- 2 files changed, 137 insertions(+), 3 deletions(-) diff --git a/.gitignore b/.gitignore index caa3b609..dedb8c9d 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,8 @@ /target/ /target-*/ /crates/worker/target/ +/crates/worker/build/ +/build/ # Local cargo overrides (dev only) .cargo/config.toml diff --git a/desktop/src-tauri/Cargo.lock b/desktop/src-tauri/Cargo.lock index 06979fd5..7af5354e 100644 --- a/desktop/src-tauri/Cargo.lock +++ b/desktop/src-tauri/Cargo.lock @@ -667,13 +667,34 @@ dependencies = [ "subtle", ] +[[package]] +name = "directories" +version = "5.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a49173b84e034382284f27f1af4dcbbd231ffa358c0fe316541a7337f376a35" +dependencies = [ + "dirs-sys 0.4.1", +] + [[package]] name = "dirs" version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3e8aa94d75141228480295a7d0e7feb620b1a5ad9f12bc40be62411e38cce4e" dependencies = [ - "dirs-sys", + "dirs-sys 0.5.0", +] + +[[package]] +name = "dirs-sys" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c" +dependencies = [ + "libc", + "option-ext", + "redox_users 0.4.6", + "windows-sys 0.48.0", ] [[package]] @@ 
-684,7 +705,7 @@ checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab" dependencies = [ "libc", "option-ext", - "redox_users", + "redox_users 0.5.2", "windows-sys 0.61.2", ] @@ -3209,9 +3230,12 @@ dependencies = [ "opensession-git-native", "opensession-local-db", "opensession-local-store", + "opensession-parser-discovery", "opensession-parsers", + "opensession-paths", "opensession-runtime-config", "opensession-summary", + "opensession-summary-runtime", "reqwest 0.12.28", "serde", "serde_json", @@ -3242,6 +3266,7 @@ dependencies = [ "chrono", "opensession-api", "opensession-core", + "opensession-paths", "rusqlite", "serde", "serde_json", @@ -3253,10 +3278,21 @@ name = "opensession-local-store" version = "0.2.34" dependencies = [ "opensession-core", + "opensession-paths", "sha2", "thiserror 2.0.18", ] +[[package]] +name = "opensession-parser-discovery" +version = "0.2.34" +dependencies = [ + "glob", + "opensession-paths", + "rusqlite", + "shellexpand", +] + [[package]] name = "opensession-parsers" version = "0.2.34" @@ -3276,6 +3312,15 @@ dependencies = [ "uuid", ] +[[package]] +name = "opensession-paths" +version = "0.2.34" +dependencies = [ + "directories", + "opensession-runtime-config", + "thiserror 2.0.18", +] + [[package]] name = "opensession-runtime-config" version = "0.2.34" @@ -3291,12 +3336,22 @@ dependencies = [ "hex", "opensession-core", "opensession-runtime-config", - "reqwest 0.12.28", "serde", "serde_json", "sha2", ] +[[package]] +name = "opensession-summary-runtime" +version = "0.2.34" +dependencies = [ + "opensession-core", + "opensession-runtime-config", + "opensession-summary", + "reqwest 0.12.28", + "serde", +] + [[package]] name = "option-ext" version = "0.2.0" @@ -3920,6 +3975,17 @@ dependencies = [ "bitflags 2.11.0", ] +[[package]] +name = "redox_users" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" 
+dependencies = [ + "getrandom 0.2.17", + "libredox", + "thiserror 1.0.69", +] + [[package]] name = "redox_users" version = "0.5.2" @@ -5983,6 +6049,15 @@ dependencies = [ "windows-targets 0.42.2", ] +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + [[package]] name = "windows-sys" version = "0.52.0" @@ -6034,6 +6109,21 @@ dependencies = [ "windows_x86_64_msvc 0.42.2", ] +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + [[package]] name = "windows-targets" version = "0.52.6" @@ -6091,6 +6181,12 @@ version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" @@ -6109,6 +6205,12 @@ version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + [[package]] name = "windows_aarch64_msvc" version = 
"0.52.6" @@ -6127,6 +6229,12 @@ version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + [[package]] name = "windows_i686_gnu" version = "0.52.6" @@ -6157,6 +6265,12 @@ version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + [[package]] name = "windows_i686_msvc" version = "0.52.6" @@ -6175,6 +6289,12 @@ version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + [[package]] name = "windows_x86_64_gnu" version = "0.52.6" @@ -6193,6 +6313,12 @@ version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" @@ -6211,6 +6337,12 @@ version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + [[package]] name = "windows_x86_64_msvc" version = "0.52.6" From 688f6192176774f86c50aceceb175dc0fb85ddd0 Mon Sep 17 00:00:00 2001 From: hwisu Date: Mon, 9 Mar 2026 16:33:33 +0900 Subject: [PATCH 22/30] Add Korean localization and build cache guardrails --- .githooks/pre-commit | 1 + .gitignore | 2 + Cargo.toml | 6 + README.ko.md | 49 +- README.md | 1 + build.sh | 33 + crates/cli/src/cli_args.rs | 204 +++- crates/cli/src/docs_cmd.rs | 50 +- crates/cli/src/entrypoint.rs | 15 +- crates/cli/src/locale.rs | 40 + crates/cli/src/main.rs | 1 + crates/server/src/routes/docs.rs | 45 +- crates/worker/src/routes/docs.rs | 37 +- desktop/README.ko.md | 102 ++ desktop/README.md | 2 + desktop/package-lock.json | 96 +- desktop/package.json | 2 +- desktop/src-tauri/src/main.rs | 22 +- desktop/src-tauri/src/main_tests.rs | 12 + docs.ko.md | 390 ++++++++ packages/ui/package-lock.json | 115 +-- packages/ui/package.json | 8 +- packages/ui/src/api-internal/runtime.ts | 35 + packages/ui/src/components/AppShell.svelte | 221 +++-- .../ui/src/components/AuthCallbackPage.svelte | 12 +- .../ui/src/components/CodeBlockView.svelte | 5 +- packages/ui/src/components/DocsPage.svelte | 73 +- packages/ui/src/components/EventView.svelte | 72 +- packages/ui/src/components/FieldHelp.svelte | 8 +- .../src/components/FloatingJobStatus.svelte | 14 +- packages/ui/src/components/LandingPage.svelte | 114 ++- .../src/components/LanguageModePicker.svelte | 54 + .../components/LanguageSettingsPanel.svelte | 16 + packages/ui/src/components/LoginPage.svelte | 44 +- .../src/components/ParseSourceBanner.svelte | 11 +- .../src/components/ParserSelectPanel.svelte | 18 +- packages/ui/src/components/SessionCard.svelte | 9 +- 
.../src/components/SessionDetailPage.svelte | 146 ++- .../ui/src/components/SessionListPage.svelte | 88 +- .../src/components/SessionRenderPage.svelte | 116 ++- .../ui/src/components/SessionSidebar.svelte | 28 +- .../ui/src/components/SettingsPage.svelte | 937 +++++++++++------- packages/ui/src/components/ThemeToggle.svelte | 5 +- .../ui/src/components/ThreadMinimap.svelte | 14 +- .../ui/src/components/TimelineView.svelte | 32 +- .../GitCredentialEditorPanel.svelte | 16 +- .../GitCredentialListPanel.svelte | 19 +- .../settings-page/GitCredentialsPanel.svelte | 17 +- .../settings-page/RuntimeActivityPanel.svelte | 16 +- .../settings-page/RuntimeQuickMenu.svelte | 71 +- .../settings-page/SettingsApiKeyPanel.svelte | 17 +- .../SettingsAuthGatePanel.svelte | 9 +- .../SettingsOverviewHeaderPanel.svelte | 14 +- .../settings-page/SettingsProfilePanel.svelte | 23 +- .../settings-page/SettingsSectionNav.svelte | 5 +- packages/ui/src/i18n.test.ts | 130 +++ packages/ui/src/i18n.ts | 769 ++++++++++++++ packages/ui/src/index.ts | 13 + packages/ui/src/types.ts | 34 +- packages/ui/src/utils.ts | 7 +- scripts/check-rust-artifact-guardrails.mjs | 52 + scripts/check-validation-hooks.mjs | 1 + web/README.ko.md | 38 + web/README.md | 4 + web/e2e/navigation.spec.ts | 15 +- web/e2e/settings.spec.ts | 3 +- web/package-lock.json | 598 +++++------ web/package.json | 12 +- web/src/routes/register/+page.svelte | 5 +- .../src/[provider]/[...segments]/+page.svelte | 15 +- 70 files changed, 3957 insertions(+), 1251 deletions(-) create mode 100644 crates/cli/src/locale.rs create mode 100644 desktop/README.ko.md create mode 100644 docs.ko.md create mode 100644 packages/ui/src/components/LanguageModePicker.svelte create mode 100644 packages/ui/src/components/LanguageSettingsPanel.svelte create mode 100644 packages/ui/src/i18n.test.ts create mode 100644 packages/ui/src/i18n.ts create mode 100644 scripts/check-rust-artifact-guardrails.mjs create mode 100644 web/README.ko.md diff --git 
a/.githooks/pre-commit b/.githooks/pre-commit index c9480d00..b19f7f36 100755 --- a/.githooks/pre-commit +++ b/.githooks/pre-commit @@ -68,6 +68,7 @@ run_node_check "desktop build preflight" scripts/validate/desktop-build-prefligh run_node_check "content contract check" scripts/verify-content-contract.mjs --check run_node_check "session-review workflow check" scripts/check-session-review-workflow.mjs run_node_check "publishable dependency check" scripts/check-publishable-deps.mjs +run_node_check "rust artifact guardrail check" scripts/check-rust-artifact-guardrails.mjs run_node_check "validation hook guardrail check" scripts/check-validation-hooks.mjs run_node_check "docs portability check" scripts/check-doc-portability.mjs run_node_check "product version sync check" scripts/sync-product-version.mjs --check diff --git a/.gitignore b/.gitignore index dedb8c9d..fc81df41 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ # Build artifacts +/node_modules/ /target/ /target-*/ /crates/worker/target/ @@ -8,6 +9,7 @@ # Local cargo overrides (dev only) .cargo/config.toml web/node_modules/ +packages/ui/node_modules/ web/build/ web/.svelte-kit/ desktop/node_modules/ diff --git a/Cargo.toml b/Cargo.toml index 0f266d9c..e5e0f538 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -103,9 +103,15 @@ gix = "0.79" sea-query = { version = "0.32", features = ["backend-sqlite", "derive"] } directories = "5" +[profile.dev] +incremental = false + [profile.dev.package."*"] opt-level = 1 +[profile.test] +incremental = false + [profile.release] lto = "thin" codegen-units = 4 diff --git a/README.ko.md b/README.ko.md index 028c0f7b..5f74be07 100644 --- a/README.ko.md +++ b/README.ko.md @@ -8,11 +8,13 @@ OpenSession은 AI 세션 로그를 로컬 우선(local-first)으로 기록/등록/공유/검토하는 워크플로입니다. 
웹: [opensession.io](https://opensession.io) -문서: [opensession.io/docs](https://opensession.io/docs) +문서: [opensession.io/docs](https://opensession.io/docs) +한국어 제품 문서: [`docs.ko.md`](docs.ko.md) ## 문서 맵 -- 제품 계약/명령 모델: [`docs.md`](docs.md) +- 제품 계약/명령 모델: [`docs.ko.md`](docs.ko.md) +- 영문 원본 계약 문서: [`docs.md`](docs.md) - 개발/검증 런북: [`docs/development-validation-flow.md`](docs/development-validation-flow.md) - 하네스 루프 정책: [`docs/harness-auto-improve-loop.md`](docs/harness-auto-improve-loop.md) - 파서 소스/재사용 매트릭스: [`docs/parser-source-matrix.md`](docs/parser-source-matrix.md) @@ -53,6 +55,19 @@ cargo install opensession 사용자 표면은 `opensession` CLI입니다. 자동 세션 수집(auto-capture)을 쓰려면 daemon 프로세스가 추가로 실행 중이어야 합니다. +CLI 로컬 전용 경로(backup/summary/handoff): + +```bash +opensession doctor +opensession doctor --fix --profile local +``` + +데스크톱 앱 경로(app + CLI + app-first 기본값): + +```bash +opensession doctor --fix --profile app --open-target app +``` + ## 개발 툴체인 (레포 작업 필수) 로컬 환경 편차를 줄이기 위해 레포 훅/검증은 `mise` 관리 툴체인을 필수로 사용합니다. @@ -75,16 +90,16 @@ cargo install opensession opensession doctor # 3) 권장 설치값 적용 (변경 전 동의 프롬프트) -opensession doctor --fix +opensession doctor --fix --profile local # 선택: fanout 모드를 명시적으로 지정 -opensession doctor --fix --fanout-mode hidden_ref +opensession doctor --fix --profile local --fanout-mode hidden_ref # 선택: view/review 오프너를 명시적으로 지정 -opensession doctor --fix --open-target app +opensession doctor --fix --profile app --open-target app # 자동화/비대화형(non-TTY) -opensession doctor --fix --yes --fanout-mode hidden_ref --open-target app +opensession doctor --fix --yes --profile local --fanout-mode hidden_ref --open-target web ``` `doctor`는 내부적으로 기존 setup 파이프라인을 재사용합니다. @@ -92,7 +107,7 @@ opensession doctor --fix --yes --fanout-mode hidden_ref --open-target app 첫 interactive 적용 시 fanout 저장 모드(`hidden_ref` 또는 `git_notes`)를 선택하며, 선택값은 로컬 git 설정(`.git/config`)의 `opensession.fanout-mode`에 저장됩니다. 
같은 설정 흐름에서 `opensession view/review` 오프너(`app` 또는 `web`)도 선택하며 `opensession.open-target`으로 저장됩니다. 비대화형 환경에서는 `--fix`에 `--yes`가 필요하고, 저장된 fanout 모드가 없으면 `--fanout-mode`를 명시해야 합니다. -`--open-target`은 선택사항이며 기본값은 `app`입니다. +`--open-target`은 선택사항이며 기본값이 profile을 따릅니다(`local -> web`, `app -> app`). 자동 수집을 위한 daemon 실행: @@ -179,18 +194,26 @@ opensession inspect os://src/local/ ## 공유(share) ```bash -# local URI -> git 공유 가능 Source URI -opensession share os://src/local/ --git --remote origin +# 원클릭 git 공유 (첫 push만 한 번 확인, 이후 quick 모드에서 자동 push) +opensession share os://src/local/ --quick # 선택적 네트워크 변경 opensession share os://src/local/ --git --remote origin --push +# OpenSession pre-push 훅 설치/업데이트 +opensession doctor +opensession doctor --fix --profile local +# 선택: fanout 실패 시 push 자체를 실패시키고 싶다면 +OPENSESSION_STRICT=1 git push + # remote-resolvable URI -> 웹 URL opensession config init --base-url https://opensession.io opensession share os://src/git//ref//path/ --web ``` `share --web`는 `.opensession/config.toml`이 반드시 필요합니다. +Git-native write는 이제 hidden ledger ref(`refs/opensession/branches/`)를 사용하며, 새 write에 레거시 고정 ref는 쓰지 않습니다. +`opensession doctor --fix`는 훅 안정성을 위해 `~/.local/share/opensession/bin/opensession` shim도 설치합니다. ## Cleanup 자동화 @@ -274,12 +297,12 @@ opensession share os://src/git//ref//path/ --web ``` 2. `share --git`에서 remote 누락: ```bash -opensession share os://src/local/ --git --remote origin +opensession share os://src/local/ --quick ``` 3. git 저장소 밖에서 `share --git` 실행: ```bash cd -opensession share os://src/local/ --git --remote origin +opensession share os://src/local/ --quick ``` 4. 
`.opensession/config.toml` 없이 `share --web` 실행: ```bash @@ -311,10 +334,10 @@ opensession cleanup run 처음 사용자 5분 복귀 경로: ```bash opensession doctor -opensession doctor --fix +opensession doctor --fix --profile local opensession parse --profile codex ./raw-session.jsonl --out ./session.hail.jsonl opensession register ./session.hail.jsonl -opensession share os://src/local/ --git --remote origin +opensession share os://src/local/ --quick ``` ## 로컬 개발 검증 diff --git a/README.md b/README.md index df70cc26..da58b4d0 100644 --- a/README.md +++ b/README.md @@ -13,6 +13,7 @@ Docs: [opensession.io/docs](https://opensession.io/docs) ## Documentation Map - Product contract and command model: [`docs.md`](docs.md) +- Korean product contract: [`docs.ko.md`](docs.ko.md) - Development and validation runbook: [`docs/development-validation-flow.md`](docs/development-validation-flow.md) - Harness loop policy: [`docs/harness-auto-improve-loop.md`](docs/harness-auto-improve-loop.md) - Parser source/reuse matrix: [`docs/parser-source-matrix.md`](docs/parser-source-matrix.md) diff --git a/build.sh b/build.sh index 6493f7c1..bef0e87d 100755 --- a/build.sh +++ b/build.sh @@ -28,6 +28,39 @@ run_with_stable_rustc() { "$@" } +stable_target_dir_name() { + if command -v rustup >/dev/null 2>&1; then + RUSTC_BIN=$(rustup which --toolchain stable rustc 2>/dev/null || true) + if [ -n "$RUSTC_BIN" ]; then + RELEASE=$("$RUSTC_BIN" -Vv 2>/dev/null | awk '/^release:/ { print $2; exit }') + if [ -n "$RELEASE" ]; then + printf 'target-rustup-%s\n' "$(printf '%s' "$RELEASE" | tr '.' 
'_')" + return + fi + fi + fi +} + +prune_rust_build_artifacts() { + rm -rf target/debug/incremental + + for path in target-rustup-*/debug/incremental; do + [ -d "$path" ] || continue + rm -rf "$path" + done + + current_target_dir=$(stable_target_dir_name) + for path in target-rustup-*; do + [ -d "$path" ] || continue + if [ -n "$current_target_dir" ] && [ "$(basename "$path")" = "$current_target_dir" ]; then + continue + fi + rm -rf "$path" + done +} + +prune_rust_build_artifacts + # Frontend cd packages/ui && npm install && cd ../.. cd web && npm install && npm run build && cd .. diff --git a/crates/cli/src/cli_args.rs b/crates/cli/src/cli_args.rs index 81916a85..1a8b156f 100644 --- a/crates/cli/src/cli_args.rs +++ b/crates/cli/src/cli_args.rs @@ -1,7 +1,7 @@ -use clap::{Parser, Subcommand}; +use clap::{Command, CommandFactory, FromArgMatches, Parser, Subcommand}; use std::path::PathBuf; -use crate::setup_cmd; +use crate::{locale::localize, setup_cmd}; #[derive(Parser)] #[command( @@ -86,6 +86,206 @@ pub(crate) enum DocsAction { }, } +pub(crate) fn command() -> Command { + let mut command = ::command(); + localize_command(&mut command); + command +} + +pub(crate) fn parse_cli() -> Cli { + let matches = command().get_matches(); + Cli::from_arg_matches(&matches).unwrap_or_else(|err| err.exit()) +} + +fn set_about(command: &mut Command, text: &'static str) { + *command = command.clone().about(text); +} + +fn set_after_help(command: &mut Command, text: &'static str) { + *command = command.clone().after_help(text); +} + +fn localize_command(command: &mut Command) { + match command.get_name() { + "opensession" => { + set_about( + command, + localize( + "OpenSession CLI - local-first source URI workflows", + "OpenSession CLI - 로컬 우선 Source URI 워크플로", + ), + ); + set_after_help( + command, + localize( + "First-user flow (5 minutes):\n opensession docs quickstart\n\nCommon next steps:\n opensession doctor\n opensession doctor --fix --profile local\n opensession parse 
--profile codex ./raw-session.jsonl --out ./session.hail.jsonl\n opensession register ./session.hail.jsonl\n opensession share os://src/local/ --quick", + "첫 사용자 흐름 (5분):\n opensession docs quickstart\n\n다음으로 많이 쓰는 명령:\n opensession doctor\n opensession doctor --fix --profile local\n opensession parse --profile codex ./raw-session.jsonl --out ./session.hail.jsonl\n opensession register ./session.hail.jsonl\n opensession share os://src/local/ --quick", + ), + ); + } + "register" => { + set_about( + command, + localize( + "Register canonical HAIL JSONL into local object store.", + "canonical HAIL JSONL을 로컬 객체 저장소에 등록합니다.", + ), + ); + } + "cat" => { + set_about( + command, + localize( + "Print canonical JSONL for a local source URI.", + "로컬 source URI의 canonical JSONL을 출력합니다.", + ), + ); + } + "inspect" => { + set_about( + command, + localize( + "Inspect summary metadata for source/artifact URIs.", + "source/artifact URI의 summary 메타데이터를 확인합니다.", + ), + ); + } + "share" => { + set_about( + command, + localize( + "Resolve sharing outputs from a source URI.", + "Source URI에서 공유 출력을 생성합니다.", + ), + ); + } + "view" => { + set_about( + command, + localize( + "Open a review-centric web view from URI/file/URL/commit targets.", + "URI/파일/URL/커밋 대상을 리뷰 중심 웹 보기로 엽니다.", + ), + ); + set_after_help( + command, + localize( + "Recovery examples:\n opensession view --no-open\n opensession view os://src/local/ --no-open\n opensession view ./session.hail.jsonl --no-open\n opensession view HEAD~3..HEAD --no-open", + "복구 예시:\n opensession view --no-open\n opensession view os://src/local/ --no-open\n opensession view ./session.hail.jsonl --no-open\n opensession view HEAD~3..HEAD --no-open", + ), + ); + } + "review" => { + set_about( + command, + localize( + "Review a GitHub PR using local hidden refs and grouped commit sessions.", + "로컬 hidden ref와 grouped commit session을 사용해 GitHub PR을 검토합니다.", + ), + ); + } + "handoff" => { + set_about( + command, + localize( + "Build and manage 
immutable handoff artifacts.", + "불변 handoff artifact를 생성하고 관리합니다.", + ), + ); + } + "parse" => { + set_about( + command, + localize( + "Parse agent-native logs into canonical HAIL JSONL.", + "agent-native 로그를 canonical HAIL JSONL로 변환합니다.", + ), + ); + set_after_help( + command, + localize( + "Recovery examples:\n opensession parse --profile codex ./raw-session.jsonl --preview\n opensession parse --profile codex ./raw-session.jsonl --out ./session.hail.jsonl", + "복구 예시:\n opensession parse --profile codex ./raw-session.jsonl --preview\n opensession parse --profile codex ./raw-session.jsonl --out ./session.hail.jsonl", + ), + ); + } + "summary" => { + set_about( + command, + localize( + "Generate/show local semantic summaries.", + "로컬 시맨틱 summary를 생성하거나 표시합니다.", + ), + ); + } + "config" => { + set_about( + command, + localize( + "Manage explicit repo config (`.opensession/config.toml`).", + "명시적 레포 설정(`.opensession/config.toml`)을 관리합니다.", + ), + ); + } + "cleanup" => { + set_about( + command, + localize( + "Configure and run hidden-ref cleanup automation.", + "hidden-ref cleanup 자동화를 구성하고 실행합니다.", + ), + ); + } + "setup" => { + set_about( + command, + localize( + "Install/update OpenSession git hooks and diagnostics.", + "OpenSession git hook과 진단 구성을 설치/업데이트합니다.", + ), + ); + } + "doctor" => { + set_about( + command, + localize( + "Diagnose and optionally fix local OpenSession setup.", + "로컬 OpenSession 설정을 진단하고 필요하면 수정합니다.", + ), + ); + } + "docs" => { + set_about( + command, + localize( + "Print shell completions and quickstart guidance.", + "셸 completion과 빠른 시작 안내를 출력합니다.", + ), + ); + } + "completion" => { + set_about( + command, + localize("Generate shell completions.", "셸 completion을 생성합니다."), + ); + } + "quickstart" => { + set_about( + command, + localize( + "Print a 5-minute first-user flow.", + "5분짜리 첫 사용자 흐름을 출력합니다.", + ), + ); + } + _ => {} + } + + for subcommand in command.get_subcommands_mut() { + localize_command(subcommand); + } +} + #[cfg(test)] mod 
tests { use clap::Parser; diff --git a/crates/cli/src/docs_cmd.rs b/crates/cli/src/docs_cmd.rs index 80d86fe5..1c483968 100644 --- a/crates/cli/src/docs_cmd.rs +++ b/crates/cli/src/docs_cmd.rs @@ -1,12 +1,11 @@ -use clap::CommandFactory; use std::path::Path; -use crate::{cli_args::Cli, setup_cmd}; +use crate::{cli_args::command, locale::localize, setup_cmd}; pub(crate) fn run_docs(action: crate::cli_args::DocsAction) -> anyhow::Result<()> { match action { crate::cli_args::DocsAction::Completion { shell } => { - let mut cmd = ::command(); + let mut cmd = command(); clap_complete::generate(shell, &mut cmd, "opensession", &mut std::io::stdout()); Ok(()) } @@ -30,9 +29,18 @@ fn print_quickstart( out: &Path, remote: &str, ) { - println!("# OpenSession 5-minute first-user flow"); + println!( + "{}", + localize( + "# OpenSession 5-minute first-user flow", + "# OpenSession 5분 첫 사용자 흐름", + ) + ); println!(); - println!("# 1) Diagnose and apply setup"); + println!( + "{}", + localize("# 1) Diagnose and apply setup", "# 1) 설정을 진단하고 적용") + ); println!("opensession doctor"); println!( "opensession doctor --fix --profile {}", @@ -42,7 +50,13 @@ fn print_quickstart( println!("opensession doctor --fix --profile app --open-target app"); } println!(); - println!("# 2) Parse raw logs into canonical HAIL JSONL"); + println!( + "{}", + localize( + "# 2) Parse raw logs into canonical HAIL JSONL", + "# 2) raw 로그를 canonical HAIL JSONL로 변환", + ) + ); println!( "opensession parse --profile {} {} --out {}", profile, @@ -50,17 +64,35 @@ fn print_quickstart( out.display() ); println!(); - println!("# 3) Register canonical session locally"); + println!( + "{}", + localize( + "# 3) Register canonical session locally", + "# 3) canonical 세션을 로컬에 등록", + ) + ); println!("opensession register {}", out.display()); println!("# -> os://src/local/"); println!(); - println!("# 4) Share local source URI via quick git flow"); + println!( + "{}", + localize( + "# 4) Share local source URI via quick git 
flow", + "# 4) quick git 흐름으로 로컬 source URI 공유", + ) + ); println!( "opensession share os://src/local/ --quick --remote {}", remote ); println!(); - println!("# 5) Optional: convert a remote URI to web URL"); + println!( + "{}", + localize( + "# 5) Optional: convert a remote URI to web URL", + "# 5) 선택: remote URI를 웹 URL로 변환", + ) + ); println!("opensession config init --base-url https://opensession.io"); println!("opensession share os://src/git//ref//path/ --web"); } diff --git a/crates/cli/src/entrypoint.rs b/crates/cli/src/entrypoint.rs index 01da61a4..2d1a65c6 100644 --- a/crates/cli/src/entrypoint.rs +++ b/crates/cli/src/entrypoint.rs @@ -1,14 +1,13 @@ -use clap::Parser; - use crate::{ cat_cmd, cleanup_cmd, - cli_args::{Cli, Commands}, - config_cmd, docs_cmd, doctor_cmd, handoff_v1, inspect, parse_cmd, register, review, setup_cmd, - share, summary_cmd, view, + cli_args::{Commands, parse_cli}, + config_cmd, docs_cmd, doctor_cmd, handoff_v1, inspect, + locale::localize, + parse_cmd, register, review, setup_cmd, share, summary_cmd, view, }; pub(crate) async fn run_process() { - let cli = Cli::parse(); + let cli = parse_cli(); let result = match cli.command { Commands::Register(args) => register::run(args), @@ -29,9 +28,9 @@ pub(crate) async fn run_process() { if let Err(error) = result { if debug_errors_enabled() { - eprintln!("Error: {error:#}"); + eprintln!("{} {error:#}", localize("Error:", "오류:")); } else { - eprintln!("Error: {error}"); + eprintln!("{} {error}", localize("Error:", "오류:")); } std::process::exit(1); } diff --git a/crates/cli/src/locale.rs b/crates/cli/src/locale.rs new file mode 100644 index 00000000..66fe0c80 --- /dev/null +++ b/crates/cli/src/locale.rs @@ -0,0 +1,40 @@ +pub(crate) fn is_korean() -> bool { + ["LC_ALL", "LC_MESSAGES", "LANG"] + .into_iter() + .filter_map(|key| std::env::var(key).ok()) + .map(|value| value.trim().to_ascii_lowercase()) + .any(|value| value == "ko" || value.starts_with("ko_") || value.starts_with("ko-")) +} + 
+pub(crate) fn localize<'a>(en: &'a str, ko: &'a str) -> &'a str { + if is_korean() { ko } else { en } +} + +#[cfg(test)] +mod tests { + use super::is_korean; + use std::sync::{Mutex, OnceLock}; + + fn env_lock() -> &'static Mutex<()> { + static LOCK: OnceLock> = OnceLock::new(); + LOCK.get_or_init(|| Mutex::new(())) + } + + #[test] + fn detects_korean_lang_prefixes() { + let _guard = env_lock().lock().expect("lock env"); + let original = std::env::var("LANG").ok(); + unsafe { + std::env::set_var("LANG", "ko_KR.UTF-8"); + } + assert!(is_korean()); + match original { + Some(value) => unsafe { + std::env::set_var("LANG", value); + }, + None => unsafe { + std::env::remove_var("LANG"); + }, + } + } +} diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs index de9013f1..44ff3023 100644 --- a/crates/cli/src/main.rs +++ b/crates/cli/src/main.rs @@ -8,6 +8,7 @@ mod entrypoint; mod handoff_v1; mod hooks; mod inspect; +mod locale; mod open_target; mod parse_cmd; mod register; diff --git a/crates/server/src/routes/docs.rs b/crates/server/src/routes/docs.rs index 89558040..7e4c27ab 100644 --- a/crates/server/src/routes/docs.rs +++ b/crates/server/src/routes/docs.rs @@ -1,9 +1,12 @@ use axum::{ + extract::Query, http::{HeaderMap, StatusCode, header}, response::{IntoResponse, Response}, }; +use serde::Deserialize; -const DOCS_MD: &str = include_str!("../../../../docs.md"); +const DOCS_MD_EN: &str = include_str!("../../../../docs.md"); +const DOCS_MD_KO: &str = include_str!("../../../../docs.ko.md"); const LLMS_TXT: &str = "\ # OpenSession @@ -35,7 +38,26 @@ Base URL: `/api` - CLI: `cargo install opensession` "; -pub async fn handle(headers: HeaderMap) -> Response { +#[derive(Debug, Default, Deserialize)] +pub struct DocsQuery { + lang: Option, +} + +fn is_korean_locale(locale: Option<&str>) -> bool { + locale.map(str::trim).is_some_and(|value| { + value.eq_ignore_ascii_case("ko") || value.to_ascii_lowercase().starts_with("ko-") + }) +} + +fn 
docs_markdown_for_locale(locale: Option<&str>) -> &'static str { + if is_korean_locale(locale) { + DOCS_MD_KO + } else { + DOCS_MD_EN + } +} + +pub async fn handle(Query(query): Query, headers: HeaderMap) -> Response { let accept = headers .get("accept") .and_then(|v| v.to_str().ok()) @@ -48,7 +70,7 @@ pub async fn handle(headers: HeaderMap) -> Response { (header::CONTENT_TYPE, "text/markdown; charset=utf-8"), (header::CACHE_CONTROL, "public, max-age=3600"), ], - DOCS_MD, + docs_markdown_for_locale(query.lang.as_deref()), ) .into_response(); } @@ -68,6 +90,23 @@ pub async fn handle(headers: HeaderMap) -> Response { } } +#[cfg(test)] +mod tests { + use super::docs_markdown_for_locale; + + #[test] + fn selects_korean_docs_for_korean_locale() { + let selected = docs_markdown_for_locale(Some("ko-KR")); + assert!(selected.starts_with("# 문서")); + } + + #[test] + fn defaults_to_english_docs() { + let selected = docs_markdown_for_locale(Some("en-US")); + assert!(selected.starts_with("# Documentation")); + } +} + pub async fn llms_txt() -> impl IntoResponse { ( StatusCode::OK, diff --git a/crates/worker/src/routes/docs.rs b/crates/worker/src/routes/docs.rs index 02b09efd..f3275dd5 100644 --- a/crates/worker/src/routes/docs.rs +++ b/crates/worker/src/routes/docs.rs @@ -1,6 +1,7 @@ use worker::*; -const DOCS_MD: &str = include_str!("../../../../docs.md"); +const DOCS_MD_EN: &str = include_str!("../../../../docs.md"); +const DOCS_MD_KO: &str = include_str!("../../../../docs.ko.md"); const LLMS_TXT: &str = "\ # OpenSession @@ -31,11 +32,15 @@ Base URL: `https://opensession.io/api` pub async fn handle(req: Request, ctx: RouteContext<()>) -> Result { let accept = req.headers().get("Accept")?.unwrap_or_default(); + let url = req.url()?; + let lang = url + .query_pairs() + .find_map(|(key, value)| (key == "lang").then(|| value.into_owned())); if accept.contains("text/markdown") { let headers = Headers::new(); headers.set("Content-Type", "text/markdown; charset=utf-8")?; 
headers.set("Cache-Control", "public, max-age=3600")?; - return Ok(Response::ok(DOCS_MD)?.with_headers(headers)); + return Ok(Response::ok(docs_markdown_for_locale(lang.as_deref()))?.with_headers(headers)); } // HTML: delegate to ASSETS binding for SPA serving let assets: Fetcher = ctx.env.service("ASSETS")?; @@ -48,3 +53,31 @@ pub async fn llms_txt(_req: Request, _ctx: RouteContext<()>) -> Result headers.set("Cache-Control", "public, max-age=3600")?; Ok(Response::ok(LLMS_TXT)?.with_headers(headers)) } + +fn docs_markdown_for_locale(locale: Option<&str>) -> &'static str { + if locale + .map(str::trim) + .is_some_and(|value| value.eq_ignore_ascii_case("ko") || value.to_ascii_lowercase().starts_with("ko-")) + { + DOCS_MD_KO + } else { + DOCS_MD_EN + } +} + +#[cfg(test)] +mod tests { + use super::docs_markdown_for_locale; + + #[test] + fn returns_korean_docs_for_korean_locale() { + let selected = docs_markdown_for_locale(Some("ko")); + assert!(selected.starts_with("# 문서")); + } + + #[test] + fn returns_english_docs_for_other_locales() { + let selected = docs_markdown_for_locale(Some("en-US")); + assert!(selected.starts_with("# Documentation")); + } +} diff --git a/desktop/README.ko.md b/desktop/README.ko.md new file mode 100644 index 00000000..fc56496d --- /dev/null +++ b/desktop/README.ko.md @@ -0,0 +1,102 @@ +# OpenSession Desktop (Preview) + +[English](README.md) + +이 패키지는 `../web`의 기존 Svelte UI를 재사용하는 데스크톱 셸입니다. + +## 실행 (dev) + +```bash +cd desktop +npm install +npm run dev +``` + +`npm run dev`는 Tauri와 웹 UI 개발 서버를 함께 시작합니다. `opensession-server`는 필요하지 않습니다. + +데스크톱 명령을 실행하기 전에는 `mise`로 저장소 툴체인을 설치하세요. + +```bash +mise install +``` + +## 빌드 + +```bash +cd desktop +npm run build +``` + +빌드 흐름: + +1. `web` 정적 번들 빌드 (`../web/build`) +2. Tauri 데스크톱 번들 + +macOS universal 번들(서명 없는 로컬 검증): + +```bash +npm run tauri:build -- --target universal-apple-darwin --bundles app --no-sign --ci +``` + +## 참고 + +- UI 컴포넌트는 기존 `web` 앱을 통해 `@opensession/ui`에서 재사용합니다. 
+- 데스크톱 런타임에서는 세션/권한/인증 조회가 로컬 DB와 git-native 저장소를 사용하는 Tauri 명령으로 처리됩니다. +- 선택 사항: `OPENSESSION_LOCAL_DB_PATH`로 사용자 지정 sqlite 파일 경로를 지정할 수 있습니다. + +## 런타임 설정 (Desktop Local) + +Desktop local runtime은 typed summary model 기반 런타임 설정을 제공합니다. + +- `summary.provider.id|endpoint|model` +- `summary.prompt.template` +- `summary.response.style|shape` +- `summary.storage.trigger|backend` +- `summary.source_mode` + +Desktop local 정책: + +- `auth_enabled=false`이면 account/auth 섹션을 숨깁니다. 기본 데스크톱 로컬 동작입니다. +- source mode 선택기는 숨겨지며 내부적으로 `session_only`로 고정됩니다. +- summary storage backend 기본값은 `hidden_ref`입니다. +- `hidden_ref` 모드에서도 검색/필터 성능을 위해 로컬 SQLite(`local.db`)에 searchable list metadata를 기록합니다. +- response preview는 결정론적 fixture 렌더링이며 LLM/network dry-run이 아닙니다. + +프로바이더별 표시 필드: + +- `ollama` (`http`): endpoint + model +- `codex_exec`, `claude_cli` (`cli`): binary status + model +- `disabled`: provider detail 필드 숨김 + +Desktop local 문서: + +- `/docs`는 `opensession-server` 없이도 로컬 IPC(`desktop_get_docs_markdown`)로 렌더링할 수 있습니다. + +Desktop vector search (선택 기능): + +- vector ranking은 세션 전체 문자열이 아니라 event/line chunk 단위입니다. +- `vector_search` 설정은 typed payload로 저장됩니다. +- 기본 모델은 로컬 Ollama(`http://127.0.0.1:11434`)의 `bge-m3`입니다. +- 모델 설치는 Settings의 `desktop_vector_install_model`에서 명시적으로 실행하며 preflight 상태로 진행률을 확인할 수 있습니다. +- 인덱싱은 `desktop_vector_index_rebuild`로 명시적으로 실행하고 `desktop_vector_index_status`로 상태를 조회합니다. +- hidden refs는 summary ledger 저장소로 유지되고, vector/list metadata는 질의 성능을 위해 로컬 SQLite(`local.db`)에 유지됩니다. + +## 릴리스 + +- 제품 버전은 workspace `Cargo.toml`에서 `scripts/sync-product-version.mjs`로 desktop 파일에 동기화됩니다. +- 릴리스 전에 `node scripts/sync-product-version.mjs --check`를 실행하고, 적용이 필요하면 `--write`를 사용하세요. +- GitHub Actions `Release` 워크플로(수동)는 다음을 실행합니다. + 1. `release-plz update` + 릴리스 publish + 2. macOS universal Tauri 번들 빌드 (`.dmg`, `.app.zip`, checksum) + 3. 태그 `v`에 아티팩트 업로드 +- universal 정책: 릴리스 빌드는 `universal-apple-darwin`을 사용하고 `lipo -archs` 결과가 `x86_64 arm64`인지 검증합니다. 
+- 보안 게이트: 코드 서명 + notarization 검증이 통과한 경우에만 desktop 아티팩트를 업로드합니다. + 필요한 저장소 시크릿: + - `APPLE_CERTIFICATE` + - `APPLE_CERTIFICATE_PASSWORD` + - `APPLE_SIGNING_IDENTITY` + - `APPLE_ID` + - `APPLE_PASSWORD` + - `APPLE_TEAM_ID` +- release/CI/local 사전 점검 도우미: + - `node scripts/validate/desktop-build-preflight.mjs --mode release --os macos` diff --git a/desktop/README.md b/desktop/README.md index c10bbc84..d2be25ab 100644 --- a/desktop/README.md +++ b/desktop/README.md @@ -1,5 +1,7 @@ # OpenSession Desktop (Preview) +[한국어](README.ko.md) + This is a desktop shell that reuses the existing Svelte web UI from `../web`. ## Run (dev) diff --git a/desktop/package-lock.json b/desktop/package-lock.json index b9ca48a1..659e96c3 100644 --- a/desktop/package-lock.json +++ b/desktop/package-lock.json @@ -8,13 +8,13 @@ "name": "opensession-desktop", "version": "0.2.34", "devDependencies": { - "@tauri-apps/cli": "^2.2.7" + "@tauri-apps/cli": "^2.10.1" } }, "node_modules/@tauri-apps/cli": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/@tauri-apps/cli/-/cli-2.10.0.tgz", - "integrity": "sha512-ZwT0T+7bw4+DPCSWzmviwq5XbXlM0cNoleDKOYPFYqcZqeKY31KlpoMW/MOON/tOFBPgi31a2v3w9gliqwL2+Q==", + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/@tauri-apps/cli/-/cli-2.10.1.tgz", + "integrity": "sha512-jQNGF/5quwORdZSSLtTluyKQ+o6SMa/AUICfhf4egCGFdMHqWssApVgYSbg+jmrZoc8e1DscNvjTnXtlHLS11g==", "dev": true, "license": "Apache-2.0 OR MIT", "bin": { @@ -28,23 +28,23 @@ "url": "https://opencollective.com/tauri" }, "optionalDependencies": { - "@tauri-apps/cli-darwin-arm64": "2.10.0", - "@tauri-apps/cli-darwin-x64": "2.10.0", - "@tauri-apps/cli-linux-arm-gnueabihf": "2.10.0", - "@tauri-apps/cli-linux-arm64-gnu": "2.10.0", - "@tauri-apps/cli-linux-arm64-musl": "2.10.0", - "@tauri-apps/cli-linux-riscv64-gnu": "2.10.0", - "@tauri-apps/cli-linux-x64-gnu": "2.10.0", - "@tauri-apps/cli-linux-x64-musl": "2.10.0", - "@tauri-apps/cli-win32-arm64-msvc": "2.10.0", - 
"@tauri-apps/cli-win32-ia32-msvc": "2.10.0", - "@tauri-apps/cli-win32-x64-msvc": "2.10.0" + "@tauri-apps/cli-darwin-arm64": "2.10.1", + "@tauri-apps/cli-darwin-x64": "2.10.1", + "@tauri-apps/cli-linux-arm-gnueabihf": "2.10.1", + "@tauri-apps/cli-linux-arm64-gnu": "2.10.1", + "@tauri-apps/cli-linux-arm64-musl": "2.10.1", + "@tauri-apps/cli-linux-riscv64-gnu": "2.10.1", + "@tauri-apps/cli-linux-x64-gnu": "2.10.1", + "@tauri-apps/cli-linux-x64-musl": "2.10.1", + "@tauri-apps/cli-win32-arm64-msvc": "2.10.1", + "@tauri-apps/cli-win32-ia32-msvc": "2.10.1", + "@tauri-apps/cli-win32-x64-msvc": "2.10.1" } }, "node_modules/@tauri-apps/cli-darwin-arm64": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/@tauri-apps/cli-darwin-arm64/-/cli-darwin-arm64-2.10.0.tgz", - "integrity": "sha512-avqHD4HRjrMamE/7R/kzJPcAJnZs0IIS+1nkDP5b+TNBn3py7N2aIo9LIpy+VQq0AkN8G5dDpZtOOBkmWt/zjA==", + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/@tauri-apps/cli-darwin-arm64/-/cli-darwin-arm64-2.10.1.tgz", + "integrity": "sha512-Z2OjCXiZ+fbYZy7PmP3WRnOpM9+Fy+oonKDEmUE6MwN4IGaYqgceTjwHucc/kEEYZos5GICve35f7ZiizgqEnQ==", "cpu": [ "arm64" ], @@ -59,9 +59,9 @@ } }, "node_modules/@tauri-apps/cli-darwin-x64": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/@tauri-apps/cli-darwin-x64/-/cli-darwin-x64-2.10.0.tgz", - "integrity": "sha512-keDmlvJRStzVFjZTd0xYkBONLtgBC9eMTpmXnBXzsHuawV2q9PvDo2x6D5mhuoMVrJ9QWjgaPKBBCFks4dK71Q==", + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/@tauri-apps/cli-darwin-x64/-/cli-darwin-x64-2.10.1.tgz", + "integrity": "sha512-V/irQVvjPMGOTQqNj55PnQPVuH4VJP8vZCN7ajnj+ZS8Kom1tEM2hR3qbbIRoS3dBKs5mbG8yg1WC+97dq17Pw==", "cpu": [ "x64" ], @@ -76,9 +76,9 @@ } }, "node_modules/@tauri-apps/cli-linux-arm-gnueabihf": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-arm-gnueabihf/-/cli-linux-arm-gnueabihf-2.10.0.tgz", - "integrity": 
"sha512-e5u0VfLZsMAC9iHaOEANumgl6lfnJx0Dtjkd8IJpysZ8jp0tJ6wrIkto2OzQgzcYyRCKgX72aKE0PFgZputA8g==", + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-arm-gnueabihf/-/cli-linux-arm-gnueabihf-2.10.1.tgz", + "integrity": "sha512-Hyzwsb4VnCWKGfTw+wSt15Z2pLw2f0JdFBfq2vHBOBhvg7oi6uhKiF87hmbXOBXUZaGkyRDkCHsdzJcIfoJC2w==", "cpu": [ "arm" ], @@ -93,9 +93,9 @@ } }, "node_modules/@tauri-apps/cli-linux-arm64-gnu": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-arm64-gnu/-/cli-linux-arm64-gnu-2.10.0.tgz", - "integrity": "sha512-YrYYk2dfmBs5m+OIMCrb+JH/oo+4FtlpcrTCgiFYc7vcs6m3QDd1TTyWu0u01ewsCtK2kOdluhr/zKku+KP7HA==", + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-arm64-gnu/-/cli-linux-arm64-gnu-2.10.1.tgz", + "integrity": "sha512-OyOYs2t5GkBIvyWjA1+h4CZxTcdz1OZPCWAPz5DYEfB0cnWHERTnQ/SLayQzncrT0kwRoSfSz9KxenkyJoTelA==", "cpu": [ "arm64" ], @@ -110,9 +110,9 @@ } }, "node_modules/@tauri-apps/cli-linux-arm64-musl": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.10.0.tgz", - "integrity": "sha512-GUoPdVJmrJRIXFfW3Rkt+eGK9ygOdyISACZfC/bCSfOnGt8kNdQIQr5WRH9QUaTVFIwxMlQyV3m+yXYP+xhSVA==", + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.10.1.tgz", + "integrity": "sha512-MIj78PDDGjkg3NqGptDOGgfXks7SYJwhiMh8SBoZS+vfdz7yP5jN18bNaLnDhsVIPARcAhE1TlsZe/8Yxo2zqg==", "cpu": [ "arm64" ], @@ -127,9 +127,9 @@ } }, "node_modules/@tauri-apps/cli-linux-riscv64-gnu": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-riscv64-gnu/-/cli-linux-riscv64-gnu-2.10.0.tgz", - "integrity": "sha512-JO7s3TlSxshwsoKNCDkyvsx5gw2QAs/Y2GbR5UE2d5kkU138ATKoPOtxn8G1fFT1aDW4LH0rYAAfBpGkDyJJnw==", + "version": "2.10.1", + "resolved": 
"https://registry.npmjs.org/@tauri-apps/cli-linux-riscv64-gnu/-/cli-linux-riscv64-gnu-2.10.1.tgz", + "integrity": "sha512-X0lvOVUg8PCVaoEtEAnpxmnkwlE1gcMDTqfhbefICKDnOTJ5Est3qL0SrWxizDackIOKBcvtpejrSiVpuJI1kw==", "cpu": [ "riscv64" ], @@ -144,9 +144,9 @@ } }, "node_modules/@tauri-apps/cli-linux-x64-gnu": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-x64-gnu/-/cli-linux-x64-gnu-2.10.0.tgz", - "integrity": "sha512-Uvh4SUUp4A6DVRSMWjelww0GnZI3PlVy7VS+DRF5napKuIehVjGl9XD0uKoCoxwAQBLctvipyEK+pDXpJeoHng==", + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-x64-gnu/-/cli-linux-x64-gnu-2.10.1.tgz", + "integrity": "sha512-2/12bEzsJS9fAKybxgicCDFxYD1WEI9kO+tlDwX5znWG2GwMBaiWcmhGlZ8fi+DMe9CXlcVarMTYc0L3REIRxw==", "cpu": [ "x64" ], @@ -161,9 +161,9 @@ } }, "node_modules/@tauri-apps/cli-linux-x64-musl": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-x64-musl/-/cli-linux-x64-musl-2.10.0.tgz", - "integrity": "sha512-AP0KRK6bJuTpQ8kMNWvhIpKUkQJfcPFeba7QshOQZjJ8wOS6emwTN4K5g/d3AbCMo0RRdnZWwu67MlmtJyxC1Q==", + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-x64-musl/-/cli-linux-x64-musl-2.10.1.tgz", + "integrity": "sha512-Y8J0ZzswPz50UcGOFuXGEMrxbjwKSPgXftx5qnkuMs2rmwQB5ssvLb6tn54wDSYxe7S6vlLob9vt0VKuNOaCIQ==", "cpu": [ "x64" ], @@ -178,9 +178,9 @@ } }, "node_modules/@tauri-apps/cli-win32-arm64-msvc": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/@tauri-apps/cli-win32-arm64-msvc/-/cli-win32-arm64-msvc-2.10.0.tgz", - "integrity": "sha512-97DXVU3dJystrq7W41IX+82JEorLNY+3+ECYxvXWqkq7DBN6FsA08x/EFGE8N/b0LTOui9X2dvpGGoeZKKV08g==", + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/@tauri-apps/cli-win32-arm64-msvc/-/cli-win32-arm64-msvc-2.10.1.tgz", + "integrity": "sha512-iSt5B86jHYAPJa/IlYw++SXtFPGnWtFJriHn7X0NFBVunF6zu9+/zOn8OgqIWSl8RgzhLGXQEEtGBdR4wzpVgg==", "cpu": [ "arm64" ], @@ -195,9 +195,9 
@@ } }, "node_modules/@tauri-apps/cli-win32-ia32-msvc": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/@tauri-apps/cli-win32-ia32-msvc/-/cli-win32-ia32-msvc-2.10.0.tgz", - "integrity": "sha512-EHyQ1iwrWy1CwMalEm9z2a6L5isQ121pe7FcA2xe4VWMJp+GHSDDGvbTv/OPdkt2Lyr7DAZBpZHM6nvlHXEc4A==", + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/@tauri-apps/cli-win32-ia32-msvc/-/cli-win32-ia32-msvc-2.10.1.tgz", + "integrity": "sha512-gXyxgEzsFegmnWywYU5pEBURkcFN/Oo45EAwvZrHMh+zUSEAvO5E8TXsgPADYm31d1u7OQU3O3HsYfVBf2moHw==", "cpu": [ "ia32" ], @@ -212,9 +212,9 @@ } }, "node_modules/@tauri-apps/cli-win32-x64-msvc": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/@tauri-apps/cli-win32-x64-msvc/-/cli-win32-x64-msvc-2.10.0.tgz", - "integrity": "sha512-NTpyQxkpzGmU6ceWBTY2xRIEaS0ZLbVx1HE1zTA3TY/pV3+cPoPPOs+7YScr4IMzXMtOw7tLw5LEXo5oIG3qaQ==", + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/@tauri-apps/cli-win32-x64-msvc/-/cli-win32-x64-msvc-2.10.1.tgz", + "integrity": "sha512-6Cn7YpPFwzChy0ERz6djKEmUehWrYlM+xTaNzGPgZocw3BD7OfwfWHKVWxXzdjEW2KfKkHddfdxK1XXTYqBRLg==", "cpu": [ "x64" ], diff --git a/desktop/package.json b/desktop/package.json index 2d2df4cb..be15852f 100644 --- a/desktop/package.json +++ b/desktop/package.json @@ -10,6 +10,6 @@ "build": "npm run tauri:build" }, "devDependencies": { - "@tauri-apps/cli": "^2.2.7" + "@tauri-apps/cli": "^2.10.1" } } diff --git a/desktop/src-tauri/src/main.rs b/desktop/src-tauri/src/main.rs index 261222a7..2b6fd10e 100644 --- a/desktop/src-tauri/src/main.rs +++ b/desktop/src-tauri/src/main.rs @@ -4,8 +4,9 @@ mod app; use app::change_reader::{ desktop_ask_session_changes, desktop_change_reader_tts, desktop_read_session_changes, - require_non_empty_request_field, }; +#[cfg(test)] +use app::change_reader::require_non_empty_request_field; use app::handoff::{desktop_build_handoff, desktop_share_session_quick}; use app::launch_route::desktop_take_launch_route; #[cfg(test)] @@ -50,9 
+51,11 @@ use app::{ }; use opensession_api::{ CapabilitiesResponse, DESKTOP_IPC_CONTRACT_VERSION, DesktopApiError, - DesktopContractVersionResponse, DesktopSessionListQuery, + DesktopContractVersionResponse, oauth::{AuthProvidersResponse, OAuthProviderInfo}, }; +#[cfg(test)] +use opensession_api::DesktopSessionListQuery; use opensession_local_db::LocalDb; use opensession_runtime_config::DaemonConfig; use serde_json::json; @@ -199,8 +202,19 @@ fn desktop_get_contract_version() -> DesktopContractVersionResponse { } #[tauri::command] -fn desktop_get_docs_markdown() -> String { - include_str!("../../../docs.md").to_string() +fn desktop_get_docs_markdown(locale: Option) -> String { + desktop_docs_markdown(locale.as_deref()).to_string() +} + +fn desktop_docs_markdown(locale: Option<&str>) -> &'static str { + if locale + .map(str::trim) + .is_some_and(|value| value.eq_ignore_ascii_case("ko") || value.to_ascii_lowercase().starts_with("ko-")) + { + include_str!("../../../docs.ko.md") + } else { + include_str!("../../../docs.md") + } } fn main() { diff --git a/desktop/src-tauri/src/main_tests.rs b/desktop/src-tauri/src/main_tests.rs index 177e3588..32ee7b29 100644 --- a/desktop/src-tauri/src/main_tests.rs +++ b/desktop/src-tauri/src/main_tests.rs @@ -228,3 +228,15 @@ fn wait_for_summary_batch_completion( } final_state } + +#[test] +fn desktop_docs_markdown_selects_korean_variant() { + let selected = super::desktop_docs_markdown(Some("ko-KR")); + assert!(selected.starts_with("# 문서")); +} + +#[test] +fn desktop_docs_markdown_defaults_to_english_variant() { + let selected = super::desktop_docs_markdown(Some("en-US")); + assert!(selected.starts_with("# Documentation")); +} diff --git a/docs.ko.md b/docs.ko.md new file mode 100644 index 00000000..c2362fe7 --- /dev/null +++ b/docs.ko.md @@ -0,0 +1,390 @@ +# 문서 + +OpenSession은 AI 세션 트레이스를 등록하고, 공유하고, 검토하는 로컬 우선(local-first) 워크플로입니다. +공개 계약은 CLI, Web, API가 함께 쓰는 단일 Source URI 모델입니다. 
+ +## 문서 맵 + +- 루트 빠른 참조: `README.md` / `README.ko.md` +- 이 문서(`docs.ko.md`): 제품 계약과 명령 의미론 +- 개발/CI 정합성 런북: `docs/development-validation-flow.md` +- 하네스 실패 루프 정책: `docs/harness-auto-improve-loop.md` +- 파서 소스/재사용 경계: `docs/parser-source-matrix.md` + +## 시작하기 + +핵심 원칙: + +- 하나의 개념에는 하나의 이름을 쓴다. +- 하나의 식별자에는 하나의 URI를 쓴다. +- 암묵적인 네트워크 변경은 하지 않는다. +- 기본값을 사용하더라도 출력에 드러나야 한다. + +초보자용 3단계 빠른 시작: + +```bash +# 첫 사용자용 명령 흐름 출력 +opensession docs quickstart + +# 1) CLI 설치 +cargo install opensession + +# 2) 로컬 설정 진단 (flutter doctor 스타일) +opensession doctor + +# 3) 명시적 확인 후 설정 적용 +opensession doctor --fix --profile local +``` + +- `doctor --fix`는 훅/shim/fanout 변경을 적용하기 전에 계획을 출력하고 확인을 받습니다. +- 자동화나 비대화형 셸에서는 명시적 모드와 승인 플래그를 함께 사용하세요: + `opensession doctor --fix --yes --profile local --fanout-mode hidden_ref` + +빠른 경로: + +```bash +# 1) agent-native 로그를 canonical HAIL JSONL로 변환 +opensession parse --profile codex ./raw-session.jsonl > ./session.hail.jsonl + +# 2) canonical 세션을 로컬 object store에 등록 +opensession register ./session.hail.jsonl +# -> os://src/local/ + +# 3) 로컬 canonical 바이트 다시 읽기 +opensession cat os://src/local/ + +# 4) summary 메타데이터 확인 +opensession inspect os://src/local/ +``` + +설치: + +```bash +cargo install opensession +``` + +설치 프로필: + +- `local`(기본): backup/summary/handoff 중심 CLI 로컬 우선 경로 +- `app`: 데스크톱 앱 사용자용 프로필 (`opensession doctor --fix --profile app --open-target app`) + +자동 수집 참고: + +- `opensession`은 parse/register/share/handoff를 담당합니다. +- 백그라운드 자동 수집은 daemon 프로세스(`opensession-daemon run`)가 실행 중이어야 합니다. + +레포 개발 툴체인: + +- 로컬 검증 훅은 `mise`를 통해 실행됩니다. +- `./.githooks/pre-commit`, `./.githooks/pre-push` 전에 레포 루트에서 `mise install`을 실행하세요. 
+- 데스크톱 사전 점검 게이트: `node scripts/validate/desktop-build-preflight.mjs --mode local` + +로컬 object storage: + +- 레포 내부: `.opensession/objects/sha256/ab/cd/.jsonl` +- 레포 외부: `~/.local/share/opensession/objects/sha256/ab/cd/.jsonl` + +해시 정책: + +- canonical HAIL JSONL 바이트의 SHA-256 + +## 데스크톱 런타임 Summary 계약 (v3) + +데스크톱 IPC/runtime settings는 typed summary 계약을 사용합니다. + +- `summary.provider.id|endpoint|model` +- `summary.prompt.template` +- `summary.response.style|shape` +- `summary.storage.trigger|backend` +- `summary.source_mode` +- `vector_search.enabled|provider|model|endpoint|granularity|chunk_size_lines|chunk_overlap_lines|top_k_chunks|top_k_sessions` + +데스크톱 로컬 제약: + +- `auth_enabled=false` 런타임은 의도적으로 account/auth UI를 숨깁니다. +- 데스크톱 로컬 런타임에서는 `summary.source_mode`가 `session_only`로 고정됩니다. +- `session_or_git_changes`는 CI/CLI 같은 비-데스크톱 런타임 컨텍스트용입니다. +- 기본 summary storage backend는 `hidden_ref`입니다. +- `hidden_ref`를 써도 list/search 메타데이터와 vector index 메타데이터는 로컬 SQLite(`OPENSESSION_LOCAL_DB_PATH` 또는 기본 `~/.local/share/opensession/local.db`)에 인덱싱됩니다. +- Settings의 runtime response preview는 모델 출력이 아니라 결정론적 로컬 샘플 렌더링입니다. + +데스크톱 로컬 확장: + +- HTTP docs 라우트가 없어도 `/docs`는 desktop IPC(`desktop_get_docs_markdown`)에서 해석할 수 있습니다. +- 벡터 검색은 이벤트/라인 청크 인덱싱과 로컬 Ollama 임베딩(기본 `bge-m3`)을 사용합니다. +- 벡터 검색 활성화는 명시적입니다. 먼저 모델 설치가 끝나야 합니다(`desktop_vector_preflight`, `desktop_vector_install_model`). +- 인덱싱도 명시적이며 상태를 관찰할 수 있습니다(`desktop_vector_index_rebuild`, `desktop_vector_index_status`). + +## Git을 통한 공유 + +`register`는 로컬 전용입니다. 원격 공유는 `share`로 명시적으로 수행합니다. 
+ +```bash +# 로컬 source -> 원클릭 git share URI 흐름 +opensession share os://src/local/ --quick + +# 선택적 네트워크 변경 +opensession share os://src/local/ --git --remote origin --push +``` + +`share --git` / `share --quick` 규칙: + +- `--quick`은 remote를 자동 감지합니다(`origin` 우선, 단일 remote fallback) +- `--git`은 명시적 `--remote `가 필요합니다 +- 기본 ref: `refs/opensession/branches/` +- 기본 path: `sessions/.jsonl` +- `--push`를 생략하면 네트워크 변경 없이 실행 가능한 push 명령만 출력합니다 +- `--quick`은 첫 푸시 때 한 번 확인을 받고, 레포별 동의를 `.git/config`의 `opensession.share.auto-push-consent=true`에 저장합니다 +- 새 write에는 더 이상 레거시 고정 ref `refs/heads/opensession/sessions`를 사용하지 않습니다 + +설치 후 그대로 쓰는 설정: + +```bash +opensession doctor +opensession doctor --fix --profile local +# interactive 셸에서 선택 모드를 명시하고 싶다면 +opensession doctor --fix --profile local --fanout-mode hidden_ref +# automation/non-interactive +opensession doctor --fix --yes --profile local --fanout-mode hidden_ref --open-target web +``` + +- `doctor` check 모드는 내부 setup check로, `doctor --fix`는 내부 setup apply 흐름으로 연결됩니다. +- `doctor --fix`는 명시적 승인이 필요합니다. interactive 기본 프롬프트 또는 자동화용 `--yes`를 사용합니다. +- 현재 레포에 OpenSession 관리 `pre-push` 훅을 설치/업데이트합니다. +- `~/.local/share/opensession/bin/opensession`에 OpenSession shim을 설치/업데이트합니다. +- fanout 모드가 아직 설정되지 않은 interactive 셸에서는 첫 적용 시 `hidden_ref` 또는 `git_notes`를 선택하도록 묻고, 결과를 로컬 git config(`opensession.fanout-mode`)에 저장합니다. +- 비대화형 적용은 레포에 저장된 `opensession.fanout-mode`가 없으면 명시적 `--fanout-mode`가 필요합니다. +- open target 기본값은 profile을 따릅니다(`local -> web`, `app -> app`). +- `doctor` 출력에는 `~/.config/opensession/daemon.pid` 기준 daemon 상태가 포함됩니다. +- daemon 시작: `opensession-daemon run` (소스 체크아웃에서는 `cargo run -p opensession-daemon -- run`) +- `remote..push`는 수정하지 않습니다. +- hook fanout push는 best-effort이며 경고만 출력합니다. +- fanout helper가 없거나 fanout push가 실패하면 push를 실패시키려면 `OPENSESSION_STRICT=1`을 사용하세요. +- PR 자동화는 현재 same-repo PR만 지원합니다. +- merge/branch delete cleanup은 ledger ref를 즉시 제거하고, 실제 object 제거는 remote GC 정책을 따릅니다. 
+ +`share --web` 규칙: + +```bash +opensession config init --base-url https://opensession.io +opensession config show +opensession share os://src/git//ref//path/ --web +``` + +- `share --web`는 명시적 `.opensession/config.toml`이 필요합니다 +- 로컬 URI에 `--web`을 붙이면 후속 조치(`share --git`)와 함께 거부됩니다 +- 사람이 읽는 출력에서는 canonical URL이 첫 줄에 인쇄됩니다 + +## Cleanup 자동화 + +OpenSession은 서버 인프라를 바꾸지 않고도 사용자 저장소에서 hidden ref cleanup을 구성할 수 있습니다. + +```bash +# provider-aware cleanup 템플릿/설정 초기화 +opensession cleanup init --provider auto + +# 비대화형 설정 +opensession cleanup init --provider auto --yes + +# 설정 + janitor dry-run 요약 확인 +opensession cleanup status + +# dry-run (기본) +opensession cleanup run + +# 실제 삭제 적용 +opensession cleanup run --apply +``` + +기본값: + +- hidden ref TTL: 30일 +- artifact branch TTL: 30일 + +민감한 저장소용: + +```bash +opensession cleanup init --provider auto --hidden-ttl-days 0 --artifact-ttl-days 0 --yes +``` + +프로바이더 매트릭스: + +- GitHub: `.github/workflows/opensession-cleanup.yml`와 `.github/workflows/opensession-session-review.yml`을 생성합니다. PR 업데이트 시 `opensession/pr--sessions`를 게시/갱신하고 PR 코멘트를 upsert합니다. +- GitLab: `.gitlab/opensession-cleanup.yml`와 `.gitlab/opensession-session-review.yml`을 생성합니다. `.gitlab-ci.yml`은 OpenSession 관리 마커 블록이 있을 때만(또는 새 파일일 때만) 갱신합니다. MR 파이프라인은 `opensession/mr--sessions`를 게시/갱신하고 MR note를 남깁니다. +- Generic git: cron/system scheduler 연동용 `.opensession/cleanup/cron.example`를 생성합니다. +- session-review 코멘트에는 `Reviewer Quick Digest`가 포함되며, Q&A 발췌(`Question | Answer` 행), 수정 파일 요약, 추가/수정 테스트가 함께 표시됩니다. + +## 개발 및 검증 + +정식 검증 흐름(훅, API/worker/web/desktop E2E, CI 정합성, artifact 정책): + +- `docs/development-validation-flow.md` + +빠른 로컬 게이트 명령: + +```bash +./.githooks/pre-commit +./.githooks/pre-push +``` + +데스크톱 빌드 정책: + +- CI에서 Linux 데스크톱 번들 빌드 검증(`desktop-bundle-verify`)을 필수로 수행합니다. +- macOS 데스크톱 릴리즈 타깃은 `universal-apple-darwin`만 허용합니다. +- universal 아키텍처는 `lipo -archs`로 검증하며 반드시 `x86_64`와 `arm64`를 모두 포함해야 합니다. 
+- 정기/수동 `Desktop Dry Run` 워크플로는 no-sign 데스크톱 번들을 검증하고 diagnostics/metrics artifact를 업로드합니다. + +릴리즈 서명 체크리스트(수동 secret 준비): + +- `APPLE_CERTIFICATE` +- `APPLE_CERTIFICATE_PASSWORD` +- `APPLE_SIGNING_IDENTITY` +- `APPLE_ID` +- `APPLE_PASSWORD` +- `APPLE_TEAM_ID` + +## 실패 복구 + +일반적인 온보딩 흐름이 실패했을 때 아래 명령을 사용하세요. + +1. `share --web`에 로컬 URI를 넣은 경우: +```bash +opensession share os://src/local/ --git --remote origin +opensession share os://src/git//ref//path/ --web +``` +2. `share --git`에 `--remote`가 없는 경우: +```bash +opensession share os://src/local/ --quick +``` +3. `share --git`을 git 저장소 밖에서 실행한 경우: +```bash +cd +opensession share os://src/local/ --quick +``` +4. `share --web`에 config가 없는 경우: +```bash +opensession config init --base-url https://opensession.io +opensession config show +``` +5. `register`가 non-canonical 입력을 거부한 경우: +```bash +opensession parse --profile codex ./raw-session.jsonl --out ./session.hail.jsonl +opensession register ./session.hail.jsonl +``` +6. `parse`에서 parser/input이 맞지 않는 경우: +```bash +opensession parse --help +opensession parse --profile codex ./raw-session.jsonl --preview +``` +7. `view` target 해석이 실패한 경우: +```bash +opensession view os://src/... --no-open +opensession view ./session.hail.jsonl --no-open +opensession view HEAD +``` +8. `cleanup run` 전에 초기화를 하지 않은 경우: +```bash +opensession cleanup init --provider auto +opensession cleanup run +``` + +5분 복구 경로: + +```bash +opensession doctor +opensession doctor --fix --profile local +opensession parse --profile codex ./raw-session.jsonl --out ./session.hail.jsonl +opensession register ./session.hail.jsonl +opensession share os://src/local/ --quick +``` + +## 타임라인 검토 + +canonical 웹 라우트: + +- `/src/gh///ref//path/` +- `/src/gl//ref//path/` +- `/src/git//ref//path/` + +레거시 단축 라우트는 예약되어 있으며 404를 반환합니다. + +- `/git` +- `/gh/*` +- `/resolve/*` + +서버 parse preview 엔드포인트: + +- `POST /api/parse/preview` + +## Review 뷰 + +`opensession view`는 리뷰 중심 웹 진입점입니다. 
+ +```bash +# Source URI -> /src/* +opensession view os://src/gl//ref//path/ + +# 로컬 source URI / jsonl 파일 -> /review/local/ +opensession view os://src/local/ +opensession view ./session.hail.jsonl + +# commit/ref/range -> commit-linked local review bundle +opensession view HEAD +opensession view main..feature/my-branch +``` + +기본 모드는 web입니다. URL만 출력하려면 `--no-open`을 사용하세요. + +로컬 `view` 대상은 등록된 git credential이 필요하지 않습니다. +로컬 git object / 로컬 source byte를 사용해 local review bundle을 만들기 때문입니다. +commit-linked local review page는 Q&A 내용 발췌, 수정 파일, 추가/수정 테스트를 포함하는 `Reviewer Quick Digest` 패널을 노출합니다. + +## Handoff + +handoff artifact는 immutable입니다. `build`는 매번 새 artifact URI를 생성합니다. + +```bash +# immutable artifact 생성 +opensession handoff build --from os://src/local/ --pin latest +# -> os://artifact/ + +# payload 표현 읽기 +opensession handoff artifacts get os://artifact/ --format canonical --encode jsonl + +# 결정론적 해시 검증 +opensession handoff artifacts verify os://artifact/ + +# alias 제어 +opensession handoff artifacts pin latest os://artifact/ +opensession handoff artifacts unpin latest + +# 제거 정책 (unpinned만 허용) +opensession handoff artifacts rm os://artifact/ +``` + +v1에는 refresh/update 명령이 없습니다. 다시 build하고 pin alias를 옮기면 됩니다. + +## 선택적 UI + +CLI가 정식 운영 표면입니다. +Web과 TUI는 같은 URI 계약 위에서 동작하는 선택적 인터페이스입니다. 
+ +## 개념 + +source / artifact 식별자: + +- `os://src/local/` +- `os://src/gh///ref//path/` +- `os://src/gl//ref//path/` +- `os://src/git//ref//path/` +- `os://artifact/` + +인코딩 규칙: + +- `ref_enc`: RFC3986 percent-encoding +- `project_b64`, `remote_b64`: base64url(no padding) + +API 경계: + +- `DELETE /api/admin/sessions/{id}` +- Header: `X-OpenSession-Admin-Key` diff --git a/packages/ui/package-lock.json b/packages/ui/package-lock.json index 167b1b53..def31f48 100644 --- a/packages/ui/package-lock.json +++ b/packages/ui/package-lock.json @@ -10,13 +10,13 @@ "dependencies": { "effect": "3.19.19", "highlight.js": "^11.11.1", - "isomorphic-dompurify": "^2.28.0", - "marked": "^17.0.1" + "isomorphic-dompurify": "^3.0.0", + "marked": "^17.0.4" }, "devDependencies": { - "@biomejs/biome": "2.3.15", + "@biomejs/biome": "^2.4.6", "jsdom": "^28.1.0", - "svelte": "^5.51.5", + "svelte": "^5.53.7", "tsx": "^4.20.5", "turndown": "^7.2.0", "typescript": "^5.9.3" @@ -67,9 +67,9 @@ "license": "MIT" }, "node_modules/@biomejs/biome": { - "version": "2.3.15", - "resolved": "https://registry.npmjs.org/@biomejs/biome/-/biome-2.3.15.tgz", - "integrity": "sha512-u+jlPBAU2B45LDkjjNNYpc1PvqrM/co4loNommS9/sl9oSxsAQKsNZejYuUztvToB5oXi1tN/e62iNd6ESiY3g==", + "version": "2.4.6", + "resolved": "https://registry.npmjs.org/@biomejs/biome/-/biome-2.4.6.tgz", + "integrity": "sha512-QnHe81PMslpy3mnpL8DnO2M4S4ZnYPkjlGCLWBZT/3R9M6b5daArWMMtEfP52/n174RKnwRIf3oT8+wc9ihSfQ==", "dev": true, "license": "MIT OR Apache-2.0", "bin": { @@ -83,20 +83,20 @@ "url": "https://opencollective.com/biome" }, "optionalDependencies": { - "@biomejs/cli-darwin-arm64": "2.3.15", - "@biomejs/cli-darwin-x64": "2.3.15", - "@biomejs/cli-linux-arm64": "2.3.15", - "@biomejs/cli-linux-arm64-musl": "2.3.15", - "@biomejs/cli-linux-x64": "2.3.15", - "@biomejs/cli-linux-x64-musl": "2.3.15", - "@biomejs/cli-win32-arm64": "2.3.15", - "@biomejs/cli-win32-x64": "2.3.15" + "@biomejs/cli-darwin-arm64": "2.4.6", + "@biomejs/cli-darwin-x64": 
"2.4.6", + "@biomejs/cli-linux-arm64": "2.4.6", + "@biomejs/cli-linux-arm64-musl": "2.4.6", + "@biomejs/cli-linux-x64": "2.4.6", + "@biomejs/cli-linux-x64-musl": "2.4.6", + "@biomejs/cli-win32-arm64": "2.4.6", + "@biomejs/cli-win32-x64": "2.4.6" } }, "node_modules/@biomejs/cli-darwin-arm64": { - "version": "2.3.15", - "resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-arm64/-/cli-darwin-arm64-2.3.15.tgz", - "integrity": "sha512-SDCdrJ4COim1r8SNHg19oqT50JfkI/xGZHSyC6mGzMfKrpNe/217Eq6y98XhNTc0vGWDjznSDNXdUc6Kg24jbw==", + "version": "2.4.6", + "resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-arm64/-/cli-darwin-arm64-2.4.6.tgz", + "integrity": "sha512-NW18GSyxr+8sJIqgoGwVp5Zqm4SALH4b4gftIA0n62PTuBs6G2tHlwNAOj0Vq0KKSs7Sf88VjjmHh0O36EnzrQ==", "cpu": [ "arm64" ], @@ -111,9 +111,9 @@ } }, "node_modules/@biomejs/cli-darwin-x64": { - "version": "2.3.15", - "resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-x64/-/cli-darwin-x64-2.3.15.tgz", - "integrity": "sha512-RkyeSosBtn3C3Un8zQnl9upX0Qbq4E3QmBa0qjpOh1MebRbHhNlRC16jk8HdTe/9ym5zlfnpbb8cKXzW+vlTxw==", + "version": "2.4.6", + "resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-x64/-/cli-darwin-x64-2.4.6.tgz", + "integrity": "sha512-4uiE/9tuI7cnjtY9b07RgS7gGyYOAfIAGeVJWEfeCnAarOAS7qVmuRyX6d7JTKw28/mt+rUzMasYeZ+0R/U1Mw==", "cpu": [ "x64" ], @@ -128,9 +128,9 @@ } }, "node_modules/@biomejs/cli-linux-arm64": { - "version": "2.3.15", - "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64/-/cli-linux-arm64-2.3.15.tgz", - "integrity": "sha512-FN83KxrdVWANOn5tDmW6UBC0grojchbGmcEz6JkRs2YY6DY63sTZhwkQ56x6YtKhDVV1Unz7FJexy8o7KwuIhg==", + "version": "2.4.6", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64/-/cli-linux-arm64-2.4.6.tgz", + "integrity": "sha512-kMLaI7OF5GN1Q8Doymjro1P8rVEoy7BKQALNz6fiR8IC1WKduoNyteBtJlHT7ASIL0Cx2jR6VUOBIbcB1B8pew==", "cpu": [ "arm64" ], @@ -145,9 +145,9 @@ } }, "node_modules/@biomejs/cli-linux-arm64-musl": { - "version": "2.3.15", - 
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.3.15.tgz", - "integrity": "sha512-SSSIj2yMkFdSkXqASzIBdjySBXOe65RJlhKEDlri7MN19RC4cpez+C0kEwPrhXOTgJbwQR9QH1F4+VnHkC35pg==", + "version": "2.4.6", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.4.6.tgz", + "integrity": "sha512-F/JdB7eN22txiTqHM5KhIVt0jVkzZwVYrdTR1O3Y4auBOQcXxHK4dxULf4z43QyZI5tsnQJrRBHZy7wwtL+B3A==", "cpu": [ "arm64" ], @@ -162,9 +162,9 @@ } }, "node_modules/@biomejs/cli-linux-x64": { - "version": "2.3.15", - "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64/-/cli-linux-x64-2.3.15.tgz", - "integrity": "sha512-T8n9p8aiIKOrAD7SwC7opiBM1LYGrE5G3OQRXWgbeo/merBk8m+uxJ1nOXMPzfYyFLfPlKF92QS06KN1UW+Zbg==", + "version": "2.4.6", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64/-/cli-linux-x64-2.4.6.tgz", + "integrity": "sha512-oHXmUFEoH8Lql1xfc3QkFLiC1hGR7qedv5eKNlC185or+o4/4HiaU7vYODAH3peRCfsuLr1g6v2fK9dFFOYdyw==", "cpu": [ "x64" ], @@ -179,9 +179,9 @@ } }, "node_modules/@biomejs/cli-linux-x64-musl": { - "version": "2.3.15", - "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-2.3.15.tgz", - "integrity": "sha512-dbjPzTh+ijmmNwojFYbQNMFp332019ZDioBYAMMJj5Ux9d8MkM+u+J68SBJGVwVeSHMYj+T9504CoxEzQxrdNw==", + "version": "2.4.6", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-2.4.6.tgz", + "integrity": "sha512-C9s98IPDu7DYarjlZNuzJKTjVHN03RUnmHV5htvqsx6vEUXCDSJ59DNwjKVD5XYoSS4N+BYhq3RTBAL8X6svEg==", "cpu": [ "x64" ], @@ -196,9 +196,9 @@ } }, "node_modules/@biomejs/cli-win32-arm64": { - "version": "2.3.15", - "resolved": "https://registry.npmjs.org/@biomejs/cli-win32-arm64/-/cli-win32-arm64-2.3.15.tgz", - "integrity": "sha512-puMuenu/2brQdgqtQ7geNwQlNVxiABKEZJhMRX6AGWcmrMO8EObMXniFQywy2b81qmC+q+SDvlOpspNwz0WiOA==", + "version": "2.4.6", + "resolved": 
"https://registry.npmjs.org/@biomejs/cli-win32-arm64/-/cli-win32-arm64-2.4.6.tgz", + "integrity": "sha512-xzThn87Pf3YrOGTEODFGONmqXpTwUNxovQb72iaUOdcw8sBSY3+3WD8Hm9IhMYLnPi0n32s3L3NWU6+eSjfqFg==", "cpu": [ "arm64" ], @@ -213,9 +213,9 @@ } }, "node_modules/@biomejs/cli-win32-x64": { - "version": "2.3.15", - "resolved": "https://registry.npmjs.org/@biomejs/cli-win32-x64/-/cli-win32-x64-2.3.15.tgz", - "integrity": "sha512-kDZr/hgg+igo5Emi0LcjlgfkoGZtgIpJKhnvKTRmMBv6FF/3SDyEV4khBwqNebZIyMZTzvpca9sQNSXJ39pI2A==", + "version": "2.4.6", + "resolved": "https://registry.npmjs.org/@biomejs/cli-win32-x64/-/cli-win32-x64-2.4.6.tgz", + "integrity": "sha512-7++XhnsPlr1HDbor5amovPjOH6vsrFOCdp93iKXhFn6bcMUI6soodj3WWKfgEO6JosKU1W5n3uky3WW9RlRjTg==", "cpu": [ "x64" ], @@ -936,9 +936,9 @@ } }, "node_modules/aria-query": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.2.tgz", - "integrity": "sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw==", + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.1.tgz", + "integrity": "sha512-Z/ZeOgVl7bcSYZ/u/rh0fOpvEpq//LZmdbkXyc7syVzjPAhfOa9ebsdTSjEBDU4vs5nC98Kfduj1uFo0qyET3g==", "dev": true, "license": "Apache-2.0", "engines": { @@ -1046,10 +1046,13 @@ "license": "MIT" }, "node_modules/dompurify": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.3.1.tgz", - "integrity": "sha512-qkdCKzLNtrgPFP1Vo+98FRzJnBRGe4ffyCea9IwHB1fyxPOeNTHpLKYGd4Uk9xvNoH0ZoOjwZxNptyMwqrId1Q==", + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.3.2.tgz", + "integrity": "sha512-6obghkliLdmKa56xdbLOpUZ43pAR6xFy1uOrxBaIDjT+yaRuuybLjGS9eVBoSR/UPU5fq3OXClEHLJNGvbxKpQ==", "license": "(MPL-2.0 OR Apache-2.0)", + "engines": { + "node": ">=20" + }, "optionalDependencies": { "@types/trusted-types": "^2.0.7" } @@ -1249,16 +1252,16 @@ } }, "node_modules/isomorphic-dompurify": { - 
"version": "2.36.0", - "resolved": "https://registry.npmjs.org/isomorphic-dompurify/-/isomorphic-dompurify-2.36.0.tgz", - "integrity": "sha512-E8YkGyPY3a/U5s0WOoc8Ok+3SWL/33yn2IHCoxCFLBUUPVy9WGa++akJZFxQCcJIhI+UvYhbrbnTIFQkHKZbgA==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/isomorphic-dompurify/-/isomorphic-dompurify-3.0.0.tgz", + "integrity": "sha512-5K+MYP7Nrg74+Bi+QmQGzQ/FgEOyVHWsN8MuJy5wYQxxBRxPnWsD25Tjjt5FWYhan3OQ+vNLubyNJH9dfG03lQ==", "license": "MIT", "dependencies": { "dompurify": "^3.3.1", "jsdom": "^28.0.0" }, "engines": { - "node": ">=20.19.5" + "node": "^20.19.0 || ^22.12.0 || >=24.0.0" } }, "node_modules/jsdom": { @@ -1328,9 +1331,9 @@ } }, "node_modules/marked": { - "version": "17.0.1", - "resolved": "https://registry.npmjs.org/marked/-/marked-17.0.1.tgz", - "integrity": "sha512-boeBdiS0ghpWcSwoNm/jJBwdpFaMnZWRzjA6SkUMYb40SVaN1x7mmfGKp0jvexGcx+7y2La5zRZsYFZI6Qpypg==", + "version": "17.0.4", + "resolved": "https://registry.npmjs.org/marked/-/marked-17.0.4.tgz", + "integrity": "sha512-NOmVMM+KAokHMvjWmC5N/ZOvgmSWuqJB8FoYI019j4ogb/PeRMKoKIjReZ2w3376kkA8dSJIP8uD993Kxc0iRQ==", "license": "MIT", "bin": { "marked": "bin/marked.js" @@ -1429,9 +1432,9 @@ } }, "node_modules/svelte": { - "version": "5.51.5", - "resolved": "https://registry.npmjs.org/svelte/-/svelte-5.51.5.tgz", - "integrity": "sha512-/4tR5cLsWOgH3wnNRXnFoWaJlwPGbJanZPSKSD6nHM2y01dvXeEF4Nx7jevoZ+UpJpkIHh6mY2tqDncuI4GHng==", + "version": "5.53.7", + "resolved": "https://registry.npmjs.org/svelte/-/svelte-5.53.7.tgz", + "integrity": "sha512-uxck1KI7JWtlfP3H6HOWi/94soAl23jsGJkBzN2BAWcQng0+lTrRNhxActFqORgnO9BHVd1hKJhG+ljRuIUWfQ==", "dev": true, "license": "MIT", "dependencies": { @@ -1441,7 +1444,7 @@ "@types/estree": "^1.0.5", "@types/trusted-types": "^2.0.7", "acorn": "^8.12.1", - "aria-query": "^5.3.1", + "aria-query": "5.3.1", "axobject-query": "^4.1.0", "clsx": "^2.1.1", "devalue": "^5.6.3", diff --git a/packages/ui/package.json b/packages/ui/package.json index 
c2dea303..8d33f875 100644 --- a/packages/ui/package.json +++ b/packages/ui/package.json @@ -31,8 +31,8 @@ "dependencies": { "effect": "3.19.19", "highlight.js": "^11.11.1", - "isomorphic-dompurify": "^2.28.0", - "marked": "^17.0.1" + "isomorphic-dompurify": "^3.0.0", + "marked": "^17.0.4" }, "scripts": { "lint": "biome check src/", @@ -41,9 +41,9 @@ "test": "tsx --test src/*.test.ts src/**/*.test.ts" }, "devDependencies": { - "@biomejs/biome": "2.3.15", + "@biomejs/biome": "^2.4.6", "jsdom": "^28.1.0", - "svelte": "^5.51.5", + "svelte": "^5.53.7", "tsx": "^4.20.5", "turndown": "^7.2.0", "typescript": "^5.9.3" diff --git a/packages/ui/src/api-internal/runtime.ts b/packages/ui/src/api-internal/runtime.ts index 1a1a85bd..57018018 100644 --- a/packages/ui/src/api-internal/runtime.ts +++ b/packages/ui/src/api-internal/runtime.ts @@ -34,6 +34,7 @@ export interface RuntimeEnv { getStorageItem: (key: string) => string | null; setStorageItem: (key: string, value: string) => void; removeStorageItem: (key: string) => void; + getPreferredLanguages?: () => string[]; getDocumentCookie: () => string; getLocation: () => RuntimeLocation; replaceHistoryUrl: (url: string) => void; @@ -69,6 +70,18 @@ export function createBrowserRuntimeEnv(): RuntimeEnv { if (typeof localStorage === 'undefined') return; localStorage.removeItem(key); }, + getPreferredLanguages() { + if (typeof navigator === 'undefined') return []; + if (Array.isArray(navigator.languages) && navigator.languages.length > 0) { + return navigator.languages.filter( + (value): value is string => typeof value === 'string' && value.trim().length > 0, + ); + } + if (typeof navigator.language === 'string' && navigator.language.trim().length > 0) { + return [navigator.language]; + } + return []; + }, getDocumentCookie() { if (typeof document === 'undefined') return ''; return document.cookie ?? 
''; @@ -218,3 +231,25 @@ export function getOAuthUrl(runtime: RuntimeEnv, provider: string): string { if (isDesktopLocalRuntime(runtime)) return '#'; return `${getBaseUrl(runtime)}/api/auth/oauth/${encodeURIComponent(provider)}`; } + +export function getPreferredLanguages(runtime: RuntimeEnv): string[] { + if (typeof runtime.getPreferredLanguages === 'function') { + return runtime.getPreferredLanguages(); + } + if (!runtime.hasWindow() || typeof navigator === 'undefined') return []; + if (Array.isArray(navigator.languages) && navigator.languages.length > 0) { + return navigator.languages.filter( + (value): value is string => typeof value === 'string' && value.trim().length > 0, + ); + } + if (typeof navigator.language === 'string' && navigator.language.trim().length > 0) { + return [navigator.language]; + } + return []; +} + +export function setDocumentLanguage(language: string) { + if (typeof document === 'undefined') return; + if (!document.documentElement) return; + document.documentElement.lang = language; +} diff --git a/packages/ui/src/components/AppShell.svelte b/packages/ui/src/components/AppShell.svelte index 272e53ef..fda609ba 100644 --- a/packages/ui/src/components/AppShell.svelte +++ b/packages/ui/src/components/AppShell.svelte @@ -1,6 +1,6 @@ - +
@@ -500,20 +533,35 @@ function handleGlobalKey(e: KeyboardEvent) { {#if shellState.accountMenuOpen}
@@ -558,7 +606,7 @@ function handleGlobalKey(e: KeyboardEvent) { href="/login" class="px-1.5 py-1 text-xs text-text-secondary transition-colors hover:bg-bg-hover hover:text-text-primary sm:px-3 sm:text-sm" > - Login + {translate($appLocale, 'nav.login')} {/if}
@@ -573,7 +621,7 @@ function handleGlobalKey(e: KeyboardEvent) { class="shrink-0 flex items-center gap-2 border-t border-border bg-bg-secondary px-2 py-1 text-[11px] text-text-muted sm:gap-3 sm:px-4 sm:text-xs" > - Shortcuts + {translate($appLocale, 'footer.shortcuts')} @@ -592,10 +640,23 @@ function handleGlobalKey(e: KeyboardEvent) { {/each} + + + {translate($appLocale, 'footer.github')} + + + opensession.io - opensession.io @@ -604,13 +665,13 @@ function handleGlobalKey(e: KeyboardEvent) {
{#if visiblePaletteCommands.length === 0} -

No commands matched your query.

+

+ {translate($appLocale, 'palette.noMatches')} +

{:else} {#each visiblePaletteCommands as command, idx (command.id)}
@@ -660,13 +725,13 @@ function handleGlobalKey(e: KeyboardEvent) {
-

Quick Help

-

Session Runtime Guide

+

+ {translate($appLocale, 'help.quickHelp')} +

+

+ {translate($appLocale, 'help.sessionRuntimeGuide')} +

-

Runtime Summary

+

+ {translate($appLocale, 'help.runtimeSummary')} +

    -
  • Provider: summary model/transport selection
  • -
  • Output Shape: layered/file_list/security_first
  • -
  • Prompt Reset: restore default template instantly
  • +
  • {translate($appLocale, 'help.runtimeProvider')}
  • +
  • {translate($appLocale, 'help.runtimeShape')}
  • +
  • {translate($appLocale, 'help.runtimePrompt')}
-

Vector

+

+ {translate($appLocale, 'help.vector')} +

    -
  • Auto chunking uses session size best-practice profile
  • -
  • Manual chunking unlocks chunk size/overlap fields
  • -
  • Fix unreachable provider: start `ollama serve`
  • +
  • {translate($appLocale, 'help.vectorAutoChunk')}
  • +
  • {translate($appLocale, 'help.vectorManual')}
  • +
  • {translate($appLocale, 'help.vectorFix')}
-

Change Reader

+

+ {translate($appLocale, 'help.changeReader')} +

    -
  • Text mode: read and ask from local context
  • -
  • Voice mode: OpenAI TTS playback for narrative/answer
  • -
  • Set API key in Runtime Summary {'>'} Change Reader
  • +
  • {translate($appLocale, 'help.changeReaderText')}
  • +
  • {translate($appLocale, 'help.changeReaderVoice')}
  • +
  • {translate($appLocale, 'help.changeReaderApiKey')}
- Shift+/ opens help · Esc closes dialog - Tab focus is trapped inside this dialog + {translate($appLocale, 'help.footerShortcuts')} + {translate($appLocale, 'help.footerFocus')}
diff --git a/packages/ui/src/components/AuthCallbackPage.svelte b/packages/ui/src/components/AuthCallbackPage.svelte index 6d1dde61..8a0ed921 100644 --- a/packages/ui/src/components/AuthCallbackPage.svelte +++ b/packages/ui/src/components/AuthCallbackPage.svelte @@ -1,5 +1,6 @@ - Docs - opensession.io + {translate($appLocale, 'docs.title')}
-

Docs

+

+ {translate($appLocale, 'docs.kicker')} +

{parsed.title}

- Functional guides for register, share, inspect, and handoff workflows. + {translate($appLocale, 'docs.heroCopy')}

@@ -202,7 +233,7 @@ onMount(() => { onclick={() => onNavigate('/sessions')} class="docs-nav-btn border border-border px-3 py-1 text-text-secondary transition-colors hover:border-accent hover:text-accent" > - Sessions + {translate($appLocale, 'docs.sessions')}
@@ -210,13 +241,13 @@ onMount(() => { {#if loading}
- Loading docs... + {translate($appLocale, 'docs.loading')}
{:else if error}
{error}
{:else if parsed.chapters.length === 0}
- No documentation chapters were found. + {translate($appLocale, 'docs.notFound')}
{:else}
@@ -224,7 +255,7 @@ onMount(() => { data-testid="docs-toc" class="docs-toc hidden h-fit border border-border bg-bg-secondary/65 p-3 lg:block lg:sticky lg:top-5 lg:max-h-[calc(100vh-5rem)] lg:overflow-y-auto" > -

Contents

+

{localize('Contents', '목차')}