diff --git a/CHANGELOG.md b/CHANGELOG.md index ff4f69b..0728f80 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,7 @@ - Add Windows ARM64 release artifacts - Move dictionary cache directory to platform-specific data directories instead of /tmp +- Allow overriding the global `codebook.toml` path via LSP initialization option `globalConfigPath` [0.3.18] diff --git a/README.md b/README.md index 45487a4..c07ddb9 100644 --- a/README.md +++ b/README.md @@ -43,13 +43,14 @@ If quickfix code actions are not showing up for specific languages, ensure your }, ``` -To enable DEBUG logs, add this to your settings.json: +To enable DEBUG logs or change the global config path, add this to your settings.json: ```json "lsp": { "codebook": { "initialization_options": { - "logLevel": "debug" + "logLevel": "debug", + "globalConfigPath": "~/.config/codebook/codebook.toml" } } }, @@ -212,6 +213,8 @@ The global configuration applies to all projects by default. Location depends on - **Linux/macOS**: `$XDG_CONFIG_HOME/codebook/codebook.toml` or `~/.config/codebook/codebook.toml` - **Windows**: `%APPDATA%\codebook\codebook.toml` or `%APPDATA%\Roaming\codebook\codebook.toml` +You can override this location if you sync your config elsewhere by providing `initializationOptions.globalConfigPath` from your LSP client. When no override is provided, the OS-specific default above is used. + ### Project Configuration Project-specific configuration is loaded from either `codebook.toml` or `.codebook.toml` in the project root. Codebook searches for this file starting from the current directory and moving up to parent directories. 
diff --git a/crates/codebook-config/spec.md b/crates/codebook-config/spec.md index 48200b3..cdda385 100644 --- a/crates/codebook-config/spec.md +++ b/crates/codebook-config/spec.md @@ -44,6 +44,8 @@ The data model for configuration settings: - Linux/macOS: `$XDG_CONFIG_HOME/codebook/codebook.toml` if XDG_CONFIG_HOME is set - Linux/macOS fallback: `~/.config/codebook/codebook.toml` - Windows: `%APPDATA%\codebook\codebook.toml` +- **Custom Overrides**: + - Consumers may call `CodebookConfigFile::load_with_global_config` to supply an explicit global config path (used by `codebook-lsp` when an LSP client provides `initializationOptions.globalConfigPath`). - **Configuration Precedence**: - Project configuration overrides global configuration diff --git a/crates/codebook-config/src/helpers.rs b/crates/codebook-config/src/helpers.rs index 6e499f1..ae5b1a9 100644 --- a/crates/codebook-config/src/helpers.rs +++ b/crates/codebook-config/src/helpers.rs @@ -131,6 +131,26 @@ pub(crate) fn min_word_length(settings: &ConfigSettings) -> usize { settings.min_word_length } +pub(crate) fn expand_tilde<P: AsRef<Path>>(path_user_input: P) -> Option<PathBuf> { + let p = path_user_input.as_ref(); + if !p.starts_with("~") { + return Some(p.to_path_buf()); + } + if p == Path::new("~") { + return dirs::home_dir(); + } + dirs::home_dir().map(|mut h| { + if h == Path::new("/") { + // Corner case: `h` root directory; + // don't prepend extra `/`, just drop the tilde.
+ p.strip_prefix("~").unwrap().to_path_buf() + } else { + h.push(p.strip_prefix("~/").unwrap()); + h + } + }) +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/codebook-config/src/lib.rs b/crates/codebook-config/src/lib.rs index 3540d9c..54149ac 100644 --- a/crates/codebook-config/src/lib.rs +++ b/crates/codebook-config/src/lib.rs @@ -1,6 +1,7 @@ mod helpers; mod settings; mod watched_file; +use crate::helpers::expand_tilde; use crate::settings::ConfigSettings; use crate::watched_file::WatchedFile; use log::debug; @@ -72,24 +73,40 @@ impl Default for CodebookConfigFile { impl CodebookConfigFile { /// Load configuration by searching for both global and project-specific configs pub fn load(current_dir: Option<&Path>) -> Result<Self, io::Error> { + Self::load_with_global_config(current_dir, None) + } + + /// Load configuration with an explicit global config override. + pub fn load_with_global_config( + current_dir: Option<&Path>, + global_config_path: Option<PathBuf>, + ) -> Result<Self, io::Error> { debug!("Initializing CodebookConfig"); if let Some(current_dir) = current_dir { let current_dir = Path::new(current_dir); - Self::load_configs(current_dir) + Self::load_configs(current_dir, global_config_path) } else { let current_dir = env::current_dir()?; - Self::load_configs(&current_dir) + Self::load_configs(&current_dir, global_config_path) } } /// Load both global and project configuration - fn load_configs(start_dir: &Path) -> Result<Self, io::Error> { + fn load_configs( + start_dir: &Path, + global_config_override: Option<PathBuf>, + ) -> Result<Self, io::Error> { let config = Self::default(); let mut inner = config.inner.write().unwrap(); // First, try to load global config - if let Some(global_path) = Self::find_global_config_path() { + let global_config_path = match global_config_override { + Some(path) => Some(path.to_path_buf()), + None => Self::find_global_config_path(), + }; + + if let Some(global_path) = global_config_path { + let global_config = WatchedFile::new(Some(global_path.clone())); + if global_path.exists() { @@ -321,6 +338,10 @@ 
impl CodebookConfigFile { None => return Ok(()), }; + #[cfg(not(windows))] + let global_config_path = expand_tilde(global_config_path) + .expect("Failed to expand tilde in global config path"); + let settings = match inner.global_config.content() { Some(settings) => settings, None => return Ok(()), }; @@ -806,7 +827,7 @@ mod tests { "# )?; - let config = CodebookConfigFile::load_configs(&sub_sub_dir)?; + let config = CodebookConfigFile::load_configs(&sub_sub_dir, None)?; assert!(config.snapshot().words.contains(&"testword".to_string())); // Check that the config file path is stored @@ -814,6 +835,32 @@ mod tests { Ok(()) } + #[test] + fn test_global_config_override_is_used() -> Result<(), io::Error> { + let temp_dir = TempDir::new().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + fs::create_dir_all(&workspace_dir)?; + let custom_global_dir = temp_dir.path().join("global"); + fs::create_dir_all(&custom_global_dir)?; + let override_path = custom_global_dir.join("codebook.toml"); + + fs::write( + &override_path, + r#" + words = ["customword"] + "#, + )?; + + let config = CodebookConfigFile::load_with_global_config( + Some(workspace_dir.as_path()), + Some(override_path.clone()), + )?; + + assert_eq!(config.global_config_path(), Some(override_path)); + assert!(config.is_allowed_word("customword")); + Ok(()) + } + + #[test] + fn test_should_ignore_path() { + let config = CodebookConfigFile::default(); diff --git a/crates/codebook-lsp/src/init_options.rs b/crates/codebook-lsp/src/init_options.rs new file mode 100644 index 0000000..1b76897 --- /dev/null +++ b/crates/codebook-lsp/src/init_options.rs @@ -0,0 +1,106 @@ +use log::LevelFilter; +use serde::Deserialize; +use serde::de::Deserializer; +use serde_json::Value; +use std::path::PathBuf; + +fn default_log_level() -> LevelFilter { + LevelFilter::Info +} + +fn deserialize_log_level<'de, D>(deserializer: D) -> Result<LevelFilter, D::Error> +where + D: Deserializer<'de>, +{ + let s: Option<String> = Option::deserialize(deserializer)?; + 
match s.as_deref() { + Some("trace") => Ok(LevelFilter::Trace), + Some("debug") => Ok(LevelFilter::Debug), + Some("warn") => Ok(LevelFilter::Warn), + Some("error") => Ok(LevelFilter::Error), + _ => Ok(LevelFilter::Info), + } +} + +fn deserialize_global_config_path<'de, D>(deserializer: D) -> Result<Option<PathBuf>, D::Error> +where + D: Deserializer<'de>, +{ + let s: Option<String> = Option::deserialize(deserializer)?; + match s { + Some(path) => { + let trimmed = path.trim(); + if trimmed.is_empty() { + Ok(None) + } else { + Ok(Some(PathBuf::from(trimmed))) + } + } + None => Ok(None), + } +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub(crate) struct ClientInitializationOptions { + #[serde( + default = "default_log_level", + deserialize_with = "deserialize_log_level" + )] + pub(crate) log_level: LevelFilter, + #[serde(default, deserialize_with = "deserialize_global_config_path")] + pub(crate) global_config_path: Option<PathBuf>, +} + +impl Default for ClientInitializationOptions { + fn default() -> Self { + ClientInitializationOptions { + log_level: default_log_level(), + global_config_path: None, + } + } +} + +impl ClientInitializationOptions { + pub(crate) fn from_value(options_value: Option<Value>) -> Self { + match options_value { + None => ClientInitializationOptions::default(), + Some(value) => match serde_json::from_value(value) { + Ok(options) => options, + Err(err) => { + log::error!( + "Failed to deserialize client initialization options. 
Using default: {}", + err + ); + ClientInitializationOptions::default() + } + }, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + #[test] + fn test_default() { + let default_options = ClientInitializationOptions::default(); + assert_eq!(default_options.log_level, LevelFilter::Info); + } + + #[test] + fn test_custom() { + let custom_options = ClientInitializationOptions { + log_level: LevelFilter::Debug, + ..Default::default() + }; + assert_eq!(custom_options.log_level, LevelFilter::Debug); + } + + #[test] + fn test_json() { + let json = r#"{"logLevel": "debug"}"#; + let options: ClientInitializationOptions = serde_json::from_str(json).unwrap(); + assert_eq!(options.log_level, LevelFilter::Debug); + } +} diff --git a/crates/codebook-lsp/src/lib.rs b/crates/codebook-lsp/src/lib.rs index 2be5d96..f9f83cf 100644 --- a/crates/codebook-lsp/src/lib.rs +++ b/crates/codebook-lsp/src/lib.rs @@ -1,3 +1,4 @@ pub mod file_cache; +mod init_options; pub mod lsp; pub mod lsp_logger; diff --git a/crates/codebook-lsp/src/lsp.rs b/crates/codebook-lsp/src/lsp.rs index c07f343..0bd0b5b 100644 --- a/crates/codebook-lsp/src/lsp.rs +++ b/crates/codebook-lsp/src/lsp.rs @@ -1,7 +1,7 @@ use std::collections::HashMap; -use std::path::Path; +use std::path::{Path, PathBuf}; use std::str::FromStr as _; -use std::sync::Arc; +use std::sync::{Arc, OnceLock, RwLock}; use codebook::parser::get_word_from_string; use codebook::queries::LanguageType; @@ -9,7 +9,6 @@ use string_offsets::AllConfig; use string_offsets::Pos; use string_offsets::StringOffsets; -use log::LevelFilter; use log::error; use serde_json::Value; use tokio::task; @@ -22,16 +21,18 @@ use codebook_config::{CodebookConfig, CodebookConfigFile}; use log::{debug, info}; use crate::file_cache::TextDocumentCache; +use crate::init_options::ClientInitializationOptions; use crate::lsp_logger; const SOURCE_NAME: &str = "Codebook"; pub struct Backend { - pub client: Client, - // Wrap every call to codebook in spawn_blocking, it's not 
async - pub codebook: Arc, - pub config: Arc, - pub document_cache: TextDocumentCache, + client: Client, + workspace_dir: PathBuf, + codebook: OnceLock>, + config: OnceLock>, + document_cache: TextDocumentCache, + initialize_options: RwLock>, } enum CodebookCommand { @@ -64,27 +65,17 @@ impl From for String { impl LanguageServer for Backend { async fn initialize(&self, params: InitializeParams) -> RpcResult { // info!("Capabilities: {:?}", params.capabilities); - // Get log level from initialization options - let log_level = params - .initialization_options - .as_ref() - .and_then(|options| options.get("logLevel")) - .and_then(|level| level.as_str()) - .map(|level| { - if level == "debug" { - LevelFilter::Debug - } else { - LevelFilter::Info - } - }) - .unwrap_or(LevelFilter::Info); + let client_options = ClientInitializationOptions::from_value(params.initialization_options); // Attach the LSP client to the logger and flush buffered logs - lsp_logger::LspLogger::attach_client(self.client.clone(), log_level); + lsp_logger::LspLogger::attach_client(self.client.clone(), client_options.log_level); info!( "LSP logger attached to client with log level: {}", - log_level + client_options.log_level ); + + *self.initialize_options.write().unwrap() = Arc::new(client_options); + Ok(InitializeResult { capabilities: ServerCapabilities { position_encoding: Some(PositionEncodingKind::UTF16), @@ -118,16 +109,14 @@ impl LanguageServer for Backend { async fn initialized(&self, _: InitializedParams) { info!("Server ready!"); - info!( - "Project config: {}", - self.config.project_config_path().unwrap().display() - ); + let config = self.config_handle(); + match config.project_config_path() { + Some(path) => info!("Project config: {}", path.display()), + None => info!("Project config: "), + } info!( "Global config: {}", - self.config - .global_config_path() - .unwrap_or_default() - .display() + config.global_config_path().unwrap_or_default().display() ); } @@ -199,7 +188,7 @@ impl 
LanguageServer for Backend { if word.is_empty() || word.contains(" ") { continue; } - let cb = self.codebook.clone(); + let cb = self.codebook_handle(); let inner_word = word.clone(); let suggestions = task::spawn_blocking(move || cb.get_suggestions(&inner_word)).await; @@ -265,6 +254,7 @@ impl LanguageServer for Backend { async fn execute_command(&self, params: ExecuteCommandParams) -> RpcResult> { match CodebookCommand::from(params.command.as_str()) { CodebookCommand::AddWord => { + let config = self.config_handle(); let words = params .arguments .iter() @@ -273,21 +263,22 @@ impl LanguageServer for Backend { "Adding words to dictionary {}", words.clone().collect::>().join(", ") ); - let updated = self.add_words(words); + let updated = self.add_words(config.as_ref(), words); if updated { - let _ = self.config.save(); + let _ = config.save(); self.recheck_all().await; } Ok(None) } CodebookCommand::AddWordGlobal => { + let config = self.config_handle(); let words = params .arguments .iter() .filter_map(|arg| arg.as_str().map(|s| s.to_string())); - let updated = self.add_words_global(words); + let updated = self.add_words_global(config.as_ref(), words); if updated { - let _ = self.config.save_global(); + let _ = config.save_global(); self.recheck_all().await; } Ok(None) @@ -299,18 +290,42 @@ impl LanguageServer for Backend { impl Backend { pub fn new(client: Client, workspace_dir: &Path) -> Self { - let config = CodebookConfigFile::load(Some(workspace_dir)).expect("Unable to make config."); - let config_arc: Arc = Arc::new(config); - let cb_config = Arc::clone(&config_arc); - let codebook = Arc::new(Codebook::new(cb_config).expect("Unable to make codebook")); - Self { client, - codebook, - config: Arc::clone(&config_arc), + workspace_dir: workspace_dir.to_path_buf(), + codebook: OnceLock::new(), + config: OnceLock::new(), document_cache: TextDocumentCache::default(), + initialize_options: RwLock::new(Arc::new(ClientInitializationOptions::default())), } } + + fn 
config_handle(&self) -> Arc<CodebookConfigFile> { + self.config + .get_or_init(|| { + Arc::new( + CodebookConfigFile::load_with_global_config( + Some(self.workspace_dir.as_path()), + self.initialize_options + .read() + .unwrap() + .global_config_path + .clone(), + ) + .expect("Unable to make config"), + ) + }) + .clone() + } + + fn codebook_handle(&self) -> Arc<Codebook> { + self.codebook + .get_or_init(|| { + Arc::new(Codebook::new(self.config_handle()).expect("Unable to make codebook")) + }) + .clone() + } + fn make_diagnostic(&self, word: &str, start_pos: &Pos, end_pos: &Pos) -> Diagnostic { let message = format!("Possible spelling issue '{word}'."); Diagnostic { @@ -335,10 +350,10 @@ impl Backend { } } - fn add_words(&self, words: impl Iterator<Item = String>) -> bool { + fn add_words(&self, config: &CodebookConfigFile, words: impl Iterator<Item = String>) -> bool { let mut should_save = false; for word in words { - match self.config.add_word(&word) { + match config.add_word(&word) { Ok(true) => { should_save = true; } @@ -352,10 +367,14 @@ } should_save } - fn add_words_global(&self, words: impl Iterator<Item = String>) -> bool { + fn add_words_global( + &self, + config: &CodebookConfigFile, + words: impl Iterator<Item = String>, + ) -> bool { let mut should_save = false; for word in words { - match self.config.add_word_global(&word) { + match config.add_word_global(&word) { Ok(true) => { should_save = true; } @@ -406,7 +425,8 @@ } async fn spell_check(&self, uri: &Url) { - let did_reload = match self.config.reload() { + let config = self.config_handle(); + let did_reload = match config.reload() { Ok(did_reload) => did_reload, Err(e) => { error!("Failed to reload config: {e}"); @@ -440,7 +460,7 @@ let lang = doc.language_id.as_deref(); let lang_type = lang.and_then(|lang| LanguageType::from_str(lang).ok()); debug!("Document identified as type {lang_type:?} from {lang:?}"); - let cb = self.codebook.clone(); + let cb = self.codebook_handle(); let fp = file_path.clone(); let spell_results = 
task::spawn_blocking(move || { cb.spell_check(&doc.text, lang_type, Some(fp.to_str().unwrap_or_default())) diff --git a/crates/codebook-lsp/src/main.rs b/crates/codebook-lsp/src/main.rs index 0bfdf7f..bc055ee 100644 --- a/crates/codebook-lsp/src/main.rs +++ b/crates/codebook-lsp/src/main.rs @@ -1,4 +1,5 @@ mod file_cache; +mod init_options; mod lsp; mod lsp_logger; @@ -9,7 +10,6 @@ use lsp::Backend; use lsp_logger::LspLogger; use std::env; use std::path::{Path, PathBuf}; -use tokio::task; use tower_lsp::{LspService, Server}; #[derive(Parser)] @@ -72,12 +72,6 @@ async fn serve_lsp(root: &Path) { info!("Starting Codebook Language Server v{version}-{build_profile}..."); let (stdin, stdout) = (tokio::io::stdin(), tokio::io::stdout()); let inner_root = root.to_owned(); - - // Some blocking setup is done, so spawn_block! - let (service, socket) = - task::spawn_blocking(move || LspService::new(|client| Backend::new(client, &inner_root))) - .await - .unwrap(); - + let (service, socket) = LspService::new(|client| Backend::new(client, &inner_root)); Server::new(stdin, stdout, socket).serve(service).await; } diff --git a/crates/downloader/src/lib.rs b/crates/downloader/src/lib.rs index 3e0a408..6025443 100644 --- a/crates/downloader/src/lib.rs +++ b/crates/downloader/src/lib.rs @@ -10,15 +10,14 @@ use sha2::{Digest, Sha256}; use std::collections::HashMap; use std::fs::{self, File}; use std::io::{BufReader, Read}; -use std::ops::Deref; use std::path::{Path, PathBuf}; -use std::sync::RwLock; +use std::sync::{OnceLock, RwLock}; use tempfile::NamedTempFile; const METADATA_FILE: &str = "_metadata.json"; const TWO_WEEKS: u64 = 14 * 24 * 3600; -#[derive(Debug, Serialize, Deserialize)] +#[derive(Debug, Serialize, Deserialize, Default)] struct Metadata { files: HashMap, } @@ -33,86 +32,124 @@ struct FileEntry { pub struct Downloader { cache_dir: PathBuf, - metadata: RwLock, - client: Client, + metadata_path: PathBuf, + metadata: OnceLock>, + _client: OnceLock, } impl Downloader { pub 
fn new(cache_dir: impl AsRef<Path>) -> Result<Self> { let cache_dir = cache_dir.as_ref().to_path_buf(); - fs::create_dir_all(&cache_dir)?; info!("Cache folder at: {cache_dir:?}"); let metadata_path = cache_dir.join(METADATA_FILE); - let metadata = if metadata_path.exists() { - // Try to parse the existing metadata file - match File::open(&metadata_path).and_then(|file| Ok(serde_json::from_reader(file)?)) { + + Ok(Self { + cache_dir, + metadata_path, + metadata: OnceLock::new(), + _client: OnceLock::new(), + }) + } + + fn client(&self) -> &Client { + self._client.get_or_init(|| { + // Set up rustls_platform_verifier to use OS cert chains (proxy support) + let arc_crypto_provider = + std::sync::Arc::new(rustls::crypto::aws_lc_rs::default_provider()); + let config = ClientConfig::builder_with_provider(arc_crypto_provider) + .with_safe_default_protocol_versions() + .unwrap() + .with_platform_verifier() + .unwrap() + .with_no_client_auth(); + reqwest::blocking::Client::builder() + .use_preconfigured_tls(config) + .build() + .expect("Failed to build http client") + }) + } + + fn metadata(&self) -> &RwLock<Metadata> { + let metadata_path = self.metadata_path.clone(); + let cache_dir = self.cache_dir.clone(); + self.metadata.get_or_init(move || { + fs::create_dir_all(&cache_dir) + .expect("Failed to create cache directory"); + let metadata = Self::load_metadata(&metadata_path); + RwLock::new(metadata) + }) + } + + fn load_metadata(metadata_path: &Path) -> Metadata { + match File::open(metadata_path) { + Ok(file) => match serde_json::from_reader(file) { Ok(metadata) => metadata, - Err(e) => { - // Log the error but continue with a fresh metadata file - log::warn!("Failed to load metadata file: {e}, creating a new one"); - Metadata { - files: HashMap::new(), - } + Err(err) => { + log::warn!("Failed to parse metadata file {metadata_path:?}: {err}"); + Metadata::default() } + }, + Err(err) => { + if err.kind() != std::io::ErrorKind::NotFound { + log::warn!("Failed to open metadata 
file {metadata_path:?}: {err}"); + } + Metadata::default() } - } else { - Metadata { - files: HashMap::new(), - } - }; + } + } - // Set up rustls_platform_verifier to use OS cert chains (proxy support) - let arc_crypto_provider = - std::sync::Arc::new(rustls::crypto::aws_lc_rs::default_provider()); - let config = ClientConfig::builder_with_provider(arc_crypto_provider) - .with_safe_default_protocol_versions() - .unwrap() - .with_platform_verifier() - .unwrap() - .with_no_client_auth(); - let client = reqwest::blocking::Client::builder() - .use_preconfigured_tls(config) - .build()?; + fn persist_metadata(&self, metadata: &Metadata) -> Result<()> { + let file = File::create(&self.metadata_path)?; + serde_json::to_writer_pretty(file, metadata)?; + Ok(()) + } - Ok(Self { - cache_dir, - metadata: RwLock::new(metadata), - client, - }) + fn purge_stale_entry(&self, url: &str, stale_path: &Path) { + let mut metadata = self.metadata().write().unwrap(); + if metadata + .files + .get(url) + .map(|entry| entry.path == stale_path) + .unwrap_or(false) + { + metadata.files.remove(url); + if let Err(err) = self.persist_metadata(&metadata) { + log::error!( + "Failed to persist metadata after removing stale entry for {url}: {err}" + ); + } + } } pub fn get(&self, url: &str) -> Result { - // First check with read lock - let (needs_update, file_exists) = { - let metadata = self.metadata.read().unwrap(); - let entry = metadata.files.get(url); + let entry = { + let metadata = self.metadata().read().unwrap(); + metadata.files.get(url).cloned() + }; - match entry { - Some(e) => { + let result = match entry { + Some(entry) => { + if !entry.path.exists() { + self.purge_stale_entry(url, &entry.path); + self.download_new(url) + } else { let needs_update = - e.last_checked.timestamp() + TWO_WEEKS as i64 <= Utc::now().timestamp(); - let file_exists = e.path.exists(); - (Some(needs_update), file_exists) + entry.last_checked.timestamp() + TWO_WEEKS as i64 <= Utc::now().timestamp(); + if 
needs_update { + self.try_update(url) + } else { + Ok(entry.path) + } } - None => (None, false), } + None => self.download_new(url), }; - // If the file doesn't exist but we have metadata, treat it as a new download - if !file_exists && needs_update.is_some() { - return self.download_new(url); - } - - match needs_update { - Some(true) => self.try_update(url), - Some(false) => Ok(self.metadata.read().unwrap().files[url].path.clone()), - None => self.download_new(url), - } - .or_else(|e| { + result.or_else(|e| { log::error!("Failed to update, using cached version: {e}"); let entry = { - let metadata = self.metadata.read().unwrap(); + let metadata = self.metadata().read().unwrap(); metadata .files .get(url) @@ -123,6 +160,7 @@ impl Downloader { if path.exists() { Ok(path) } else { + self.purge_stale_entry(url, &path); // If fallback path doesn't exist, try to download anyway self.download_new(url) } @@ -139,7 +177,7 @@ impl Downloader { fn try_update(&self, url: &str) -> Result { // Get last modified time with read lock let last_modified = { - self.metadata + self.metadata() .read() .unwrap() .files @@ -149,7 +187,7 @@ impl Downloader { // log::info!("{:?}", last_modified); // log::info!("URL {:?}", url); - let mut request = self.client.get(url); + let mut request = self.client().get(url); if let Some(lm) = last_modified { request = request.header(IF_MODIFIED_SINCE, lm.with_timezone(&Utc).to_rfc2822()); } @@ -174,7 +212,7 @@ impl Downloader { let new_hash = compute_file_hash(temp_file.path())?; let old_hash = { let metadata = self - .metadata + .metadata() .read() .map_err(|e| anyhow::anyhow!("Lock error: {}", e))?; metadata.files.get(url).unwrap().content_hash.clone() @@ -187,7 +225,7 @@ impl Downloader { } fn download_new(&self, url: &str) -> Result { - let response = self.client.get(url).send()?; + let response = self.client().get(url).send()?; let last_modified = parse_last_modified(&response); let temp_file = self.download_to_temp(response)?; let new_hash = 
compute_file_hash(temp_file.path())?; @@ -218,10 +256,10 @@ impl Downloader { content_hash, }; { - let mut metadata = self.metadata.write().unwrap(); + let mut metadata = self.metadata().write().unwrap(); metadata.files.insert(url.to_string(), entry); + self.persist_metadata(&metadata)?; } - self.save_metadata()?; Ok(path) } @@ -234,7 +272,7 @@ impl Downloader { ) -> Result { let new_path: PathBuf; { - let mut metadata = self.metadata.write().unwrap(); + let mut metadata = self.metadata().write().unwrap(); let entry = metadata.files.get_mut(url).unwrap(); let old_path = entry.path.clone(); @@ -250,31 +288,23 @@ impl Downloader { entry.last_checked = Utc::now(); entry.last_modified = last_modified; entry.content_hash = content_hash; + self.persist_metadata(&metadata)?; } - self.save_metadata()?; Ok(new_path) } fn update_check_time(&self, url: &str) -> Result { let path: PathBuf; { - let mut metadata = self.metadata.write().unwrap(); + let mut metadata = self.metadata().write().unwrap(); let entry = metadata.files.get_mut(url).unwrap(); entry.last_checked = Utc::now(); path = entry.path.clone(); + self.persist_metadata(&metadata)?; } - self.save_metadata()?; Ok(path) } - - fn save_metadata(&self) -> Result<()> { - let metadata_path = self.cache_dir.join(METADATA_FILE); - let file = File::create(metadata_path)?; - let metadata = self.metadata.read().unwrap(); - serde_json::to_writer_pretty(file, &metadata.deref())?; - Ok(()) - } } fn hash_url(url: &str) -> String { @@ -333,7 +363,7 @@ mod tests { mock.assert(); assert!(path.exists()); assert_eq!(std::fs::read_to_string(path).unwrap(), "test content"); - let metadata = downloader.metadata.read().unwrap(); + let metadata = downloader.metadata().read().unwrap(); let entry = metadata.files.get(&server.url("/test.txt")).unwrap(); assert_eq!(entry.content_hash, compute_file_hash(&entry.path).unwrap()); } @@ -387,13 +417,13 @@ mod tests { // Get stored metadata let stored_last_modified = { - let metadata = 
downloader.metadata.read().unwrap(); + let metadata = downloader.metadata().read().unwrap(); metadata.files[&test_path].last_modified }; // Set last checked to 3 weeks ago { - let mut metadata = downloader.metadata.write().unwrap(); + let mut metadata = downloader.metadata().write().unwrap(); let entry = metadata.files.get_mut(&test_path).unwrap(); entry.last_checked = stored_last_modified.unwrap() - Duration::weeks(3); } @@ -433,7 +463,7 @@ mod tests { // Force update check time and break the server { - let mut metadata = downloader.metadata.try_write().unwrap(); + let mut metadata = downloader.metadata().try_write().unwrap(); if let Some(entry) = metadata.files.get_mut(&server.url("/test.txt")) { entry.last_checked = Utc::now() - Duration::seconds(TWO_WEEKS as i64 * 2); } @@ -488,7 +518,7 @@ mod tests { // Force check time { - let mut metadata = downloader.metadata.write().unwrap(); + let mut metadata = downloader.metadata().write().unwrap(); if let Some(entry) = metadata.files.get_mut(&server.url("/test.txt")) { entry.last_checked = DateTime::parse_from_rfc2822("Wed, 21 Oct 2020 07:28:00 GMT") .unwrap() @@ -507,7 +537,7 @@ mod tests { let cached_path = downloader.get(&server.url("/test.txt")).unwrap(); mock2.assert(); assert_eq!(original_path, cached_path); - let metadata = downloader.metadata.read().unwrap(); + let metadata = downloader.metadata().read().unwrap(); let entry = metadata.files.get(&server.url("/test.txt")).unwrap(); assert!(entry.last_checked > Utc::now() - Duration::seconds(1)); } diff --git a/vscode-extension/CHANGELOG.md b/vscode-extension/CHANGELOG.md index 4ba10bf..cd08e98 100644 --- a/vscode-extension/CHANGELOG.md +++ b/vscode-extension/CHANGELOG.md @@ -15,6 +15,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Support for multiple architectures (x86_64, aarch64) and platforms (macOS, Linux, Windows) - Fallback to locally installed binaries (cargo, PATH, or custom path) - Smart detection of development 
binaries for contributors +- `codebook.globalConfigPath` setting that forwards a custom `codebook.toml` location to the language server through initialization options ### Changed - No longer requires manual installation of `codebook-lsp` - the extension handles this automatically @@ -66,4 +67,4 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Performance optimizations for large files - Support for workspace-specific language settings - Integration with VS Code's problems panel -- Custom severity levels for different word categories \ No newline at end of file +- Custom severity levels for different word categories diff --git a/vscode-extension/README.md b/vscode-extension/README.md index 2d83b35..37a5871 100644 --- a/vscode-extension/README.md +++ b/vscode-extension/README.md @@ -36,6 +36,7 @@ This extension contributes the following settings: * `codebook.enable`: Enable/disable the spell checker * `codebook.serverPath`: Path to the codebook-lsp executable (default: auto-download from GitHub) +* `codebook.globalConfigPath`: Override the default global `codebook.toml` location * `codebook.logLevel`: Log level for the language server (`error`, `warn`, `info`, `debug`, `trace`) * `codebook.minWordLength`: Minimum word length to check (default: 3) * `codebook.dictionaries`: Additional dictionaries to use diff --git a/vscode-extension/package.json b/vscode-extension/package.json index b01391e..2896bd8 100644 --- a/vscode-extension/package.json +++ b/vscode-extension/package.json @@ -89,6 +89,11 @@ "default": "", "description": "Optional: Path to a custom codebook-lsp executable. If not specified, the extension will automatically download the appropriate binary for your platform." }, + "codebook.globalConfigPath": { + "type": "string", + "default": "", + "description": "Optional: Override the default global codebook.toml location. Useful if you sync your config in a custom directory." 
+ }, "codebook.logLevel": { "type": "string", "enum": [ diff --git a/vscode-extension/src/extension.ts b/vscode-extension/src/extension.ts index cbaae59..9a207d8 100644 --- a/vscode-extension/src/extension.ts +++ b/vscode-extension/src/extension.ts @@ -230,6 +230,31 @@ function saveBinaryMeta( fs.writeFileSync(metaPath, JSON.stringify(info, null, 2)); } +function resolveGlobalConfigPath( + rawPath: string | undefined, +): string | undefined { + if (!rawPath) { + return undefined; + } + + const trimmed = rawPath.trim(); + if (!trimmed) { + return undefined; + } + + let expanded = trimmed; + if (expanded.startsWith("~")) { + const home = os.homedir(); + expanded = path.join(home, expanded.slice(1)); + } + + if (!path.isAbsolute(expanded)) { + expanded = path.resolve(expanded); + } + + return expanded; +} + async function findLocalBinary(): Promise { // Check if binary exists in PATH const whichCommand = process.platform === "win32" ? "where" : "which"; @@ -420,6 +445,9 @@ export async function activate(context: vscode.ExtensionContext) { try { const serverPath = await getBinary(context); + const resolvedGlobalConfigPath = resolveGlobalConfigPath( + config.get("globalConfigPath", ""), + ); // Server options const serverOptions: ServerOptions = { @@ -429,6 +457,13 @@ export async function activate(context: vscode.ExtensionContext) { }; // Client options + const initializationOptions: Record = { + logLevel: config.get("logLevel", "info"), + }; + if (resolvedGlobalConfigPath) { + initializationOptions.globalConfigPath = resolvedGlobalConfigPath; + } + const clientOptions: LanguageClientOptions = { documentSelector: [ { scheme: "file", language: "*" }, @@ -436,6 +471,7 @@ export async function activate(context: vscode.ExtensionContext) { ], outputChannel, revealOutputChannelOn: RevealOutputChannelOn.Error, + initializationOptions, }; // Create the language client