diff --git a/.vscode/launch.json b/.vscode/launch.json index 1111349f9..0bc5c95db 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -8,7 +8,6 @@ "request": "launch", "runtimeExecutable": "${execPath}", "args": [ - "--disable-extensions", "--extensionDevelopmentPath=${workspaceFolder}/editors/code" ], "outFiles": [ @@ -29,7 +28,6 @@ "request": "launch", "runtimeExecutable": "${execPath}", "args": [ - "--disable-extensions", "--extensionDevelopmentPath=${workspaceFolder}/editors/code" ], "outFiles": [ diff --git a/CHANGELOG.md b/CHANGELOG.md index f4fa29f74..a8be4874b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,14 @@ ## Release Notes +## [0.11.0] + +### Added + +- Added automatic mainPath detection. The mainPath setting does not exist anymore! +- Added support for Apple Silicon. +- Switched to a path interner for better performance when resolving references (no longer using URIs). +- Added better progress reporting. + ## [0.10.19] ### Added @@ -291,7 +300,7 @@ ### Added - Added status notifications. -- Added support for folder rename in includeDirectories. +- Added support for folder rename in includesDirectories. ## [0.6.0] @@ -299,7 +308,7 @@ - Added `typedef` and `typeset` support. - Added callback completions. -- Added file rename/deletion support in includeDirectories. +- Added file rename/deletion support in includesDirectories. ## [0.5.1] @@ -309,7 +318,7 @@ ### Fixed -- Fixed changes in IncludeDirectories not being detected. +- Fixed changes in IncludesDirectories not being detected. - Fixed some references not being resolved on the initial parse. ## [0.5.0] diff --git a/Cargo.lock b/Cargo.lock index e3fdc6130..c78bd1cb7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -251,7 +251,7 @@ checksum = "71655c45cb9845d3270c9d6df84ebe72b4dad3c2ba3f7023ad47c144e4e473a5" dependencies = [ "bitflags", "clap_lex 0.2.4", - "indexmap", + "indexmap 1.9.3", "textwrap", ] @@ -451,7 +451,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc" dependencies = [ "cfg-if", - "hashbrown", + "hashbrown 0.12.3", "lock_api", "once_cell", "parking_lot_core", @@ -509,6 +509,12 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + [[package]] name = "errno" version = "0.3.1" @@ -720,7 +726,7 @@ dependencies = [ "futures-sink", "futures-util", "http", - "indexmap", + "indexmap 1.9.3", "slab", "tokio", "tokio-util", ] @@ -739,6 +745,12 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +[[package]] +name = "hashbrown" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" + [[package]] name = "heck" version = "0.4.1" @@ -880,7 +892,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ "autocfg", - "hashbrown", + "hashbrown 0.12.3", +] + +[[package]] +name = "indexmap" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" +dependencies = [ + "equivalent", + "hashbrown 0.14.0", ] [[package]] @@
-1188,6 +1210,12 @@ dependencies = [ "tempfile", ] +[[package]] +name = "nohash-hasher" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bf50223579dc7cdcfb3bfcacf7069ff68243f8c363f62ffa99cf000a6b9c451" + [[package]] name = "notify" version = "5.1.0" @@ -1620,7 +1648,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64449cfef9483a475ed56ae30e2da5ee96448789fb2aa240a04beb6a055078bf" dependencies = [ "countme", - "hashbrown", + "hashbrown 0.12.3", "memoffset", "rustc-hash", "text-size", @@ -1955,7 +1983,7 @@ dependencies = [ [[package]] name = "sourcepawn_lsp" -version = "0.10.19" +version = "0.11.0" dependencies = [ "anyhow", "assert_unordered", @@ -2001,10 +2029,12 @@ version = "0.1.0" dependencies = [ "anyhow", "fxhash", + "indexmap 2.0.0", "lazy_static", "linter", "log", "lsp-types", + "nohash-hasher", "notify", "parking_lot", "parser", @@ -2013,6 +2043,7 @@ dependencies = [ "semantic_analyzer", "serde", "serde_json", + "sourcepawn_lexer", "strip_bom", "syntax", "tree-sitter", @@ -2063,6 +2094,7 @@ dependencies = [ "lazy_static", "log", "lsp-types", + "nohash-hasher", "parking_lot", "regex", "serde", diff --git a/Cargo.toml b/Cargo.toml index 28bef65b6..5041f511c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -53,6 +53,8 @@ tree-sitter-sourcepawn = { git = "https://github.com/Sarrus1/tree-sitter-sourcep strip_bom = "1.0.0" serde = "1.0.147" serde_json = "^1.0.83" +nohash-hasher = "^0.2.0" +indexmap = "^2.0.0" [workspace.dependencies.uuid] version = "1.3.0" diff --git a/crates/linter/src/spcomp.rs b/crates/linter/src/spcomp.rs index 4b658781e..7ee922bc2 100644 --- a/crates/linter/src/spcomp.rs +++ b/crates/linter/src/spcomp.rs @@ -1,4 +1,4 @@ -use anyhow::{anyhow, Context}; +use anyhow::{anyhow, bail}; use fxhash::FxHashMap; use lazy_static::lazy_static; use lsp_types::{Diagnostic, DiagnosticSeverity, Position, Range, Url}; @@ -93,31 +93,37 @@ impl SPCompDiagnostic { /// # Arguments /// /// * `uri` - [Uri](Url) of the file to compile. +/// * `spcomp_path` - [Path](Path) of the spcomp executable. +/// * `includes_directories` - [Paths](PathBuf) of include directories to pass to spcomp. +/// * `linter_arguments` - Additional arguments to pass to spcomp. pub fn get_spcomp_diagnostics( uri: Url, spcomp_path: &Path, includes_directories: &[PathBuf], linter_arguments: &[String], ) -> anyhow::Result>> { - let output = Command::new( - spcomp_path - .to_str() - .context("Failed to convert spcomp path to string.")?, - ) - .args(build_args(&uri, includes_directories, linter_arguments)?) - .output(); + // Handle Apple Silicon + let output = if std::env::consts::OS == "macos" && std::env::consts::ARCH == "aarch64" { + Command::new("arch") + .arg("-x86_64") + .arg(spcomp_path) + .args(build_args(&uri, includes_directories, linter_arguments)?) + .output() + } else { + Command::new(spcomp_path) + .args(build_args(&uri, includes_directories, linter_arguments)?) + .output() + }; + let out_path = get_out_path(); if out_path.exists() { let _ = fs::remove_file(out_path); } - let output = output?; + let output = output?; // Unwrap output here to allow removing the existing file first. 
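Note: the Apple Silicon branch above shells out through macOS's `arch` launcher so that the x86_64-only spcomp binary runs under Rosetta 2 on aarch64 hosts. A minimal, self-contained sketch of that dispatch pattern (the function name is illustrative, not part of this patch):

    use std::path::Path;
    use std::process::{Command, Output};

    /// Run `binary` directly, or through Rosetta 2 on an Apple Silicon host.
    fn run_possibly_x86_64(binary: &Path, args: &[String]) -> std::io::Result<Output> {
        if std::env::consts::OS == "macos" && std::env::consts::ARCH == "aarch64" {
            // `arch -x86_64` asks macOS to execute the x86_64 image of the
            // binary, translating it with Rosetta 2 on Apple Silicon.
            Command::new("arch").arg("-x86_64").arg(binary).args(args).output()
        } else {
            Command::new(binary).args(args).output()
        }
    }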
let stderr = String::from_utf8_lossy(&output.stderr); if !stderr.is_empty() { - return Err(anyhow::anyhow!( - "Failed to run spcomp with error: {}", - stderr - )); + bail!("Failed to run spcomp with error: {}", stderr); } let stdout = String::from_utf8_lossy(&output.stdout); diff --git a/crates/parser/src/comment_parser.rs b/crates/parser/src/comment_parser.rs index 29b709a89..8b7786708 100644 --- a/crates/parser/src/comment_parser.rs +++ b/crates/parser/src/comment_parser.rs @@ -33,7 +33,9 @@ impl<'a> Parser<'a> { } pub fn push_inline_comment(&mut self, item: &Arc>) { - let Ok(description) = self.find_doc(item.read().range().end.line as usize, true) else {return}; + let Ok(description) = self.find_doc(item.read().range().end.line as usize, true) else { + return; + }; match &mut *item.write() { SPItem::EnumMember(enum_member_item) => { enum_member_item.description = description; diff --git a/crates/parser/src/define_parser.rs b/crates/parser/src/define_parser.rs index 21765e119..b632eb864 100644 --- a/crates/parser/src/define_parser.rs +++ b/crates/parser/src/define_parser.rs @@ -25,6 +25,7 @@ impl<'a> Parser<'a> { value: value.unwrap_or_default().to_string(), description: self.find_doc(node.start_position().row, true)?, uri: self.uri.clone(), + file_id: self.file_id, references: vec![], }; diff --git a/crates/parser/src/enum_parser.rs b/crates/parser/src/enum_parser.rs index 771a4b39a..75c0cb4d0 100644 --- a/crates/parser/src/enum_parser.rs +++ b/crates/parser/src/enum_parser.rs @@ -26,6 +26,7 @@ impl<'a> Parser<'a> { v_full_range: self.build_v_range(&full_range), description, uri: self.uri.clone(), + file_id: self.file_id, references: vec![], children: vec![], }; @@ -86,7 +87,9 @@ impl<'a> Parser<'a> { "comment" => { self.push_comment(child); if let Some(items) = enum_item.read().children() { - let Some(item) = items.last()else{continue;}; + let Some(item) = items.last() else { + continue; + }; self.push_inline_comment(item); } } @@ -107,6 +110,7 @@ impl<'a> Parser<'a> { let enum_member_item = EnumMemberItem { name, uri: self.uri.clone(), + file_id: self.file_id, range, v_range: self.build_v_range(&range), parent: Arc::downgrade(enum_item), diff --git a/crates/parser/src/enum_struct_parser.rs b/crates/parser/src/enum_struct_parser.rs index 0530e3eee..0e4488deb 100644 --- a/crates/parser/src/enum_struct_parser.rs +++ b/crates/parser/src/enum_struct_parser.rs @@ -30,6 +30,7 @@ impl<'a> Parser<'a> { v_full_range: self.build_v_range(&full_range), description, uri: self.uri.clone(), + file_id: self.file_id, references: vec![], children: vec![], }; @@ -101,6 +102,7 @@ impl<'a> Parser<'a> { v_range: self.build_v_range(&range), description: Description::default(), uri: self.uri.clone(), + file_id: self.file_id, detail: format!("{} {}{}", type_, name, dimensions.join("")), visibility: vec![], storage_class: vec![], diff --git a/crates/parser/src/function_parser.rs b/crates/parser/src/function_parser.rs index 46be5759c..5cb5e8ca8 100644 --- a/crates/parser/src/function_parser.rs +++ b/crates/parser/src/function_parser.rs @@ -157,6 +157,7 @@ impl<'a> Parser<'a> { v_full_range: self.build_v_range(&full_range), description: description.clone(), uri: self.uri.clone(), + file_id: self.file_id, detail: attributes.build_detail(self.source).unwrap_or_default(), visibility: attributes.visibility, definition_type: attributes.definition_type, @@ -259,6 +260,7 @@ impl<'a> Parser<'a> { v_range: self.build_v_range(&range), description: description.clone(), uri: self.uri.clone(), + file_id: self.file_id, detail: 
detail.to_string(), visibility: vec![], storage_class, diff --git a/crates/parser/src/lib.rs b/crates/parser/src/lib.rs index 8436db4f7..26adca773 100644 --- a/crates/parser/src/lib.rs +++ b/crates/parser/src/lib.rs @@ -4,7 +4,7 @@ use lsp_types::{Range, Url}; use parking_lot::RwLock; use preprocessor::Offset; use std::sync::Arc; -use syntax::{comment::Comment, deprecated::Deprecated, SPItem}; +use syntax::{comment::Comment, deprecated::Deprecated, FileId, SPItem}; use tree_sitter::Query; pub mod comment_parser; @@ -38,6 +38,7 @@ pub struct Parser<'a> { pub offsets: &'a FxHashMap>, pub source: &'a String, pub uri: Arc, + pub file_id: FileId, } pub fn build_v_range(offsets: &FxHashMap>, range: &Range) -> Range { diff --git a/crates/parser/src/macro_parser.rs b/crates/parser/src/macro_parser.rs index 915ddc012..30ab881e9 100644 --- a/crates/parser/src/macro_parser.rs +++ b/crates/parser/src/macro_parser.rs @@ -23,6 +23,7 @@ impl<'a> Parser<'a> { value: "".to_string(), description: self.find_doc(node.start_position().row, true)?, uri: self.uri.clone(), + file_id: self.file_id, references: vec![], }; diff --git a/crates/parser/src/methodmap_parser.rs b/crates/parser/src/methodmap_parser.rs index 96e308b06..4c13b6ec6 100644 --- a/crates/parser/src/methodmap_parser.rs +++ b/crates/parser/src/methodmap_parser.rs @@ -28,6 +28,7 @@ impl<'a> Parser<'a> { .find_doc(node.start_position().row, false) .unwrap_or_default(), uri: self.uri.clone(), + file_id: self.file_id, references: vec![], tmp_parent: inherit, children: vec![], diff --git a/crates/parser/src/property_parser.rs b/crates/parser/src/property_parser.rs index 4510535df..c39e71d73 100644 --- a/crates/parser/src/property_parser.rs +++ b/crates/parser/src/property_parser.rs @@ -34,6 +34,7 @@ impl<'a> Parser<'a> { .find_doc(node.start_position().row, false) .unwrap_or_default(), uri: self.uri.clone(), + file_id: self.file_id, references: vec![], parent: Arc::downgrade(&parent), }; diff --git a/crates/parser/src/typedef_parser.rs b/crates/parser/src/typedef_parser.rs index 550a3016e..bd05dc1f1 100644 --- a/crates/parser/src/typedef_parser.rs +++ b/crates/parser/src/typedef_parser.rs @@ -64,6 +64,7 @@ impl<'a> Parser<'a> { v_full_range: self.build_v_range(&full_range), description: description.clone(), uri: self.uri.clone(), + file_id: self.file_id, detail: node .utf8_text(self.source.as_bytes()) .unwrap_or_default() @@ -159,7 +160,7 @@ impl<'a> Parser<'a> { for child in argument_type_node.children(&mut cursor) { match child.kind() { - // FIXME: Handle oldtypes. + // TODO: Handle oldtypes. 
"type" => { type_.name = child.utf8_text(self.source.as_bytes()).ok()?.to_string(); } diff --git a/crates/parser/src/typeset_parser.rs b/crates/parser/src/typeset_parser.rs index 91bbdb78a..8601f2ad7 100644 --- a/crates/parser/src/typeset_parser.rs +++ b/crates/parser/src/typeset_parser.rs @@ -29,6 +29,7 @@ impl<'a> Parser<'a> { v_full_range: self.build_v_range(&full_range), description, uri: self.uri.clone(), + file_id: self.file_id, references: vec![], children: vec![], }; @@ -94,6 +95,7 @@ impl<'a> Parser<'a> { v_full_range: self.build_v_range(&full_range), description: description.clone(), uri: self.uri.clone(), + file_id: self.file_id, detail: child .utf8_text(self.source.as_bytes()) .unwrap_or_default() diff --git a/crates/parser/src/variable_parser.rs b/crates/parser/src/variable_parser.rs index 849fd65f9..9f98e12b7 100644 --- a/crates/parser/src/variable_parser.rs +++ b/crates/parser/src/variable_parser.rs @@ -120,6 +120,7 @@ impl<'a> Parser<'a> { v_range: self.build_v_range(&range), description: Description::default(), uri: self.uri.clone(), + file_id: self.file_id, detail: "".to_string(), visibility: visibility.to_vec(), storage_class: storage_class.to_vec(), diff --git a/crates/preprocessor/src/evaluator.rs b/crates/preprocessor/src/evaluator.rs index 33207b392..7ebfaf777 100644 --- a/crates/preprocessor/src/evaluator.rs +++ b/crates/preprocessor/src/evaluator.rs @@ -40,7 +40,7 @@ impl<'a> IfCondition<'a> { ); let mut symbol_iter = self .symbols - .clone() // FIXME: This is horrible. + .clone() // TODO: This is horrible. .into_iter() .peekable(); while let Some(symbol) = if !self.expansion_stack.is_empty() { diff --git a/crates/preprocessor/src/lib.rs b/crates/preprocessor/src/lib.rs index 918dc52ef..286650b65 100644 --- a/crates/preprocessor/src/lib.rs +++ b/crates/preprocessor/src/lib.rs @@ -1,4 +1,4 @@ -use anyhow::{anyhow, Context}; +use anyhow::{anyhow, bail, Context}; use fxhash::FxHashMap; use lazy_static::lazy_static; use lsp_types::{Diagnostic, Position, Range, Url}; @@ -214,10 +214,10 @@ impl<'a> SourcepawnPreprocessor<'a> { } Err(ExpansionError::MacroNotFound(err)) => { self.macro_not_found_errors.push(err.clone()); - return Err(anyhow!("{}", err)); + bail!("{}", err); } Err(ExpansionError::Parse(err)) => { - return Err(anyhow!("{}", err)); + bail!("{}", err); } } } @@ -394,10 +394,10 @@ impl<'a> SourcepawnPreprocessor<'a> { ))? 
as usize; if idx >= args.len() { - return Err(anyhow!( + bail!( "Argument index out of bounds for macro {}", symbol.text() - )); + ); } args[idx] = args_idx; } @@ -406,10 +406,7 @@ impl<'a> SourcepawnPreprocessor<'a> { } TokenKind::Operator(Operator::Percent) => (), _ => { - return Err(anyhow!( - "Unexpected symbol {} in macro args", - symbol.text() - )) + bail!("Unexpected symbol {} in macro args", symbol.text()) } } } diff --git a/crates/preprocessor/src/preprocessor_operator.rs b/crates/preprocessor/src/preprocessor_operator.rs index 555e425df..af4e1b6d3 100644 --- a/crates/preprocessor/src/preprocessor_operator.rs +++ b/crates/preprocessor/src/preprocessor_operator.rs @@ -1,4 +1,4 @@ -use anyhow::anyhow; +use anyhow::bail; use lsp_types::Range; use sourcepawn_lexer::Operator; @@ -113,7 +113,7 @@ impl PreOperator { Operator::NotEquals => PreOperator::NotEquals, Operator::And => PreOperator::And, Operator::Or => PreOperator::Or, - _ => return Err(anyhow!("Operator {:?} is not a preprocessor operator.", op)), + _ => bail!("Operator {:?} is not a preprocessor operator.", op), }; Ok(res) diff --git a/crates/semantic_analyzer/src/analyzer.rs b/crates/semantic_analyzer/src/analyzer.rs index c4bba6582..65120840b 100644 --- a/crates/semantic_analyzer/src/analyzer.rs +++ b/crates/semantic_analyzer/src/analyzer.rs @@ -2,7 +2,7 @@ use parking_lot::RwLock; use parser::build_v_range; use preprocessor::Offset; use std::sync::Arc; -use syntax::SPItem; +use syntax::{FileId, SPItem}; use fxhash::FxHashMap; use lsp_types::{Range, Url}; @@ -25,12 +25,19 @@ pub struct Analyzer<'a> { pub mm_es_idx: usize, pub token_idx: u32, pub offsets: &'a FxHashMap>, + pub file_id: FileId, + pub uri: Arc, } impl<'a> Analyzer<'a> { + /// Create a new [Analyzer] for a document. + /// + /// This constructor makes sure to remove all references of items that point to this document. + /// This avoids creating duplicate references. pub fn new( all_items: Vec>>, - uri: Arc, + uri: &Arc, + file_id: FileId, source: &str, offsets: &'a FxHashMap>, ) -> Self { @@ -39,7 +46,7 @@ impl<'a> Analyzer<'a> { let mut mm_es_in_file = vec![]; for item in all_items.iter() { - purge_references(item, &uri); + purge_references(item, file_id); match &*item.read() { // Match variables SPItem::Variable(variable_item) => { @@ -48,53 +55,53 @@ impl<'a> Analyzer<'a> { } SPItem::Function(function_item) => { // First level function. - if *function_item.uri == *uri { + if function_item.file_id == file_id { funcs_in_file.push(item.clone()); } tokens_map.insert(function_item.key(), item.clone()); // All variables of the function. for child in &function_item.children { - purge_references(child, &uri); + purge_references(child, file_id); tokens_map.insert(child.read().key(), child.clone()); } } SPItem::Methodmap(methodmap_item) => { - if *methodmap_item.uri == *uri { + if methodmap_item.file_id == file_id { mm_es_in_file.push(item.clone()); } tokens_map.insert(methodmap_item.key(), item.clone()); // All properties and methods of the enum struct. for child in &methodmap_item.children { - purge_references(child, &uri); + purge_references(child, file_id); tokens_map.insert(child.read().key(), child.clone()); if let SPItem::Function(method_item) = &*child.read() { - if *method_item.uri == *uri { + if method_item.file_id == file_id { funcs_in_file.push(child.clone()); } // All variables of the method. 
for sub_child in &method_item.children { - purge_references(sub_child, &uri); + purge_references(sub_child, file_id); tokens_map.insert(sub_child.read().key(), sub_child.clone()); } } } } SPItem::EnumStruct(enum_struct_item) => { - if *enum_struct_item.uri == *uri { + if enum_struct_item.file_id == file_id { mm_es_in_file.push(item.clone()); } tokens_map.insert(enum_struct_item.key(), item.clone()); // All fields and methods of the enum struct. for child in &enum_struct_item.children { - purge_references(child, &uri); + purge_references(child, file_id); tokens_map.insert(child.read().key(), child.clone()); if let SPItem::Function(method_item) = &*child.read() { - if *method_item.uri == *uri { + if method_item.file_id == file_id { funcs_in_file.push(child.clone()); } // All variables of the method. for sub_child in &method_item.children { - purge_references(sub_child, &uri); + purge_references(sub_child, file_id); tokens_map.insert(sub_child.read().key(), sub_child.clone()); } } @@ -104,7 +111,7 @@ impl<'a> Analyzer<'a> { tokens_map.insert(enum_item.key(), item.clone()); // All enum members of the enum. for child in &enum_item.children { - purge_references(child, &uri); + purge_references(child, file_id); tokens_map.insert(child.read().key(), child.clone()); } } @@ -123,7 +130,7 @@ impl<'a> Analyzer<'a> { tokens_map.insert(typeset_item.key(), item.clone()); // All typedef members of the typeset. for child in &typeset_item.children { - purge_references(child, &uri); + purge_references(child, file_id); tokens_map.insert(child.read().key(), child.clone()); } } @@ -148,6 +155,8 @@ impl<'a> Analyzer<'a> { mm_es_idx: 0, token_idx: 0, offsets, + file_id, + uri: uri.clone(), } } diff --git a/crates/semantic_analyzer/src/lib.rs b/crates/semantic_analyzer/src/lib.rs index eadf9ee8b..f67afa889 100644 --- a/crates/semantic_analyzer/src/lib.rs +++ b/crates/semantic_analyzer/src/lib.rs @@ -3,7 +3,7 @@ use lsp_types::{Range, Url}; use parking_lot::RwLock; use preprocessor::Offset; use std::sync::Arc; -use syntax::SPItem; +use syntax::{FileId, SPItem}; mod analyzer; mod inherit; @@ -15,15 +15,16 @@ pub use {resolvers::is_ctor_call, token::*}; use crate::analyzer::Analyzer; -pub fn find_references( +pub fn resolve_references( all_items: Vec>>, uri: &Arc, + file_id: FileId, source: &str, tokens: &mut [SPToken], offsets: &mut FxHashMap>, ) -> Option> { let mut unresolved_tokens = FxHashSet::default(); - let mut analyzer = Analyzer::new(all_items, uri.clone(), source, offsets); + let mut analyzer = Analyzer::new(all_items, uri, file_id, source, offsets); tokens.sort_by_key(|sp_token| match sp_token { SPToken::Symbol(token) => token.range.start.line, SPToken::Method((_, field)) => field.range.start.line, @@ -33,11 +34,11 @@ pub fn find_references( SPToken::Symbol(token) => { analyzer.update_scope(token.range); analyzer.update_line_context(token); - if analyzer.resolve_this(token, uri) { + if analyzer.resolve_this(token) { analyzer.token_idx += 1; continue; } - if analyzer.resolve_non_method_item(token, uri).is_ok() { + if analyzer.resolve_non_method_item(token).is_ok() { analyzer.token_idx += 1; continue; } @@ -47,7 +48,7 @@ pub fn find_references( SPToken::Method((parent, field)) => { analyzer.update_scope(parent.range); analyzer.update_line_context(parent); - if analyzer.resolve_method_item(parent, field, uri).is_none() { + if analyzer.resolve_method_item(parent, field).is_none() { // Token was not resolved unresolved_tokens.insert(field.text.clone()); } @@ -62,7 +63,12 @@ pub fn find_references( 
Some(unresolved_tokens) } -pub fn purge_references(item: &Arc<RwLock<SPItem>>, uri: &Arc<Url>) { +/// Purge the references of an [item](SPItem) from a file. +/// +/// # Arguments +/// * `item` - [Item](SPItem) to purge the references from. +/// * `file_id` - [Id](FileId) of the file from which we want to remove the references. +pub fn purge_references(item: &Arc<RwLock<SPItem>>, file_id: FileId) { let mut new_references = vec![]; let mut item_lock = item.write(); let old_references = item_lock.references(); @@ -71,7 +77,7 @@ } let old_references = old_references.unwrap(); for reference in old_references { - if reference.uri != *uri { + if reference.file_id != file_id { new_references.push(reference.clone()); } } diff --git a/crates/semantic_analyzer/src/resolvers.rs b/crates/semantic_analyzer/src/resolvers.rs index 710394520..b2b7e3657 100644 --- a/crates/semantic_analyzer/src/resolvers.rs +++ b/crates/semantic_analyzer/src/resolvers.rs @@ -1,9 +1,8 @@ use lazy_static::lazy_static; -use lsp_types::Url; use parking_lot::RwLock; use regex::Regex; use std::sync::Arc; -use syntax::{Location, SPItem}; +use syntax::{Reference, SPItem}; use crate::{range_contains_range, token::Token}; @@ -16,7 +15,7 @@ impl<'a> Analyzer<'a> { /// # Arguments /// /// * `token` - [Token] to analyze. - pub(super) fn resolve_this(&mut self, token: &Arc<Token>, uri: &Url) -> bool { + pub(super) fn resolve_this(&mut self, token: &Arc<Token>) -> bool { if token.text != "this" { return false; } @@ -24,7 +23,7 @@ let item_lock = item.read(); match &*item_lock { SPItem::Methodmap(mm_item) => { - if *mm_item.uri == *uri + if mm_item.file_id == self.file_id && range_contains_range(&mm_item.full_range, &token.range) { self.previous_items.insert(token.text.clone(), item.clone()); } } SPItem::EnumStruct(es_item) => { - if *es_item.uri == *uri + if es_item.file_id == self.file_id && range_contains_range(&es_item.full_range, &token.range) { self.previous_items.insert(token.text.clone(), item.clone()); } } @@ -55,11 +54,7 @@ /// # Arguments /// /// * `token` - [Token] to analyze.
- pub(super) fn resolve_non_method_item( - &mut self, - token: &Arc<Token>, - uri: &Arc<Url>, - ) -> anyhow::Result<()> { + pub(super) fn resolve_non_method_item(&mut self, token: &Arc<Token>) -> anyhow::Result<()> { let full_key = format!( "{}-{}-{}", self.scope.mm_es_key(), self.scope.func_key(), token.text ); @@ -77,8 +72,9 @@ .or_else(|| self.tokens_map.get(&token.text)); if let Some(item) = item { - let reference = Location { - uri: uri.clone(), + let reference = Reference { + file_id: self.file_id, + uri: self.uri.clone(), range: token.range, v_range: if let SPItem::Define(_) = &*item.read() { token.range @@ -118,7 +114,6 @@ &mut self, parent: &Arc<Token>, field: &Arc<Token>, - uri: &Arc<Url>, ) -> Option<()> { if self.previous_items.is_empty() { return None; } @@ -152,8 +147,9 @@ item.as_ref()?; let item = item.unwrap(); - let reference = Location { - uri: uri.clone(), + let reference = Reference { + file_id: self.file_id, + uri: self.uri.clone(), range: field.range, v_range: self.build_v_range(&field.range), }; diff --git a/crates/sourcepawn_lsp/Cargo.toml b/crates/sourcepawn_lsp/Cargo.toml index efaabfaf9..f480cae85 100644 --- a/crates/sourcepawn_lsp/Cargo.toml +++ b/crates/sourcepawn_lsp/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "sourcepawn_lsp" -version = "0.10.19" +version = "0.11.0" description = "Language Server implementation for the SourcePawn programming language." include = ["Cargo.toml", "src/"] edition.workspace = true diff --git a/crates/sourcepawn_lsp/src/client.rs b/crates/sourcepawn_lsp/src/client.rs index 158d59217..0798c9ddd 100644 --- a/crates/sourcepawn_lsp/src/client.rs +++ b/crates/sourcepawn_lsp/src/client.rs @@ -47,6 +47,27 @@ impl LspClient { Ok(()) } + pub fn send_request_without_response<R>(&self, params: R::Params) -> anyhow::Result<()> + where + R: lsp_types::request::Request, + R::Params: Serialize, + R::Result: DeserializeOwned, + { + let id = RequestId::from(self.raw.next_id.fetch_add(1, Ordering::SeqCst)); + + let (tx, _) = crossbeam_channel::bounded(1); + self.raw.pending.insert(id.clone(), tx); + + let request = Request::new(id, R::METHOD.to_string(), params); + log::trace!( + "Sending request without waiting for a response {:?}", + request + ); + self.raw.sender.send(request.into())?; + + Ok(()) + } + pub fn send_request<R>(&self, params: R::Params) -> Result<R::Result> where R: lsp_types::request::Request, @@ -77,7 +98,11 @@ .raw .pending .remove(&response.id) - .expect("response with known request id received"); + .expect("response with unknown request id received"); + if response.result.is_none() { + // Ignore null responses, as they will be sent on a disconnected channel.
+ return Ok(()); + } log::trace!("Sending received response {:?}", response); tx.send(response)?; Ok(()) diff --git a/crates/sourcepawn_lsp/src/lsp_ext.rs b/crates/sourcepawn_lsp/src/lsp_ext.rs index c9a12d378..946473326 100644 --- a/crates/sourcepawn_lsp/src/lsp_ext.rs +++ b/crates/sourcepawn_lsp/src/lsp_ext.rs @@ -1,4 +1,4 @@ -use lsp_types::{notification::Notification, request::Request, TextDocumentIdentifier}; +use lsp_types::{notification::Notification, request::Request, TextDocumentIdentifier, Url}; use serde::{Deserialize, Serialize}; pub enum PreprocessedDocument {} @@ -15,6 +15,34 @@ pub struct PreprocessedDocumentParams { pub text_document: Option, } +pub enum ProjectMainPath {} + +impl Request for ProjectMainPath { + type Params = ProjectMainPathParams; + type Result = Url; + const METHOD: &'static str = "sourcepawn-lsp/projectMainPath"; +} + +#[derive(Deserialize, Serialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct ProjectMainPathParams { + pub uri: Option, +} + +pub enum ProjectsGraphviz {} + +impl Request for ProjectsGraphviz { + type Params = ProjectsGraphvizParams; + type Result = String; + const METHOD: &'static str = "sourcepawn-lsp/projectsGraphviz"; +} + +#[derive(Deserialize, Serialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct ProjectsGraphvizParams { + pub text_document: Option, +} + pub enum ServerStatusNotification {} impl Notification for ServerStatusNotification { diff --git a/crates/sourcepawn_lsp/src/providers/call_hierarchy.rs b/crates/sourcepawn_lsp/src/providers/call_hierarchy.rs index a60d41256..3f29e64a3 100644 --- a/crates/sourcepawn_lsp/src/providers/call_hierarchy.rs +++ b/crates/sourcepawn_lsp/src/providers/call_hierarchy.rs @@ -43,12 +43,13 @@ pub fn outgoing( let mut outgoing_calls = vec![]; let origin_item = &*items[0].read(); if let SPItem::Function(function_origin_item) = origin_item { - for item in store.get_all_items(true).iter() { + let file_id = store.path_interner.get(¶ms.item.uri)?; + for item in store.get_all_items(&file_id, true).iter() { if let SPItem::Function(function_item) = &*item.read() { let mut from_ranges = vec![]; for reference in function_item.references.iter() { if range_contains_range(&function_origin_item.full_range, &reference.range) - && function_origin_item.uri == reference.uri + && function_origin_item.file_id == reference.file_id { from_ranges.push(reference.range); } @@ -83,12 +84,13 @@ pub fn incoming( let mut incoming_calls = vec![]; let origin_item = &*items[0].read(); if let SPItem::Function(function_origin_item) = origin_item { - for item in store.get_all_items(true).iter() { + let file_id = store.path_interner.get(¶ms.item.uri)?; + for item in store.get_all_items(&file_id, true).iter() { if let SPItem::Function(function_item) = &*item.read() { let mut from_ranges = vec![]; for reference in function_origin_item.references.iter() { if range_contains_range(&function_item.full_range, &reference.range) - && function_item.uri == reference.uri + && function_item.file_id == reference.file_id { from_ranges.push(reference.range); } diff --git a/crates/sourcepawn_lsp/src/providers/completion.rs b/crates/sourcepawn_lsp/src/providers/completion.rs index 961f4c8dc..7df26a3d5 100644 --- a/crates/sourcepawn_lsp/src/providers/completion.rs +++ b/crates/sourcepawn_lsp/src/providers/completion.rs @@ -5,7 +5,7 @@ use lsp_types::{CompletionItem, CompletionList, CompletionParams}; use semantic_analyzer::is_ctor_call; use sourcepawn_lexer::{SourcepawnLexer, TokenKind}; use store::Store; -use 
syntax::range_contains_pos; +use syntax::{range_contains_pos, FileId}; use self::{ context::{is_callback_completion_request, is_doc_completion, is_method_call}, @@ -24,10 +24,10 @@ pub(crate) fn provide_completions( params: CompletionParams, ) -> Option { log::debug!("Providing completions."); - let document = store - .documents - .get(¶ms.text_document_position.text_document.uri)?; - let all_items = store.get_all_items(false); + let uri = ¶ms.text_document_position.text_document.uri; + let file_id = store.path_interner.get(uri)?; + let document = store.documents.get(&file_id)?; + let all_items = store.get_all_items(&file_id, false); let position = ¶ms.text_document_position.position; let line = document.line(position.line)?; let pre_line: String = line.chars().take(position.character as usize).collect(); @@ -51,7 +51,7 @@ pub(crate) fn provide_completions( '<' | '"' | '\'' | '/' | '\\' => { let include_st = is_include_statement(&pre_line); if let Some(include_st) = include_st { - return get_include_completions(store, include_st); + return get_include_completions(store, include_st, uri); } return None; } @@ -89,7 +89,7 @@ pub(crate) fn provide_completions( // Therefore, this block must cover all possibilities. let include_st = is_include_statement(&pre_line); if let Some(include_st) = include_st { - return get_include_completions(store, include_st); + return get_include_completions(store, include_st, uri); } if is_callback_completion_request(params.context.clone()) { @@ -119,8 +119,21 @@ pub(crate) fn resolve_completion_item( completion_item: CompletionItem, ) -> Option { let mut completion_item = completion_item; - let key = completion_item.data.clone()?; - if let Some(sp_item) = store.get_item_from_key(key.to_string().replace('"', "")) { + let mut data: Vec = completion_item + .data + .clone()? + .to_string() + .replace('"', "") + .split('$') + .map(|s| s.to_string()) + .collect(); + if data.len() != 2 { + return None; + } + let key = data.remove(0); + let file_id: FileId = data[0].parse::().ok()?.into(); + + if let Some(sp_item) = store.get_item_from_key(key, file_id) { let sp_item = &*sp_item.read(); completion_item.detail = Some(sp_item.formatted_text()); completion_item.documentation = sp_item.documentation(); diff --git a/crates/sourcepawn_lsp/src/providers/completion/include.rs b/crates/sourcepawn_lsp/src/providers/completion/include.rs index f32da6143..159122726 100644 --- a/crates/sourcepawn_lsp/src/providers/completion/include.rs +++ b/crates/sourcepawn_lsp/src/providers/completion/include.rs @@ -45,8 +45,13 @@ pub(super) fn is_include_statement(pre_line: &str) -> Option { pub(super) fn get_include_completions( store: &Store, include_st: IncludeStatement, + uri: &Url, ) -> Option { - let include_paths = store.environment.options.get_all_possible_include_folders(); + let main_path = store.get_project_main_path_from_id(&store.path_interner.get(uri)?)?; + let include_paths = store + .environment + .options + .get_all_possible_include_folders(main_path); let mut inc_uri_folders: Vec = vec![]; for inc_path in include_paths { @@ -87,15 +92,16 @@ fn get_include_file_completions( // Extract everything that has already been typed in the statement. 
let typed_path = RE1.replace(&include_st.text, "$a").to_string(); - for inc_uri in store.documents.keys() { + for document in store.documents.values() { for inc_uri_folder in inc_uri_folders.iter() { - if !inc_uri + if !document + .uri .to_string() .contains(&format!("{}/{}", inc_uri_folder, typed_path)) { continue; } - if let Ok(inc_path) = inc_uri.to_file_path() { + if let Ok(inc_path) = document.uri.to_file_path() { let parent_folder = inc_uri_folder .to_file_path() .unwrap() @@ -114,7 +120,7 @@ fn get_include_file_completions( let mut trail = ">"; if !include_st.use_chevron { // Don't insert anything as VSCode already autocompletes the second ". - // FIXME: This could be fixed programmatically to account for other editors. + // TODO: This could be fixed programmatically to account for other editors. trail = ""; } items.push(CompletionItem { diff --git a/crates/sourcepawn_lsp/src/providers/document_symbol.rs b/crates/sourcepawn_lsp/src/providers/document_symbol.rs index 9614a914a..79e115f15 100644 --- a/crates/sourcepawn_lsp/src/providers/document_symbol.rs +++ b/crates/sourcepawn_lsp/src/providers/document_symbol.rs @@ -5,8 +5,8 @@ pub fn provide_document_symbol( store: &Store, params: DocumentSymbolParams, ) -> Option> { - let uri = params.text_document.uri; - let document = store.documents.get(&uri)?; + let file_id = store.path_interner.get(¶ms.text_document.uri)?; + let document = store.documents.get(&file_id)?; let mut symbols: Vec = vec![]; for item in document.sp_items.clone() { let symbol = item.read().to_document_symbol(); diff --git a/crates/sourcepawn_lsp/src/providers/reference.rs b/crates/sourcepawn_lsp/src/providers/reference.rs index c72cdc5b0..e426c45a5 100644 --- a/crates/sourcepawn_lsp/src/providers/reference.rs +++ b/crates/sourcepawn_lsp/src/providers/reference.rs @@ -15,5 +15,16 @@ pub fn provide_reference(store: &Store, params: ReferenceParams) -> Option Option Option { - let all_items = &store.get_all_items(false); + let file_id = store.path_interner.get(¶ms.text_document.uri)?; + let all_items = &store.get_all_items(&file_id, false); if all_items.is_empty() { return None; } @@ -40,22 +41,14 @@ pub fn provide_semantic_tokens( for item in all_items.iter() { let item_lock = item.read(); match &*item_lock { - SPItem::Enum(enum_item) => builder.build_enum(enum_item, ¶ms.text_document.uri), + SPItem::Enum(enum_item) => builder.build_enum(enum_item, file_id), SPItem::Variable(variable_item) => { - builder.build_global_variable(variable_item, ¶ms.text_document.uri) - } - SPItem::Define(define_item) => { - builder.build_define(define_item, ¶ms.text_document.uri) - } - SPItem::Function(function_item) => { - builder.build_function(function_item, ¶ms.text_document.uri) - } - SPItem::Methodmap(mm_item) => { - builder.build_methodmap(mm_item, ¶ms.text_document.uri) - } - SPItem::EnumStruct(es_item) => { - builder.build_enum_struct(es_item, ¶ms.text_document.uri) + builder.build_global_variable(variable_item, file_id) } + SPItem::Define(define_item) => builder.build_define(define_item, file_id), + SPItem::Function(function_item) => builder.build_function(function_item, file_id), + SPItem::Methodmap(mm_item) => builder.build_methodmap(mm_item, file_id), + SPItem::EnumStruct(es_item) => builder.build_enum_struct(es_item, file_id), _ => Ok(()), } .unwrap_or_default(); diff --git a/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder.rs b/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder.rs index df275d67c..ebdaeb551 100644 --- 
a/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder.rs +++ b/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder.rs @@ -1,4 +1,4 @@ -use anyhow::anyhow; +use anyhow::bail; use fxhash::FxHashMap; use lsp_types::{ Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens, @@ -59,13 +59,13 @@ impl SemanticTokensBuilder { token_modifiers: Option>, ) -> anyhow::Result<()> { if !self.has_legend { - return Err(anyhow!("Legend must be provided in constructor")); + bail!("Legend must be provided in constructor"); } if range.start.line != range.end.line { - return Err(anyhow!("{:?} cannot span multiple lines", range)); + bail!("{:?} cannot span multiple lines", range); } if !self.token_type_str_to_int.contains_key(&token_type) { - return Err(anyhow!("{:?} is not in the provided legend", token_type)); + bail!("{:?} is not in the provided legend", token_type); } let line = range.start.line; @@ -88,10 +88,7 @@ impl SemanticTokensBuilder { }; n_token_modifiers |= (1 << n_token_modifier) >> c_as_u32; } else { - return Err(anyhow!( - "{:?} is not in the provided legend", - token_modifier - )); + bail!("{:?} is not in the provided legend", token_modifier); } } } diff --git a/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_define.rs b/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_define.rs index 9e4561dad..848153136 100644 --- a/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_define.rs +++ b/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_define.rs @@ -1,5 +1,5 @@ -use lsp_types::{SemanticTokenModifier, SemanticTokenType, Url}; -use syntax::define_item::DefineItem; +use lsp_types::{SemanticTokenModifier, SemanticTokenType}; +use syntax::{define_item::DefineItem, FileId}; use super::SemanticTokensBuilder; @@ -7,9 +7,9 @@ impl SemanticTokensBuilder { pub(crate) fn build_define( &mut self, define_item: &DefineItem, - uri: &Url, + file_id: FileId, ) -> anyhow::Result<()> { - if *define_item.uri == *uri { + if define_item.file_id == file_id { self.push( define_item.v_range, SemanticTokenType::MACRO, @@ -19,10 +19,10 @@ impl SemanticTokensBuilder { ]), )?; } - for ref_ in define_item.references.iter() { - if *ref_.uri == *uri { + for reference in define_item.references.iter() { + if reference.file_id == file_id { self.push( - ref_.v_range, + reference.v_range, SemanticTokenType::MACRO, Some(vec![SemanticTokenModifier::READONLY]), )?; diff --git a/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_enum.rs b/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_enum.rs index 214a247c9..eec6cb17b 100644 --- a/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_enum.rs +++ b/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_enum.rs @@ -1,25 +1,29 @@ -use lsp_types::{SemanticTokenModifier, SemanticTokenType, Url}; -use syntax::{enum_item::EnumItem, SPItem}; +use lsp_types::{SemanticTokenModifier, SemanticTokenType}; +use syntax::{enum_item::EnumItem, FileId, SPItem}; use super::SemanticTokensBuilder; impl SemanticTokensBuilder { - pub(crate) fn build_enum(&mut self, enum_item: &EnumItem, uri: &Url) -> anyhow::Result<()> { - if *enum_item.uri == *uri { + pub(crate) fn build_enum( + &mut self, + enum_item: &EnumItem, + file_id: FileId, + ) -> anyhow::Result<()> { + if enum_item.file_id == file_id { self.push( enum_item.v_range, SemanticTokenType::ENUM, Some(vec![SemanticTokenModifier::DECLARATION]), )?; } - for ref_ in enum_item.references.iter() { - if 
*ref_.uri == *uri { - self.push(ref_.v_range, SemanticTokenType::ENUM, None)?; + for reference in enum_item.references.iter() { + if reference.file_id == file_id { + self.push(reference.v_range, SemanticTokenType::ENUM, None)?; } } enum_item.children.iter().for_each(|child| { if let SPItem::EnumMember(enum_member_item) = &*child.read() { - self.build_enum_member(enum_member_item, uri) + self.build_enum_member(enum_member_item, file_id) .unwrap_or_default(); } }); diff --git a/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_enum_member.rs b/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_enum_member.rs index 2cb351ea9..b1e95a5a7 100644 --- a/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_enum_member.rs +++ b/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_enum_member.rs @@ -1,5 +1,5 @@ -use lsp_types::{SemanticTokenModifier, SemanticTokenType, Url}; -use syntax::enum_member_item::EnumMemberItem; +use lsp_types::{SemanticTokenModifier, SemanticTokenType}; +use syntax::{enum_member_item::EnumMemberItem, FileId}; use super::SemanticTokensBuilder; @@ -7,9 +7,9 @@ impl SemanticTokensBuilder { pub(crate) fn build_enum_member( &mut self, enum_member_item: &EnumMemberItem, - uri: &Url, + file_id: FileId, ) -> anyhow::Result<()> { - if *enum_member_item.uri == *uri { + if enum_member_item.file_id == file_id { self.push( enum_member_item.v_range, SemanticTokenType::ENUM_MEMBER, @@ -19,10 +19,10 @@ impl SemanticTokensBuilder { ]), )?; } - for ref_ in enum_member_item.references.iter() { - if *ref_.uri == *uri { + for reference in enum_member_item.references.iter() { + if reference.file_id == file_id { self.push( - ref_.v_range, + reference.v_range, SemanticTokenType::ENUM_MEMBER, Some(vec![SemanticTokenModifier::READONLY]), )?; diff --git a/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_enum_struct.rs b/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_enum_struct.rs index 350caef4f..06e342bb3 100644 --- a/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_enum_struct.rs +++ b/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_enum_struct.rs @@ -1,5 +1,5 @@ -use lsp_types::{SemanticTokenModifier, SemanticTokenType, Url}; -use syntax::{enum_struct_item::EnumStructItem, SPItem}; +use lsp_types::{SemanticTokenModifier, SemanticTokenType}; +use syntax::{enum_struct_item::EnumStructItem, FileId, SPItem}; use super::SemanticTokensBuilder; @@ -7,24 +7,24 @@ impl SemanticTokensBuilder { pub(crate) fn build_enum_struct( &mut self, es_item: &EnumStructItem, - uri: &Url, + file_id: FileId, ) -> anyhow::Result<()> { - if *es_item.uri == *uri { + if es_item.file_id == file_id { self.push( es_item.v_range, SemanticTokenType::STRUCT, Some(vec![SemanticTokenModifier::DECLARATION]), )?; } - for ref_ in es_item.references.iter() { - if *ref_.uri == *uri { - self.push(ref_.v_range, SemanticTokenType::STRUCT, None)?; + for reference in es_item.references.iter() { + if reference.file_id == file_id { + self.push(reference.v_range, SemanticTokenType::STRUCT, None)?; } } es_item.children.iter().for_each(|child| { match &*child.read() { - SPItem::Function(method_item) => self.build_method(method_item, uri, ""), - SPItem::Variable(es_field) => self.build_es_field(es_field, uri), + SPItem::Function(method_item) => self.build_method(method_item, file_id, ""), + SPItem::Variable(es_field) => self.build_es_field(es_field, file_id), _ => Ok(()), } .unwrap_or_default(); diff --git 
a/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_es_field.rs b/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_es_field.rs index 9c2ff01c4..fb950b8f5 100644 --- a/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_es_field.rs +++ b/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_es_field.rs @@ -1,5 +1,5 @@ -use lsp_types::{SemanticTokenModifier, SemanticTokenType, Url}; -use syntax::variable_item::VariableItem; +use lsp_types::{SemanticTokenModifier, SemanticTokenType}; +use syntax::{variable_item::VariableItem, FileId}; use super::SemanticTokensBuilder; @@ -7,18 +7,18 @@ impl SemanticTokensBuilder { pub(crate) fn build_es_field( &mut self, field_item: &VariableItem, - uri: &Url, + file_id: FileId, ) -> anyhow::Result<()> { - if *field_item.uri == *uri { + if field_item.file_id == file_id { self.push( field_item.v_range, SemanticTokenType::PROPERTY, Some(vec![SemanticTokenModifier::DECLARATION]), )?; } - for ref_ in field_item.references.iter() { - if *ref_.uri == *uri { - self.push(ref_.v_range, SemanticTokenType::PROPERTY, Some(vec![]))?; + for reference in field_item.references.iter() { + if reference.file_id == file_id { + self.push(reference.v_range, SemanticTokenType::PROPERTY, Some(vec![]))?; } } diff --git a/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_function.rs b/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_function.rs index 28b44d9b7..b148aeedb 100644 --- a/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_function.rs +++ b/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_function.rs @@ -1,5 +1,5 @@ -use lsp_types::{SemanticTokenModifier, SemanticTokenType, Url}; -use syntax::{function_item::FunctionItem, SPItem}; +use lsp_types::{SemanticTokenModifier, SemanticTokenType}; +use syntax::{function_item::FunctionItem, FileId, SPItem}; use super::SemanticTokensBuilder; @@ -7,7 +7,7 @@ impl SemanticTokensBuilder { pub(crate) fn build_function( &mut self, function_item: &FunctionItem, - uri: &Url, + file_id: FileId, ) -> anyhow::Result<()> { let type_ = { if function_item.parent.is_some() { @@ -16,28 +16,28 @@ impl SemanticTokensBuilder { SemanticTokenType::FUNCTION } }; - if *function_item.uri == *uri { + if function_item.file_id == file_id { self.push( function_item.v_range, type_.clone(), Some(vec![SemanticTokenModifier::DECLARATION]), )?; } - for ref_ in function_item.references.iter() { - if *ref_.uri == *uri { + for reference in function_item.references.iter() { + if reference.file_id == file_id { let mut modifiers = vec![]; - if function_item.v_range.eq(&ref_.v_range) { + if function_item.v_range.eq(&reference.v_range) { modifiers.push(SemanticTokenModifier::DECLARATION); } if function_item.description.deprecated.is_some() { modifiers.push(SemanticTokenModifier::DEPRECATED); } - self.push(ref_.v_range, type_.clone(), Some(modifiers))?; + self.push(reference.v_range, type_.clone(), Some(modifiers))?; } } function_item.children.iter().for_each(|child| { if let SPItem::Variable(variable_item) = &*child.read() { - self.build_local_variable(variable_item, uri) + self.build_local_variable(variable_item, file_id) .unwrap_or_default(); } }); diff --git a/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_global_variable.rs b/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_global_variable.rs index 98c526672..462edeb8a 100644 --- 
a/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_global_variable.rs +++ b/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_global_variable.rs @@ -1,5 +1,5 @@ -use lsp_types::{SemanticTokenModifier, SemanticTokenType, Url}; -use syntax::variable_item::VariableItem; +use lsp_types::{SemanticTokenModifier, SemanticTokenType}; +use syntax::{variable_item::VariableItem, FileId}; use super::SemanticTokensBuilder; @@ -7,19 +7,19 @@ impl SemanticTokensBuilder { pub(crate) fn build_global_variable( &mut self, variable_item: &VariableItem, - uri: &Url, + file_id: FileId, ) -> anyhow::Result<()> { - if *variable_item.uri == *uri { + if variable_item.file_id == file_id { self.push( variable_item.v_range, SemanticTokenType::VARIABLE, Some(vec![SemanticTokenModifier::DECLARATION]), )?; } - for ref_ in variable_item.references.iter() { - if *ref_.uri == *uri { + for reference in variable_item.references.iter() { + if reference.file_id == file_id { self.push( - ref_.v_range, + reference.v_range, SemanticTokenType::VARIABLE, Some(vec![SemanticTokenModifier::MODIFICATION]), )?; diff --git a/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_local_variable.rs b/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_local_variable.rs index 677f5ddd0..ebebca79a 100644 --- a/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_local_variable.rs +++ b/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_local_variable.rs @@ -1,5 +1,5 @@ -use lsp_types::{SemanticTokenModifier, SemanticTokenType, Url}; -use syntax::variable_item::VariableItem; +use lsp_types::{SemanticTokenModifier, SemanticTokenType}; +use syntax::{variable_item::VariableItem, FileId}; use super::SemanticTokensBuilder; @@ -7,18 +7,18 @@ impl SemanticTokensBuilder { pub(crate) fn build_local_variable( &mut self, variable_item: &VariableItem, - uri: &Url, + file_id: FileId, ) -> anyhow::Result<()> { - if *variable_item.uri == *uri { + if variable_item.file_id == file_id { self.push( variable_item.v_range, SemanticTokenType::VARIABLE, Some(vec![SemanticTokenModifier::DECLARATION]), )?; } - for ref_ in variable_item.references.iter() { - if *ref_.uri == *uri { - self.push(ref_.v_range, SemanticTokenType::VARIABLE, Some(vec![]))?; + for reference in variable_item.references.iter() { + if reference.file_id == file_id { + self.push(reference.v_range, SemanticTokenType::VARIABLE, Some(vec![]))?; } } diff --git a/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_method.rs b/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_method.rs index e2c103ced..d33abe879 100644 --- a/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_method.rs +++ b/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_method.rs @@ -1,5 +1,5 @@ -use lsp_types::{SemanticTokenModifier, SemanticTokenType, Url}; -use syntax::{function_item::FunctionItem, SPItem}; +use lsp_types::{SemanticTokenModifier, SemanticTokenType}; +use syntax::{function_item::FunctionItem, FileId, SPItem}; use super::SemanticTokensBuilder; @@ -7,28 +7,28 @@ impl SemanticTokensBuilder { pub(crate) fn build_method( &mut self, method_item: &FunctionItem, - uri: &Url, + file_id: FileId, methodmap_name: &str, ) -> anyhow::Result<()> { let mut token_type = SemanticTokenType::METHOD; if methodmap_name == method_item.name { token_type = SemanticTokenType::CLASS } - if *method_item.uri == *uri { + if method_item.file_id == file_id { self.push( method_item.v_range, 
token_type.clone(), Some(vec![SemanticTokenModifier::DECLARATION]), )?; } - for ref_ in method_item.references.iter() { - if *ref_.uri == *uri { - self.push(ref_.v_range, token_type.clone(), Some(vec![]))?; + for reference in method_item.references.iter() { + if reference.file_id == file_id { + self.push(reference.v_range, token_type.clone(), Some(vec![]))?; } } method_item.children.iter().for_each(|child| { if let SPItem::Variable(variable_item) = &*child.read() { - self.build_local_variable(variable_item, uri) + self.build_local_variable(variable_item, file_id) .unwrap_or_default(); } }); diff --git a/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_methodmap.rs b/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_methodmap.rs index 53963bef5..7764f58b7 100644 --- a/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_methodmap.rs +++ b/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_methodmap.rs @@ -1,5 +1,5 @@ -use lsp_types::{SemanticTokenModifier, SemanticTokenType, Url}; -use syntax::{methodmap_item::MethodmapItem, SPItem}; +use lsp_types::{SemanticTokenModifier, SemanticTokenType}; +use syntax::{methodmap_item::MethodmapItem, FileId, SPItem}; use super::SemanticTokensBuilder; @@ -7,24 +7,26 @@ impl SemanticTokensBuilder { pub(crate) fn build_methodmap( &mut self, mm_item: &MethodmapItem, - uri: &Url, + file_id: FileId, ) -> anyhow::Result<()> { - if *mm_item.uri == *uri { + if mm_item.file_id == file_id { self.push( mm_item.v_range, SemanticTokenType::CLASS, Some(vec![SemanticTokenModifier::DECLARATION]), )?; } - for ref_ in mm_item.references.iter() { - if *ref_.uri == *uri { - self.push(ref_.v_range, SemanticTokenType::CLASS, None)?; + for reference in mm_item.references.iter() { + if reference.file_id == file_id { + self.push(reference.v_range, SemanticTokenType::CLASS, None)?; } } mm_item.children.iter().for_each(|child| { match &*child.read() { - SPItem::Function(method_item) => self.build_method(method_item, uri, &mm_item.name), - SPItem::Property(property_item) => self.build_property(property_item, uri), + SPItem::Function(method_item) => { + self.build_method(method_item, file_id, &mm_item.name) + } + SPItem::Property(property_item) => self.build_property(property_item, file_id), _ => Ok(()), } .unwrap_or_default(); diff --git a/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_property.rs b/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_property.rs index fecae7477..841bc680a 100644 --- a/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_property.rs +++ b/crates/sourcepawn_lsp/src/providers/semantic_tokens/builder/build_property.rs @@ -1,5 +1,5 @@ -use lsp_types::{SemanticTokenModifier, SemanticTokenType, Url}; -use syntax::property_item::PropertyItem; +use lsp_types::{SemanticTokenModifier, SemanticTokenType}; +use syntax::{property_item::PropertyItem, FileId}; use super::SemanticTokensBuilder; @@ -7,18 +7,18 @@ impl SemanticTokensBuilder { pub(crate) fn build_property( &mut self, property_item: &PropertyItem, - uri: &Url, + file_id: FileId, ) -> anyhow::Result<()> { - if *property_item.uri == *uri { + if property_item.file_id == file_id { self.push( property_item.v_range, SemanticTokenType::PROPERTY, Some(vec![SemanticTokenModifier::DECLARATION]), )?; } - for ref_ in property_item.references.iter() { - if *ref_.uri == *uri { - self.push(ref_.v_range, SemanticTokenType::PROPERTY, Some(vec![]))?; + for reference in property_item.references.iter() { + if 
reference.file_id == file_id { + self.push(reference.v_range, SemanticTokenType::PROPERTY, Some(vec![]))?; } } diff --git a/crates/sourcepawn_lsp/src/providers/signature_help.rs b/crates/sourcepawn_lsp/src/providers/signature_help.rs index f86157ad9..ab4e3af56 100644 --- a/crates/sourcepawn_lsp/src/providers/signature_help.rs +++ b/crates/sourcepawn_lsp/src/providers/signature_help.rs @@ -6,7 +6,8 @@ mod signature_attributes; pub fn provide_signature_help(store: &Store, params: SignatureHelpParams) -> Option { let uri = params.text_document_position_params.text_document.uri; - let document = store.documents.get(&uri)?; + let file_id = store.path_interner.get(&uri)?; + let document = store.documents.get(&file_id)?; let signature_attributes = SignatureAttributes::get_signature_attributes( document, params.text_document_position_params.position, diff --git a/crates/sourcepawn_lsp/src/server/diagnostics.rs b/crates/sourcepawn_lsp/src/server/diagnostics.rs index 77c6aaa6c..fdaade707 100644 --- a/crates/sourcepawn_lsp/src/server/diagnostics.rs +++ b/crates/sourcepawn_lsp/src/server/diagnostics.rs @@ -1,7 +1,7 @@ use linter::spcomp::get_spcomp_diagnostics; use lsp_types::{ notification::{PublishDiagnostics, ShowMessage}, - MessageType, PublishDiagnosticsParams, ShowMessageParams, + MessageType, PublishDiagnosticsParams, ShowMessageParams, Url, }; use std::sync::Arc; @@ -9,45 +9,74 @@ use super::InternalMessage; use crate::{lsp_ext, LspClient, Server}; impl Server { - /// Reload the diagnostics of the workspace, by running spcomp. - pub(crate) fn reload_diagnostics(&mut self) { + /// Runs [`Server::reload_project_diagnostics()`](#method.reload_project_diagnostics) by getting the main path + /// from the uri provided. + /// + /// # Arguments + /// * `uri` - [Url] of a file in the project to reload the diagnostics of. + pub(crate) fn reload_diagnostics(&mut self, uri: Url) { + let Some(file_id) = self.store.read().path_interner.get(&uri) else { + return; + }; + let Some(main_node) = self.store.read().projects.find_root_from_id(file_id) else { + return; + }; + let main_path_uri = self + .store + .read() + .path_interner + .lookup(main_node.file_id) + .clone(); + self.reload_project_diagnostics(main_path_uri); + } + + /// Reload the diagnostics of a project, by running spcomp and the server's linter. + /// + /// # Arguments + /// * `main_path_uri` - [Url] of the main file of the project. + pub(crate) fn reload_project_diagnostics(&mut self, main_path_uri: Url) { self.store.write().diagnostics.clear_all_diagnostics(); - self.lint_all_documents(); + self.lint_project(&main_path_uri); let client = self.client.clone(); let sender = self.internal_tx.clone(); let store = Arc::clone(&self.store); - if let Ok(Some(main_path_uri)) = self.store.read().environment.options.get_main_path_uri() { - // Only reload the diagnostics if the main path is defined. - self.pool.execute(move || { - let _ = client.send_spcomp_status(false); - if let Ok(diagnostics_map) = get_spcomp_diagnostics( - main_path_uri, - &store.read().environment.options.spcomp_path, - &store.read().environment.options.includes_directories, - &store.read().environment.options.linter_arguments, - ) { + // Only reload the diagnostics if the main path is defined. 
+ self.pool.execute(move || { + let _ = client.send_spcomp_status(false); + let result = get_spcomp_diagnostics( + main_path_uri, + &store.read().environment.options.spcomp_path, + &store.read().environment.options.includes_directories, + &store.read().environment.options.linter_arguments, + ); + match result { + Ok(diagnostics_map) => { let _ = sender.send(InternalMessage::Diagnostics(diagnostics_map)); - } else { + } + Err(err) => { // Failed to run spcomp. let _ = client.send_notification::(ShowMessageParams { - message: "Failed to run spcomp.\nIs the path valid?".to_string(), + message: format!("Failed to run spcomp.\n{:?}", err), typ: MessageType::ERROR, }); } - let _ = client.send_spcomp_status(true); - }); - } + } + let _ = client.send_spcomp_status(true); + }); } - /// Lint all documents with the custom linter. - pub fn lint_all_documents(&mut self) { + /// Lint all documents in the project with the custom linter. + pub fn lint_project(&mut self, uri: &Url) { + let Some(file_id) = self.store.read().path_interner.get(uri) else { + return; + }; self.store .write() .diagnostics .clear_all_global_diagnostics(); - let all_items_flat = self.store.read().get_all_items(true); + let all_items_flat = self.store.read().get_all_items(&file_id, true); // TODO: Make diagnostics an external crate to avoid having to pass the store as writable. self.store .write() @@ -59,9 +88,8 @@ impl Server { /// Publish all the diagnostics of the store. This will override all diagnostics that have already /// been sent to the client. pub fn publish_diagnostics(&mut self) { - eprintln!("publishing {:#?}", self.store.read().diagnostics); + log::debug!("publishing {:#?}", self.store.read().diagnostics); for (uri, diagnostics) in self.store.read().diagnostics.iter() { - eprintln!("{:#?}", diagnostics); let _ = self .client .send_notification::(PublishDiagnosticsParams { diff --git a/crates/sourcepawn_lsp/src/server/files.rs b/crates/sourcepawn_lsp/src/server/files.rs index 087e6582d..16165e599 100644 --- a/crates/sourcepawn_lsp/src/server/files.rs +++ b/crates/sourcepawn_lsp/src/server/files.rs @@ -1,8 +1,9 @@ -use anyhow::{anyhow, Context}; +use anyhow::{anyhow, bail, Context}; use lsp_types::{notification::ShowMessage, MessageType, ShowMessageParams, Url}; -use std::{sync::Arc, time::Instant}; +use std::sync::Arc; +use syntax::{uri_to_file_name, FileId}; -use crate::{lsp_ext, server::InternalMessage, Server}; +use crate::{lsp_ext, server::progress::Progress, Server}; mod events; mod watching; @@ -11,75 +12,41 @@ impl Server { pub(super) fn reparse_all(&mut self) -> anyhow::Result<()> { log::debug!("Scanning all the files."); self.indexing = true; + self.store.write().first_parse = true; let _ = self.send_status(lsp_ext::ServerStatusParams { health: crate::lsp_ext::Health::Ok, quiescent: !self.indexing, message: None, }); + self.parse_directories(); - let main_uri = self.store.read().environment.options.get_main_path_uri(); - let now_parse = Instant::now(); - if let Ok(main_uri) = main_uri { - if let Some(main_uri) = main_uri { - log::debug!("Main path is set, parsing files."); - self.parse_files_for_main_path(&main_uri)?; - } else { - if let Some(uri) = self.store.read().find_main_with_heuristic() { - log::debug!("Main path was not set, and was infered as {:?}", uri); - let path = uri.to_file_path().unwrap(); - let mut options = self.store.read().environment.options.as_ref().clone(); - options.main_path = path.clone(); - self.internal_tx - .send(InternalMessage::SetOptions(Arc::new(options))) - .unwrap(); - let _ 
= self - .client - .send_notification::(ShowMessageParams { - message: format!( - "MainPath was not set and was automatically infered as {}.", - path.file_name().unwrap().to_str().unwrap() - ), - typ: MessageType::INFO, - }); - return Ok(()); - } - log::debug!("Main path was not set, and could not be infered."); - let _ = self - .client - .send_notification::(ShowMessageParams { - message: "No MainPath setting and none could be infered.".to_string(), - typ: MessageType::WARNING, - }); - self.parse_files_for_missing_main_path(); + + self.report_progress("Resolving roots", Progress::Begin, None, None, None); + let projects = self.store.write().load_projects_graph(); + let roots = projects.find_roots(); + self.report_progress("Resolving roots", Progress::End, None, None, None); + + self.report_progress("Parsing", Progress::Begin, None, None, None); + for node in roots { + let main_file_name = + uri_to_file_name(self.store.read().path_interner.lookup(node.file_id)); + if let Some(main_file_name) = main_file_name { + self.report_progress( + "Parsing", + Progress::Report, + Some(format!("({})", main_file_name)), + None, + None, + ); } - } else if main_uri.is_err() { - log::debug!("Main path is invalid."); - let _ = self - .client - .send_notification::(ShowMessageParams { - message: "Invalid MainPath setting.".to_string(), - typ: MessageType::WARNING, - }); - self.parse_files_for_missing_main_path(); + let _ = self.parse_project(node.file_id); } - let now_analysis = Instant::now(); - self.store.write().find_all_references(); - self.store.write().first_parse = false; - let parse_duration = now_parse.elapsed(); - let analysis_duration = now_analysis.elapsed(); - log::info!( - r#"Scanned all the files in {:.2?}: - - {} file(s) were scanned. - - Parsing took {:.2?}. - - Analysis took {:.2?}. 
- "#, - parse_duration, - self.store.read().documents.len(), - parse_duration - analysis_duration, - analysis_duration, - ); + self.report_progress("Parsing", Progress::End, None, None, None); + + self.store.write().projects = projects; + self.indexing = false; - self.reload_diagnostics(); + self.store.write().first_parse = false; let _ = self.send_status(lsp_ext::ServerStatusParams { health: crate::lsp_ext::Health::Ok, quiescent: !self.indexing, @@ -89,51 +56,26 @@ impl Server { Ok(()) } - fn parse_files_for_missing_main_path(&mut self) { - let mut uris: Vec<Url> = vec![]; - for uri in self.store.read().documents.keys() { - uris.push(uri.as_ref().clone()); - } - for uri in uris.iter() { - let document = self.store.read().get(uri); - if let Some(document) = document { - match self.store.write().handle_open_document( - &document.uri, - document.text, - &mut self.parser, - ) { - Ok(_) => {} - Err(error) => { - log::error!("Error while parsing file: {}", error); - } - } - } - } - } - - fn parse_files_for_main_path(&mut self, main_uri: &Url) -> anyhow::Result<()> { + fn parse_project(&mut self, main_id: FileId) -> anyhow::Result<()> { let document = self .store .read() - .get(main_uri) - .context(format!("Main Path does not exist at uri {:?}", main_uri))?; + .get_cloned(&main_id) + .context(format!("Main Path does not exist for id {:?}", main_id))?; self.store .write() .handle_open_document(&document.uri, document.text, &mut self.parser) - .context(format!("Could not parse file at uri {:?}", main_uri))?; + .context(format!("Could not parse file at id {:?}", main_id))?; Ok(()) } - fn parse_directories(&mut self) { - let directories = self - .store - .read() - .environment - .options - .includes_directories - .clone(); - for path in directories { + pub(crate) fn parse_directories(&mut self) { + self.report_progress("Indexing", Progress::Begin, None, None, None); + let store = self.store.read(); + let folders = store.folders(); + drop(store); + for (i, path) in folders.iter().enumerate() { if !path.exists() { self.client .send_notification::<ShowMessage>(ShowMessageParams { @@ -146,8 +88,16 @@ .unwrap_or_default(); continue; } - self.store.write().find_documents(&path); + self.report_progress( + "Indexing", + Progress::Report, + Some(format!("{}/{} folders", i + 1, folders.len())), + None, + None, + ); + self.store.write().discover_documents(path); } + self.report_progress("Indexing", Progress::End, None, None, None); } /// Check if a [uri](Url) is known or not. If it is not, scan its parent folder and analyze all the documents that @@ -157,24 +107,21 @@ /// /// * `uri` - [Uri](Url) of the document to test for.
pub(super) fn read_unscanned_document(&mut self, uri: Arc<Url>) -> anyhow::Result<()> { - if self.store.read().documents.get(&uri).is_some() { + let file_id = self.store.read().path_interner.get(&uri).ok_or(anyhow!( + "Couldn't get a file id from the path interner for {}", + uri + ))?; + if self.store.read().documents.get(&file_id).is_some() { return Ok(()); } if uri.to_file_path().is_err() { - return Err(anyhow!("Couldn't extract a path from {}", uri)); + bail!("Couldn't extract a path from {}", uri); } let path = uri.to_file_path().unwrap(); let parent_dir = path.parent().unwrap().to_path_buf(); - self.store.write().find_documents(&parent_dir); - let uris: Vec<Url> = self - .store - .read() - .documents - .keys() - .map(|uri| uri.as_ref().clone()) - .collect(); - for uri in uris { - if let Some(document) = self.store.read().documents.get(&uri) { + self.store.write().discover_documents(&parent_dir); + // Collect the ids first so no read lock is held while re-parsing below. + let file_ids: Vec<FileId> = self.store.read().documents.keys().copied().collect(); + for file_id in file_ids { + let document = self.store.read().get_cloned(&file_id); + if let Some(document) = document { if !document.parsed { self.store .write() diff --git a/crates/sourcepawn_lsp/src/server/files/events.rs b/crates/sourcepawn_lsp/src/server/files/events.rs index 303b34538..e449a55d0 100644 --- a/crates/sourcepawn_lsp/src/server/files/events.rs +++ b/crates/sourcepawn_lsp/src/server/files/events.rs @@ -10,16 +10,17 @@ impl Server { match event.kind { notify::EventKind::Create(_) => { for path in event.paths { + let Ok(uri) = Url::from_file_path(path.clone()) else { + continue; + }; let _ = self.store.write().load(path, &mut self.parser); + self.reload_diagnostics(uri); } - self.reload_diagnostics(); } notify::EventKind::Modify(modify_event) => { - let uri = Url::from_file_path(event.paths[0].clone()); - if uri.is_err() { + let Ok(mut uri) = Url::from_file_path(event.paths[0].clone()) else { return; - } - let mut uri = uri.unwrap(); + }; normalize_uri(&mut uri); match modify_event { notify::event::ModifyKind::Name(_) => { @@ -41,11 +42,9 @@ return; } } - let uri = Url::from_file_path(&event.paths[0]); - if uri.is_err() { + let Ok(mut uri) = Url::from_file_path(&event.paths[0]) else { return; - } - let mut uri = uri.unwrap(); + }; normalize_uri(&mut uri); let mut uris = self.store.write().get_all_files_in_folder(&uri); if uris.is_empty() { @@ -70,7 +69,7 @@ } } for uri in uris.iter() { - if self.store.read().documents.contains_key(uri) { + if self.store.read().contains_uri(uri) { self.store.write().remove(uri, &mut self.parser); } else { let _ = self @@ -81,7 +80,7 @@ } } _ => { - if self.store.read().documents.contains_key(&uri) { + if self.store.read().contains_uri(&uri) { let _ = self .store .write() @@ -89,14 +88,14 @@ } } } - self.reload_diagnostics(); + self.reload_diagnostics(uri); } notify::EventKind::Remove(_) => { for mut uri in event.paths.iter().flat_map(Url::from_file_path) { normalize_uri(&mut uri); self.store.write().remove(&uri, &mut self.parser); + self.reload_diagnostics(uri); } - self.reload_diagnostics(); } notify::EventKind::Any | notify::EventKind::Access(_) | notify::EventKind::Other => {} }; diff --git a/crates/sourcepawn_lsp/src/server/mod.rs b/crates/sourcepawn_lsp/src/server/mod.rs index 34af81464..ad3ab2592 100644 --- a/crates/sourcepawn_lsp/src/server/mod.rs +++ b/crates/sourcepawn_lsp/src/server/mod.rs @@ -24,6 +24,7 @@ use crate::{capabilities::ClientCapabilitiesExt, client::LspClient, lsp_ext}; mod diagnostics; mod files; mod notifications; +mod progress; mod requests; #[derive(Debug)] @@ -126,6
+127,13 @@ impl Server { }; let client = self.client.clone(); let sender = self.internal_tx.clone(); - let root_uri = self.store.read().environment.root_uri.clone(); self.pool.execute(move || { match client.send_request::(params) { Ok(mut json) => { log::info!("Received config {:#?}", json); - let mut options = client + let options = client .parse_options(json.pop().expect("invalid configuration request")) .unwrap(); - if !(options.main_path.is_absolute() - || options.main_path.to_str().unwrap().is_empty()) - { - if let Some(root_uri) = root_uri { - // Try to resolve the main path as relative. - options.main_path = - root_uri.to_file_path().unwrap().join(options.main_path); - } - } sender .send(InternalMessage::SetOptions(Arc::new(options))) .unwrap(); @@ -154,6 +145,21 @@ impl Server { }); } + /// Resolve the references in a project if they have not been resolved yet. Will return early if the project + /// has been resolved at least once. + /// + /// This should be called before every feature request. + /// + /// # Arguments + /// * `uri` - [Url] of a file in the project. + fn initialize_project_resolution(&mut self, uri: &Url) { + let main_id = self.store.write().resolve_project_references(uri); + if let Some(main_id) = main_id { + let main_path_uri = self.store.read().path_interner.lookup(main_id).clone(); + self.reload_project_diagnostics(main_path_uri); + } + } + fn initialize(&mut self) -> anyhow::Result<()> { let (id, params) = self.connection.initialize_start()?; let params: InitializeParams = serde_json::from_value(params)?; @@ -238,15 +244,12 @@ impl Server { self.pull_config(); - params + self.store.write().folders = params .workspace_folders .unwrap_or_default() .iter() - .for_each(|folder| { - if let Ok(folder_path) = folder.uri.to_file_path() { - self.store.write().find_documents(&folder_path) - } - }); + .filter_map(|folder| folder.uri.to_file_path().ok()) + .collect(); let _ = self.send_status(lsp_ext::ServerStatusParams { health: crate::lsp_ext::Health::Ok, diff --git a/crates/sourcepawn_lsp/src/server/notifications.rs b/crates/sourcepawn_lsp/src/server/notifications.rs index cff8e5f76..69cb5d057 100644 --- a/crates/sourcepawn_lsp/src/server/notifications.rs +++ b/crates/sourcepawn_lsp/src/server/notifications.rs @@ -20,16 +20,21 @@ impl Server { if !self.config_pulled { log::trace!("File {:?} was opened before the config was pulled.", uri); - self.store + let file_id = self + .store .write() - .documents - .insert(uri.clone(), Document::new(uri, params.text_document.text)); + .path_interner + .intern(uri.as_ref().clone()); + self.store.write().documents.insert( + file_id, + Document::new(uri, file_id, params.text_document.text), + ); return Ok(()); } // Don't parse the document if it has already been opened. // GoToDefinition request will trigger a new parse. - if let Some(document) = self.store.read().documents.get(&uri) { + if let Some(document) = self.store.read().get_from_uri(&uri) { if document.parsed { return Ok(()); } @@ -40,6 +45,13 @@ impl Server { .handle_open_document(&uri, text, &mut self.parser) .expect("Couldn't parse file"); + // In the first parse, it is expected that includes are missing. 
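The guard behaviour of `initialize_project_resolution` above is worth spelling out: the store resolves a project at most once, reporting the main file's id only on the first pass, and `None` afterwards. A self-contained toy model of that "resolve once" pattern (with an illustrative `FileId` type):

```rust
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct FileId(u32);

/// Toy model of a project's resolution state.
struct ProjectState {
    main: FileId,
    resolved: bool,
}

impl ProjectState {
    /// First call performs the (expensive) reference resolution and reports
    /// the main file so diagnostics can be reloaded; later calls are no-ops.
    fn resolve_references(&mut self) -> Option<FileId> {
        if self.resolved {
            return None; // Already resolved at least once.
        }
        // ... resolve every token of every file in the project here ...
        self.resolved = true;
        Some(self.main)
    }
}

fn main() {
    let mut project = ProjectState { main: FileId(0), resolved: false };
    assert_eq!(project.resolve_references(), Some(FileId(0)));
    assert_eq!(project.resolve_references(), None);
}
```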
+ if !self.store.read().first_parse { + self.store + .write() + .resolve_missing_includes(&mut self.parser); + } + Ok(()) } @@ -50,13 +62,17 @@ impl Server { normalize_uri(&mut params.text_document.uri); let uri = Arc::new(params.text_document.uri.clone()); - let Some(document) = self.store.read().get(&uri).or_else(|| { + let Some(document) = self.store.read().get_cloned_from_uri(&uri).or_else(|| { // If the document was not known, read its content first. self.store - .write().load(uri.to_file_path().ok()?, &mut self.parser) + .write() + .load(uri.to_file_path().ok()?, &mut self.parser) .ok()? }) else { - bail!("Failed to apply document edit on {}", params.text_document.uri); + bail!( + "Failed to apply document edit on {}", + params.text_document.uri + ); }; let mut text = document.text().to_string(); @@ -65,7 +81,7 @@ impl Server { .write() .handle_open_document(&uri, text, &mut self.parser)?; - self.lint_all_documents(); + self.lint_project(¶ms.text_document.uri); Ok(()) } @@ -82,11 +98,11 @@ impl Server { .store .write() .reload(change.uri.to_file_path().unwrap(), &mut self.parser); - self.reload_diagnostics(); + self.reload_diagnostics(change.uri); } FileChangeType::DELETED => { self.store.write().remove(&change.uri, &mut self.parser); - self.reload_diagnostics(); + self.reload_diagnostics(change.uri); } FileChangeType::CREATED => { if let Ok(path) = change.uri.to_file_path() { @@ -94,7 +110,7 @@ impl Server { .store .write() .load(path.as_path().to_path_buf(), &mut self.parser); - self.reload_diagnostics(); + self.reload_diagnostics(change.uri); } } _ => {} diff --git a/crates/sourcepawn_lsp/src/server/progress.rs b/crates/sourcepawn_lsp/src/server/progress.rs new file mode 100644 index 000000000..1b7405d83 --- /dev/null +++ b/crates/sourcepawn_lsp/src/server/progress.rs @@ -0,0 +1,66 @@ +use crate::{capabilities::ClientCapabilitiesExt, Server}; + +#[derive(Debug, Eq, PartialEq)] +pub(crate) enum Progress { + Begin, + Report, + End, +} + +impl Server { + pub(crate) fn report_progress( + &mut self, + title: &str, + state: Progress, + message: Option, + fraction: Option, + cancel_token: Option, + ) { + if !self.client_capabilities.has_work_done_progress_support() { + return; + } + let percentage = fraction.map(|f| { + assert!((0.0..=1.0).contains(&f)); + (f * 100.0) as u32 + }); + let cancellable = Some(cancel_token.is_some()); + let token = lsp_types::ProgressToken::String( + cancel_token.unwrap_or_else(|| format!("sourcepawnLsp/{title}")), + ); + + let work_done_progress = match state { + Progress::Begin => { + let _ = self + .client + .send_request_without_response::( + lsp_types::WorkDoneProgressCreateParams { + token: token.clone(), + }, + ); + + lsp_types::WorkDoneProgress::Begin(lsp_types::WorkDoneProgressBegin { + title: title.into(), + cancellable, + message, + percentage, + }) + } + Progress::Report => { + lsp_types::WorkDoneProgress::Report(lsp_types::WorkDoneProgressReport { + cancellable, + message, + percentage, + }) + } + Progress::End => { + lsp_types::WorkDoneProgress::End(lsp_types::WorkDoneProgressEnd { message }) + } + }; + let _ = self + .client + .send_notification::(lsp_types::ProgressParams { + token, + value: lsp_types::ProgressParamsValue::WorkDone(work_done_progress), + }); + } +} diff --git a/crates/sourcepawn_lsp/src/server/requests.rs b/crates/sourcepawn_lsp/src/server/requests.rs index cb59f1119..c802bbfa6 100644 --- a/crates/sourcepawn_lsp/src/server/requests.rs +++ b/crates/sourcepawn_lsp/src/server/requests.rs @@ -14,6 +14,8 @@ mod definition; mod 
document_symbol; mod hover; mod preprocessed_document; +mod project_main_path; +mod projects_graphviz; mod reference; mod rename; mod semantic_tokens; @@ -41,6 +43,8 @@ impl Server { .on::(|id, params| { self.preprocessed_document(id, params) })? + .on::(|id, params| self.projects_graphviz(id, params))? + .on::(|id, params| self.project_main_path(id, params))? .default() { self.connection.sender.send(response.into())?; diff --git a/crates/sourcepawn_lsp/src/server/requests/call_hierarchy.rs b/crates/sourcepawn_lsp/src/server/requests/call_hierarchy.rs index 053706dc0..9d484ef66 100644 --- a/crates/sourcepawn_lsp/src/server/requests/call_hierarchy.rs +++ b/crates/sourcepawn_lsp/src/server/requests/call_hierarchy.rs @@ -22,6 +22,7 @@ impl Server { .clone(), ); + self.initialize_project_resolution(&uri); let _ = self.read_unscanned_document(uri); self.run_query(id, move |store| { providers::call_hierarchy::prepare(store, params) diff --git a/crates/sourcepawn_lsp/src/server/requests/completion.rs b/crates/sourcepawn_lsp/src/server/requests/completion.rs index 0bba97399..4e16a488a 100644 --- a/crates/sourcepawn_lsp/src/server/requests/completion.rs +++ b/crates/sourcepawn_lsp/src/server/requests/completion.rs @@ -13,7 +13,10 @@ impl Server { ) -> anyhow::Result<()> { normalize_uri(&mut params.text_document_position.text_document.uri); let uri = Arc::new(params.text_document_position.text_document.uri.clone()); + + self.initialize_project_resolution(&uri); let _ = self.read_unscanned_document(uri); + self.run_query(id, move |store| { providers::completion::provide_completions(store, params) }); diff --git a/crates/sourcepawn_lsp/src/server/requests/definition.rs b/crates/sourcepawn_lsp/src/server/requests/definition.rs index 091741d58..6d533bfb5 100644 --- a/crates/sourcepawn_lsp/src/server/requests/definition.rs +++ b/crates/sourcepawn_lsp/src/server/requests/definition.rs @@ -19,6 +19,8 @@ impl Server { .uri .clone(), ); + + self.initialize_project_resolution(&uri); let _ = self.read_unscanned_document(uri); self.run_query(id, move |store| { diff --git a/crates/sourcepawn_lsp/src/server/requests/document_symbol.rs b/crates/sourcepawn_lsp/src/server/requests/document_symbol.rs index 701dec2e4..20f157b58 100644 --- a/crates/sourcepawn_lsp/src/server/requests/document_symbol.rs +++ b/crates/sourcepawn_lsp/src/server/requests/document_symbol.rs @@ -14,6 +14,8 @@ impl Server { ) -> anyhow::Result<()> { normalize_uri(&mut params.text_document.uri); let uri = Arc::new(params.text_document.uri.clone()); + + self.initialize_project_resolution(&uri); let _ = self.read_unscanned_document(uri); self.run_query(id, move |store| { diff --git a/crates/sourcepawn_lsp/src/server/requests/hover.rs b/crates/sourcepawn_lsp/src/server/requests/hover.rs index 454ecccd0..7721d6b3c 100644 --- a/crates/sourcepawn_lsp/src/server/requests/hover.rs +++ b/crates/sourcepawn_lsp/src/server/requests/hover.rs @@ -15,6 +15,8 @@ impl Server { .uri .clone(), ); + + self.initialize_project_resolution(&uri); let _ = self.read_unscanned_document(uri); self.run_query(id, move |store| { diff --git a/crates/sourcepawn_lsp/src/server/requests/preprocessed_document.rs b/crates/sourcepawn_lsp/src/server/requests/preprocessed_document.rs index a900cdef8..fb0a18b1a 100644 --- a/crates/sourcepawn_lsp/src/server/requests/preprocessed_document.rs +++ b/crates/sourcepawn_lsp/src/server/requests/preprocessed_document.rs @@ -10,9 +10,11 @@ impl Server { id: RequestId, params: PreprocessedDocumentParams, ) -> anyhow::Result<()> { - let Some(mut 
text_document) = params.text_document else { bail!("No TextDocument passed to command");}; + let Some(mut text_document) = params.text_document else { + bail!("No TextDocument passed to command"); + }; normalize_uri(&mut text_document.uri); - if let Some(document) = self.store.read().documents.get(&text_document.uri) { + if let Some(document) = self.store.read().get_from_uri(&text_document.uri) { let text = document.preprocessed_text.clone(); self.run_query(id, move |_store| text); diff --git a/crates/sourcepawn_lsp/src/server/requests/project_main_path.rs b/crates/sourcepawn_lsp/src/server/requests/project_main_path.rs new file mode 100644 index 000000000..cd5431dd0 --- /dev/null +++ b/crates/sourcepawn_lsp/src/server/requests/project_main_path.rs @@ -0,0 +1,33 @@ +use anyhow::bail; +use lsp_server::RequestId; +use store::normalize_uri; + +use crate::{lsp_ext::ProjectMainPathParams, Server}; + +impl Server { + pub(super) fn project_main_path( + &mut self, + id: RequestId, + params: ProjectMainPathParams, + ) -> anyhow::Result<()> { + let Some(mut uri) = params.uri else { + bail!("No uri passed to command"); + }; + normalize_uri(&mut uri); + let Some(file_id) = self.store.read().path_interner.get(&uri) else { + bail!("No file ID found for URI {:?}", uri); + }; + let Some(root_node) = self.store.read().projects.find_root_from_id(file_id) else { + bail!("No project root found for file ID {:?}", file_id); + }; + let main_uri = self + .store + .read() + .path_interner + .lookup(root_node.file_id) + .clone(); + self.run_query(id, move |_store| main_uri); + + Ok(()) + } +} diff --git a/crates/sourcepawn_lsp/src/server/requests/projects_graphviz.rs b/crates/sourcepawn_lsp/src/server/requests/projects_graphviz.rs new file mode 100644 index 000000000..77fb0b911 --- /dev/null +++ b/crates/sourcepawn_lsp/src/server/requests/projects_graphviz.rs @@ -0,0 +1,19 @@ +use anyhow::bail; +use lsp_server::RequestId; + +use crate::{lsp_ext::ProjectsGraphvizParams, Server}; + +impl Server { + pub(super) fn projects_graphviz( + &mut self, + id: RequestId, + _params: ProjectsGraphvizParams, + ) -> anyhow::Result<()> { + if let Some(graphviz) = self.store.read().represent_graphs() { + self.run_query(id, move |_store| graphviz); + return Ok(()); + } + + bail!("Failed to load projects graph."); + } +} diff --git a/crates/sourcepawn_lsp/src/server/requests/reference.rs b/crates/sourcepawn_lsp/src/server/requests/reference.rs index c61660a05..e35d9d606 100644 --- a/crates/sourcepawn_lsp/src/server/requests/reference.rs +++ b/crates/sourcepawn_lsp/src/server/requests/reference.rs @@ -13,6 +13,8 @@ impl Server { ) -> anyhow::Result<()> { normalize_uri(&mut params.text_document_position.text_document.uri); let uri = Arc::new(params.text_document_position.text_document.uri.clone()); + + self.initialize_project_resolution(&uri); let _ = self.read_unscanned_document(uri); self.run_query(id, move |store| { diff --git a/crates/sourcepawn_lsp/src/server/requests/rename.rs b/crates/sourcepawn_lsp/src/server/requests/rename.rs index e6f244f67..cecbbda85 100644 --- a/crates/sourcepawn_lsp/src/server/requests/rename.rs +++ b/crates/sourcepawn_lsp/src/server/requests/rename.rs @@ -9,6 +9,8 @@ impl Server { pub(super) fn rename(&mut self, id: RequestId, mut params: RenameParams) -> anyhow::Result<()> { normalize_uri(&mut params.text_document_position.text_document.uri); let uri = Arc::new(params.text_document_position.text_document.uri.clone()); + + self.initialize_project_resolution(&uri); let _ = self.read_unscanned_document(uri); 
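The request handlers in this region (call hierarchy, completion, definition, document symbol, hover, reference, rename, and the two below) now all repeat the same prelude: normalize the URI, resolve the project once, scan the document if it is unknown. A possible follow-up refactor, not part of this PR, could factor it out; `prepare_request` is an invented name and this sketch only compiles inside the crate:

```rust
impl Server {
    /// Hypothetical helper (not in this PR) for the prelude every feature
    /// request repeats before running its query.
    fn prepare_request(&mut self, uri: &mut lsp_types::Url) {
        store::normalize_uri(uri);
        let uri = std::sync::Arc::new(uri.clone());
        // Resolve the project containing `uri` once, then make sure the
        // document itself has been scanned before answering the query.
        self.initialize_project_resolution(&uri);
        let _ = self.read_unscanned_document(uri);
    }
}
```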
self.run_query(id, move |store| { diff --git a/crates/sourcepawn_lsp/src/server/requests/semantic_tokens.rs b/crates/sourcepawn_lsp/src/server/requests/semantic_tokens.rs index 90f44a2ac..cad390f0f 100644 --- a/crates/sourcepawn_lsp/src/server/requests/semantic_tokens.rs +++ b/crates/sourcepawn_lsp/src/server/requests/semantic_tokens.rs @@ -13,6 +13,8 @@ impl Server { ) -> anyhow::Result<()> { normalize_uri(&mut params.text_document.uri); let uri = Arc::new(params.text_document.uri.clone()); + + self.initialize_project_resolution(&uri); let _ = self.read_unscanned_document(uri); self.run_query(id, move |store| { diff --git a/crates/sourcepawn_lsp/src/server/requests/signature_help.rs b/crates/sourcepawn_lsp/src/server/requests/signature_help.rs index f9400476a..58fead107 100644 --- a/crates/sourcepawn_lsp/src/server/requests/signature_help.rs +++ b/crates/sourcepawn_lsp/src/server/requests/signature_help.rs @@ -19,6 +19,8 @@ impl Server { .uri .clone(), ); + + self.initialize_project_resolution(&uri); let _ = self.read_unscanned_document(uri); self.run_query(id, move |store| { diff --git a/crates/sourcepawn_lsp/tests/fixture.rs b/crates/sourcepawn_lsp/tests/fixture.rs index 46420d001..dae75b437 100644 --- a/crates/sourcepawn_lsp/tests/fixture.rs +++ b/crates/sourcepawn_lsp/tests/fixture.rs @@ -184,14 +184,6 @@ impl TestBed { }) .collect(); - let main_path = locations - .iter() - .find(|e| e.uri.as_str().contains("main.sp")) - .unwrap() - .uri - .to_file_path() - .unwrap(); - let (server_conn, client_conn) = Connection::memory(); let (internal_tx, internal_rx) = crossbeam_channel::unbounded(); @@ -206,10 +198,7 @@ impl TestBed { match message { lsp_server::Message::Request(request) => { if request.method == "workspace/configuration" { - let options = Options { - main_path: main_path.clone(), - ..Default::default() - }; + let options = Options::default(); client .send_response(Response::new_ok(request.id, vec![options])) .unwrap(); diff --git a/crates/store/Cargo.toml b/crates/store/Cargo.toml index 49c18713e..b7d6176eb 100644 --- a/crates/store/Cargo.toml +++ b/crates/store/Cargo.toml @@ -11,6 +11,7 @@ doctest = false [dependencies] semantic_analyzer.workspace = true +sourcepawn_lexer.workspace = true linter.workspace = true lsp-types.workspace = true anyhow.workspace = true @@ -29,3 +30,5 @@ strip_bom.workspace = true log.workspace = true serde.workspace = true serde_json.workspace = true +nohash-hasher.workspace = true +indexmap.workspace = true diff --git a/crates/store/src/document.rs b/crates/store/src/document.rs index f724d6dc8..11bdc55ac 100644 --- a/crates/store/src/document.rs +++ b/crates/store/src/document.rs @@ -9,6 +9,7 @@ use preprocessor::{Macro, Offset}; use semantic_analyzer::{SPToken, Token}; use std::{path::PathBuf, sync::Arc}; use strip_bom::StripBom; +use syntax::FileId; use syntax::SPItem; use tree_sitter::{Node, Query, QueryCursor}; @@ -27,15 +28,35 @@ lazy_static! { }; } +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)] +pub enum FileExtension { + #[default] + Sp, + Inc, +} + +pub fn uri_to_file_extension(uri: &Url) -> Option { + let path = uri.to_file_path().ok()?; + let extension = path.extension()?; + match extension.to_str()? 
{ + "sp" => Some(FileExtension::Sp), + "inc" => Some(FileExtension::Inc), + _ => None, + } +} + #[derive(Debug, Clone)] pub struct Document { pub uri: Arc<Url>, + pub file_id: FileId, + extension: FileExtension, pub text: String, pub preprocessed_text: String, pub(super) being_preprocessed: bool, pub sp_items: Vec<Arc<RwLock<SPItem>>>, - pub(crate) includes: FxHashMap<Url, Token>, + pub(crate) includes: FxHashMap<FileId, Token>, pub parsed: bool, + resolved: bool, pub(crate) tokens: Vec<SPToken>, pub missing_includes: FxHashMap<String, Range>, pub unresolved_tokens: FxHashSet<String>, @@ -46,8 +67,21 @@ pub struct Document } impl Document { - pub fn new(uri: Arc<Url>, text: String) -> Self { + pub fn new(uri: Arc<Url>, file_id: FileId, text: String) -> Self { Self { + // `Path::ends_with` only matches whole path components, so derive the extension + // from the uri itself. Defaults to `Sp` when there is no file path (e.g. when + // using the debug preprocessed_text command in VSCode) or an unknown extension. + extension: uri_to_file_extension(&uri).unwrap_or_default(), + file_id, uri, preprocessed_text: String::new(), being_preprocessed: false, @@ -62,23 +96,26 @@ macros: FxHashMap::default(), macro_symbols: vec![], offsets: FxHashMap::default(), + resolved: false, } } + /// Return `true` if the document tokens have been resolved at least once, `false` otherwise. + pub fn is_resolved(&self) -> bool { + self.resolved + } + + /// Mark the document tokens as resolved at least once. + pub fn mark_as_resolved(&mut self) { + self.resolved = true; + } + pub fn text(&self) -> &str { &self.text } - pub fn extension(&self) -> anyhow::Result<String> { - let extension = self - .path()? - .extension() - .ok_or_else(|| anyhow!("Failed to get file extension."))? - .to_str() - .ok_or_else(|| anyhow!("Failed to convert extension to string."))? - .to_string(); - - Ok(extension) + pub fn extension(&self) -> FileExtension { + self.extension } pub(crate) fn path(&self) -> anyhow::Result<PathBuf> { diff --git a/crates/store/src/graph.rs b/crates/store/src/graph.rs new file mode 100644 index 000000000..cd585adb1 --- /dev/null +++ b/crates/store/src/graph.rs @@ -0,0 +1,427 @@ +use std::hash::{Hash, Hasher}; + +use anyhow::bail; +use fxhash::{FxHashMap, FxHashSet}; +use lazy_static::lazy_static; +use lsp_types::Url; +use regex::Regex; +use sourcepawn_lexer::{PreprocDir, SourcepawnLexer, TokenKind}; +use syntax::{uri_to_file_name, FileId}; + +use crate::{ + document::{uri_to_file_extension, Document, FileExtension}, + Store, +}; + +#[derive(Debug, Clone)] +pub struct Node { + pub file_id: FileId, + pub extension: FileExtension, +} + +impl PartialEq for Node { + fn eq(&self, other: &Node) -> bool { + self.file_id == other.file_id + } +} + +impl Eq for Node {} + +impl Hash for Node { + fn hash<H: Hasher>(&self, state: &mut H) { + self.file_id.hash(state); + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Edge { + pub source: Node, + pub target: Node, +} + +#[derive(Debug, Default, Clone)] +pub struct Graph { + pub edges: FxHashSet<Edge>, + pub missing: Vec, + pub nodes: FxHashSet<Node>, +} + +#[derive(Debug)] +pub struct SubGraph { + pub root: Node, + pub nodes: Vec<Node>, + pub edges: Vec<Edge>, +} + +impl Store { + /// Get all the files that are included in the given document. + fn get_include_ids_from_document(&self, document: &Document) -> Vec<(FileId, FileExtension)> { + let mut file_ids = vec![]; + let lexer = SourcepawnLexer::new(&document.text); + for symbol in lexer { + if symbol.token_kind != TokenKind::PreprocDir(PreprocDir::MInclude) { + continue; + } + let text = symbol.text(); + lazy_static!
{ + static ref RE1: Regex = Regex::new(r"<([^>]+)>").unwrap(); + static ref RE2: Regex = Regex::new("\"([^\"]+)\"").unwrap(); + } + let mut file_id = None; + if let Some(caps) = RE1.captures(&text) { + if let Some(path) = caps.get(1) { + file_id = + self.resolve_import(&mut path.as_str().to_string(), &document.uri, false); + } + } else if let Some(caps) = RE2.captures(&text) { + if let Some(path) = caps.get(1) { + file_id = + self.resolve_import(&mut path.as_str().to_string(), &document.uri, true); + } + } + if let Some(file_id) = file_id { + file_ids.push(( + file_id, + uri_to_file_extension(self.path_interner.lookup(file_id)).unwrap_or_default(), + )); + } + } + + file_ids + } + + pub fn load_projects_graph(&mut self) -> Graph { + let mut graph = Graph::default(); + + for document in self.documents.values() { + let source = Node { + file_id: document.file_id, + extension: document.extension(), + }; + graph.nodes.insert(source.clone()); + for (file_id, extension) in self.get_include_ids_from_document(document) { + let target = Node { file_id, extension }; + graph.edges.insert(Edge { + source: source.clone(), + target: target.clone(), + }); + graph.nodes.insert(target); + } + } + + graph + } + + pub fn add_file_to_projects(&mut self, file_id: &FileId) -> anyhow::Result<()> { + let Some(document) = self.documents.get(file_id) else { + bail!( + "Could not find document to insert from uri {:?}", + self.path_interner.lookup(*file_id) + ); + }; + for (file_id, extension) in self.get_include_ids_from_document(document) { + self.projects + .add_file_id(document.file_id, document.extension(), file_id, extension) + } + + Ok(()) + } + + pub fn remove_file_from_projects(&mut self, file_id: &FileId) { + self.projects + .edges + .retain(|edge| &edge.source.file_id != file_id && &edge.target.file_id != file_id); + self.projects.nodes.remove(&Node { + file_id: *file_id, + extension: FileExtension::Sp, // We don't care about the extension here. + }); + } +} + +impl Graph { + pub fn add_file_id( + &mut self, + source_id: FileId, + source_extension: FileExtension, + target_id: FileId, + target_extension: FileExtension, + ) { + let source = Node { + file_id: source_id, + extension: source_extension, + }; + let target = Node { + file_id: target_id, + extension: target_extension, + }; + self.edges.insert(Edge { + source: source.clone(), + target: target.clone(), + }); + self.nodes.insert(source); + self.nodes.insert(target); + } + + fn get_adjacent_targets(&self) -> FxHashMap<Node, FxHashSet<Node>> { + let mut adj_targets: FxHashMap<Node, FxHashSet<Node>> = FxHashMap::default(); + for edge in self.edges.iter() { + adj_targets + .entry(edge.source.clone()) + .or_insert_with(FxHashSet::default) + .insert(edge.target.clone()); + } + + adj_targets + } + + pub fn find_roots(&self) -> Vec<Node> { + let mut adj_map: FxHashMap<Node, (u32, u32)> = FxHashMap::default(); + for edge in self.edges.iter() { + adj_map + .entry(edge.source.clone()) + .or_insert_with(|| (0, 0)) + .1 += 1; + adj_map + .entry(edge.target.clone()) + .or_insert_with(|| (0, 0)) + .0 += 1; + } + for node in self.nodes.iter() { + adj_map.entry(node.clone()).or_insert_with(|| (0, 0)); + } + adj_map + .iter() + .filter_map(|(node, (nb_source, nb_target))| { + if *nb_target != 0 || *nb_source == 0 { + Some(node.clone()) + } else { + None + } + }) + .collect::<Vec<Node>>() + } + + /// Get the root of the [subgraph](SubGraph) from a given [file_id](FileId). + /// + /// - If the [file_id](FileId) is not in the graph, return [None]. + /// - If the [file_id](FileId) or one of its parents has more than one parent, return [None].
+ /// - If the [file_id](FileId) or one of its parents is an include file, return the [file_id](FileId) of the include file. + pub fn find_root_from_id(&self, file_id: FileId) -> Option<Node> { + let mut adj_sources: FxHashMap<Node, FxHashSet<Node>> = FxHashMap::default(); + for edge in self.edges.iter() { + adj_sources + .entry(edge.target.clone()) + .or_insert_with(FxHashSet::default) + .insert(edge.source.clone()); + } + let mut child = &Node { + file_id, + extension: FileExtension::Sp, + }; + while let Some(parents) = adj_sources.get(child) { + if parents.len() == 1 { + let parent = parents.iter().next().unwrap(); + + // If the parent is an include file, we don't want to go further. + // Include files can be included in multiple files. + if child.extension == FileExtension::Inc && parent.extension == FileExtension::Sp { + return Some(child.clone()); + } + child = parent; + } else if child.extension == FileExtension::Inc { + return Some(child.clone()); + } else { + return None; + } + } + + Some(child.clone()) + } + + pub fn get_subgraph_ids_from_root(&self, root_id: FileId) -> FxHashSet<FileId> { + let adj_targets = self.get_adjacent_targets(); + let mut visited = FxHashSet::default(); + let mut nodes = vec![]; + let mut edges = vec![]; + let root = Node { + file_id: root_id, + extension: FileExtension::Sp, + }; + dfs(&root, &adj_targets, &mut visited, &mut nodes, &mut edges); + visited.insert(root.clone()); + + nodes.iter().map(|node| node.file_id).collect() + } + + pub fn find_subgraphs(&self) -> Vec<SubGraph> { + let adj_targets = self.get_adjacent_targets(); + let mut subgraphs = vec![]; + for root in self.find_roots() { + let mut visited = FxHashSet::default(); + let mut nodes = vec![]; + let mut edges = vec![]; + dfs(&root, &adj_targets, &mut visited, &mut nodes, &mut edges); + visited.insert(root.clone()); + subgraphs.push(SubGraph { + root: root.clone(), + nodes, + edges, + }); + } + + subgraphs + } +} + +impl Store { + pub fn represent_graphs(&self) -> Option<String> { + let mut out = vec!["digraph G {".to_string()]; + let subgraphs = self.projects.find_subgraphs(); + for (i, sub_graph) in subgraphs.iter().enumerate() { + out.push(format!( + r#" subgraph cluster_{} {{ + style=filled; + color={}; + node [style=filled,color=white]; + label = "Project n°{}";"#, + i, + COLORS[i % COLORS.len()], + i + )); + for edge in sub_graph.edges.iter() { + out.push(format!( + "\"{}\" -> \"{}\";", + uri_to_file_name(self.path_interner.lookup(edge.source.file_id))?, + uri_to_file_name(self.path_interner.lookup(edge.target.file_id))? + )); + } + out.push("}".to_string()); + } + for sub_graph in subgraphs.iter() { + if sub_graph.root.extension == FileExtension::Inc { + continue; + } + out.push(format!( + "\"{}\" [shape=Mdiamond];", + uri_to_file_name(self.path_interner.lookup(sub_graph.root.file_id))?
+ )) + } + out.push("}".to_string()); + + Some(out.join("\n")) + } +} + +fn dfs( + node: &Node, + adj_map: &FxHashMap>, + visited: &mut FxHashSet, + nodes: &mut Vec, + edges: &mut Vec, +) { + visited.insert(node.clone()); + nodes.push(node.clone()); + + if let Some(neighbors) = adj_map.get(node) { + for neighbor in neighbors { + if !visited.contains(neighbor) { + edges.push(Edge { + source: node.clone(), + target: neighbor.clone(), + }); + dfs(neighbor, adj_map, visited, nodes, edges); + } + } + } +} + +static COLORS: [&str; 88] = [ + "aliceblue", + "antiquewhite", + "aqua", + "aquamarine", + "azure", + "beige", + "bisque", + "black", + "blanchedalmond", + "blue", + "blueviolet", + "brown", + "burlywood", + "cadetblue", + "chartreuse", + "chocolate", + "coral", + "cornflowerblue", + "cornsilk", + "crimson", + "cyan", + "darkblue", + "darkcyan", + "darkgoldenrod", + "darkgray", + "darkgreen", + "darkkhaki", + "darkmagenta", + "darkolivegreen", + "darkorange", + "darkorchid", + "darkred", + "darksalmon", + "darkseagreen", + "darkslateblue", + "darkslategray", + "darkturquoise", + "darkviolet", + "deeppink", + "deepskyblue", + "dimgray", + "dodgerblue", + "firebrick", + "floralwhite", + "forestgreen", + "fuchsia", + "gainsboro", + "ghostwhite", + "gold", + "goldenrod", + "gray", + "green", + "greenyellow", + "honeydew", + "hotpink", + "indianred", + "indigo", + "ivory", + "khaki", + "lavender", + "lavenderblush", + "lawngreen", + "lemonchiffon", + "lightblue", + "lightcoral", + "lightcyan", + "lightgoldenrodyellow", + "lightgray", + "lightgreen", + "lightpink", + "lightsalmon", + "lightseagreen", + "lightskyblue", + "lightslategray", + "lightsteelblue", + "lightyellow", + "lime", + "limegreen", + "linen", + "magenta", + "maroon", + "mediumaquamarine", + "mediumblue", + "mediumorchid", + "mediumpurple", + "mediumseagreen", + "mediumslateblue", + "mediumspringgreen", +]; diff --git a/crates/store/src/include.rs b/crates/store/src/include.rs index 54c554876..e4feb581c 100644 --- a/crates/store/src/include.rs +++ b/crates/store/src/include.rs @@ -3,7 +3,7 @@ use std::sync::Arc; use lsp_types::{Range, Url}; use parking_lot::RwLock; use semantic_analyzer::Token; -use syntax::{include_item::IncludeItem, SPItem}; +use syntax::{include_item::IncludeItem, FileId, SPItem}; use crate::{document::Document, Store}; @@ -33,12 +33,12 @@ impl Store { /// * `include_text` - Text of the include such as `"file.sp"` or ``. /// * `documents` - Set of known documents. /// * `document_uri` - Uri of the document where the include declaration is parsed from. - pub(crate) fn resolve_import( - &mut self, + pub fn resolve_import( + &self, include_text: &mut String, document_uri: &Arc, quoted: bool, - ) -> Option { + ) -> Option { // Add the extension to the file if needed. let include_text = add_include_extension(include_text, self.environment.amxxpawn_mode); @@ -46,31 +46,10 @@ impl Store { // Search for the relative path. let document_path = document_uri.to_file_path().ok()?; let parent_path = document_path.parent()?; - let mut include_file_path = parent_path.join(include_text); - let mut uri = Url::from_file_path(&include_file_path).ok()?; - if self.documents.contains_key(&uri) { - return Some(uri); - } - if let Ok(Some(main_path_uri)) = self.environment.options.get_main_path_uri() { - let main_path = main_path_uri.to_file_path().ok()?; - let main_path_parent = main_path.parent()?; - if parent_path != main_path_parent { - // Don't look for includes in the include folder if we are not at the root - // of the project. 
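`find_roots` in the new `graph.rs` classifies nodes by counting, per node, how many edges leave it and how many enter it (the crate's version also keeps nodes that have outgoing edges). A self-contained reduction of the same degree-counting bookkeeping, simplified to the classic "no incoming edge" definition of a root and using plain integers as file ids:

```rust
use std::collections::HashMap;

/// Roots = nodes no edge points to (isolated nodes included). In an include
/// graph these are the candidate "main files" of the projects.
fn find_roots(nodes: &[u32], edges: &[(u32, u32)]) -> Vec<u32> {
    let mut in_degree: HashMap<u32, usize> = nodes.iter().map(|&n| (n, 0)).collect();
    for &(_source, target) in edges {
        *in_degree.entry(target).or_insert(0) += 1;
    }
    let mut roots: Vec<u32> = in_degree
        .into_iter()
        .filter_map(|(node, degree)| (degree == 0).then_some(node))
        .collect();
    roots.sort_unstable();
    roots
}

fn main() {
    // main.sp (0) includes util.inc (1) and shared.inc (2); lone.sp (3) is standalone.
    assert_eq!(find_roots(&[0, 1, 2, 3], &[(0, 1), (0, 2)]), vec![0, 3]);
}
```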
- return None; - } - include_file_path = main_path_parent.join("include").join(include_text); - log::trace!( - "Looking for {:#?} in {:#?}", - include_text, - include_file_path - ); - - uri = Url::from_file_path(&include_file_path).ok()?; - if self.documents.contains_key(&uri) { - return Some(uri); - } - return None; + let include_file_path = parent_path.join(include_text); + let uri = Url::from_file_path(&include_file_path).ok()?; + if self.contains_uri(&uri) { + return self.path_interner.get(&uri); } } @@ -93,41 +72,48 @@ impl Store { ); } let uri = Url::from_file_path(&include_file_path).ok()?; - if self.documents.contains_key(&uri) { - return Some(uri); + if self.contains_uri(&uri) { + return self.path_interner.get(&uri); } // Look for the includes in the include directories. for include_directory in self.environment.options.includes_directories.iter() { let path = include_directory.clone().join(include_text); let uri = Url::from_file_path(path).ok()?; - if self.documents.contains_key(&uri) { - return Some(uri); + if self.contains_uri(&uri) { + return self.path_interner.get(&uri); } } None } -} - -pub fn add_include(document: &mut Document, include_uri: Url, path: String, range: Range) { - document.includes.insert( - include_uri.clone(), - Token { - text: path.clone(), - range, - }, - ); - let include_uri = Arc::new(include_uri); + pub fn add_include( + &self, + document: &mut Document, + include_id: FileId, + path: String, + range: Range, + ) { + let include_uri = Arc::new(self.path_interner.lookup(include_id).clone()); + document.includes.insert( + include_id, + Token { + text: path.clone(), + range, + }, + ); - let include_item = IncludeItem { - name: path, - range, - v_range: document.build_v_range(&range), - uri: document.uri.clone(), - include_uri, - }; - let include_item = Arc::new(RwLock::new(SPItem::Include(include_item))); - document.sp_items.push(include_item); + let include_item = IncludeItem { + name: path, + range, + v_range: document.build_v_range(&range), + uri: document.uri.clone(), + file_id: document.file_id, + include_uri, + include_id, + }; + let include_item = Arc::new(RwLock::new(SPItem::Include(include_item))); + document.sp_items.push(include_item); + } } diff --git a/crates/store/src/lib.rs b/crates/store/src/lib.rs index 06373a379..bbe69cb62 100644 --- a/crates/store/src/lib.rs +++ b/crates/store/src/lib.rs @@ -1,11 +1,12 @@ -use ::syntax::SPItem; +use ::syntax::{FileId, SPItem}; use anyhow::anyhow; use fxhash::{FxHashMap, FxHashSet}; -use include::add_include; +use graph::Graph; use linter::DiagnosticsManager; use lsp_types::{Range, Url}; use parking_lot::RwLock; use parser::Parser; +use path_interner::PathInterner; use preprocessor::{Macro, SourcepawnPreprocessor}; use semantic_analyzer::{purge_references, Token}; use std::{ @@ -18,18 +19,22 @@ use walkdir::WalkDir; pub mod document; pub mod environment; +pub mod graph; pub mod include; pub mod main_heuristic; pub mod options; +mod path_interner; mod semantics; pub mod syntax; use crate::{document::Document, environment::Environment}; -#[derive(Clone, Default)] +#[derive(Debug, Clone, Default)] pub struct Store { /// Any documents the server has handled, indexed by their URL. 
- pub documents: FxHashMap<Arc<Url>, Document>, + pub documents: FxHashMap<FileId, Document>, + + pub path_interner: PathInterner, pub environment: Environment, @@ -39,6 +44,10 @@ pub struct Store { pub watcher: Option>>, pub diagnostics: DiagnosticsManager, + + pub folders: Vec<PathBuf>, + + pub projects: Graph, } impl Store { @@ -54,35 +63,74 @@ impl Store { self.documents.values().cloned() } - pub fn get(&self, uri: &Url) -> Option<Document> { - self.documents.get(uri).cloned() + /// Returns the [MainPath](PathBuf) of a project given the [FileId](FileId) of a file in the project. + pub fn get_project_main_path_from_id(&self, file_id: &FileId) -> Option<PathBuf> { + if let Some(node) = self.projects.find_root_from_id(*file_id) { + return self.path_interner.lookup(node.file_id).to_file_path().ok(); + } + + None + } + + pub fn contains_uri(&self, uri: &Url) -> bool { + let Some(file_id) = self.path_interner.get(uri) else { + return false; + }; + self.documents.contains_key(&file_id) + } + + pub fn get_from_uri(&self, uri: &Url) -> Option<&Document> { + self.documents.get(&self.path_interner.get(uri)?) } - pub fn get_text(&self, uri: &Url) -> Option<String> { - if let Some(document) = self.documents.get(uri) { + pub fn get_cloned_from_uri(&self, uri: &Url) -> Option<Document> { + self.documents.get(&self.path_interner.get(uri)?).cloned() + } + + pub fn get_cloned(&self, file_id: &FileId) -> Option<Document> { + self.documents.get(file_id).cloned() + } + + pub fn get_text(&self, file_id: &FileId) -> Option<String> { + if let Some(document) = self.documents.get(file_id) { return Some(document.text.clone()); } None } + pub fn folders(&self) -> Vec<PathBuf> { + let mut res = self.folders.clone(); + res.extend( + self.environment + .options + .includes_directories + .iter() + .cloned(), + ); + + res + } + pub fn remove(&mut self, uri: &Url, parser: &mut tree_sitter::Parser) { + let Some(file_id) = self.path_interner.get(uri) else { + return; + }; // Open the document as empty to delete the references. let _ = self.handle_open_document(&Arc::new((*uri).clone()), "".to_string(), parser); - self.documents.remove(uri); - let uri_arc = Arc::new(uri.clone()); + self.documents.remove(&file_id); for document in self.documents.values_mut() { - if let Some(include) = document.includes.get(uri) { + if let Some(include) = document.includes.get(&file_id) { // Consider the include to be missing. document .missing_includes .insert(include.text.clone(), include.range); } - document.includes.remove(uri); + document.includes.remove(&file_id); let mut sp_items = vec![]; // Purge references to the deleted file. for item in document.sp_items.iter() { - purge_references(item, &uri_arc); + purge_references(item, file_id); // Delete Include items.
match &*item.read() { SPItem::Include(include_item) => { @@ -95,6 +143,8 @@ } document.sp_items = sp_items; } + + self.remove_file_from_projects(&file_id); } pub fn register_watcher(&mut self, watcher: notify::RecommendedWatcher) { @@ -106,24 +156,26 @@ path: PathBuf, parser: &mut tree_sitter::Parser, ) -> anyhow::Result<Option<Document>> { - let uri = Arc::new(Url::from_file_path(&path).map_err(|err| { + let mut uri = Url::from_file_path(&path).map_err(|err| { anyhow!( "Failed to convert path to URI while loading a file: {:?}", err ) - })?); - - if let Some(document) = self.get(&uri) { - return Ok(Some(document)); - } - + })?; + normalize_uri(&mut uri); if !self.is_sourcepawn_file(&path) { return Ok(None); } + let file_id = self.path_interner.intern(uri.clone()); + if let Some(document) = self.get_cloned(&file_id) { + return Ok(Some(document)); + } + + self.add_file_to_projects(&file_id)?; let data = fs::read(&path)?; let text = String::from_utf8_lossy(&data).into_owned(); - let document = self.handle_open_document(&uri, text, parser)?; + let document = self.handle_open_document(&Arc::new(uri), text, parser)?; self.resolve_missing_includes(parser); Ok(Some(document)) @@ -134,44 +186,50 @@ path: PathBuf, parser: &mut tree_sitter::Parser, ) -> anyhow::Result<Option<Document>> { - let uri = Arc::new(Url::from_file_path(&path).map_err(|err| { + let mut uri = Url::from_file_path(&path).map_err(|err| { anyhow!( - "Failed to convert path to URI while reloading a file: {:?}", + "Failed to convert path to URI while reloading a file: {:?}", err ) - })?); - + })?; + normalize_uri(&mut uri); if !self.is_sourcepawn_file(&path) { return Ok(None); } + let file_id = self.path_interner.intern(uri.clone()); let data = fs::read(&path)?; let text = String::from_utf8_lossy(&data).into_owned(); - let document = self.handle_open_document(&uri, text, parser)?; + let document = self.handle_open_document(&Arc::new(uri), text, parser)?; self.resolve_missing_includes(parser); + self.remove_file_from_projects(&file_id); + self.add_file_to_projects(&file_id)?; + Ok(Some(document)) } - fn resolve_missing_includes(&mut self, parser: &mut tree_sitter::Parser) { + pub fn resolve_missing_includes(&mut self, parser: &mut tree_sitter::Parser) { let mut to_reload = FxHashSet::default(); for document in self.documents.values() { for missing_include in document.missing_includes.keys() { - for uri in self.documents.keys() { - if uri.as_str().contains(missing_include) { - to_reload.insert(document.uri.clone()); + for document_ in self.documents.values() { + if document_.uri.as_str().contains(missing_include) { + to_reload.insert(document.file_id); } } } } - for uri in to_reload { - if let Some(document) = self.documents.get(&uri) { - let _ = self.handle_open_document(&uri, document.text.clone(), parser); + for file_id in to_reload { + if let Some(document) = self.documents.get(&file_id) { + let _ = + self.handle_open_document(&document.uri.clone(), document.text.clone(), parser); } } } - pub fn find_documents(&mut self, base_path: &PathBuf) { + pub fn discover_documents(&mut self, base_path: &PathBuf) { + log::debug!("Finding documents in {:?}", base_path); for entry in WalkDir::new(base_path) .follow_links(true) .into_iter() @@ -187,15 +245,16 @@ if let Ok(mut uri) = Url::from_file_path(entry.path()) { log::debug!("URI: {:?} path: {:?}", uri, entry.path()); normalize_uri(&mut uri); - if self.documents.contains_key(&uri) { + let file_id = self.path_interner.intern(uri.clone()); + if
self.documents.contains_key(&file_id) { continue; } let Ok(text) = read_to_string_lossy(entry.path().to_path_buf()) else { - log::error!("Failed to read file {:?} ", entry.path()); - continue; - }; - let document = Document::new(Arc::new(uri.clone()), text.clone()); - self.documents.insert(Arc::new(uri), document); + log::error!("Failed to read file {:?} ", entry.path()); + continue; + }; + let document = Document::new(Arc::new(uri.clone()), file_id, text.clone()); + self.documents.insert(file_id, document); } } } @@ -207,19 +266,20 @@ impl Store { parser: &mut tree_sitter::Parser, ) -> Result { log::trace!("Opening file {:?}", uri); + let file_id = self.path_interner.intern(uri.as_ref().clone()); self.diagnostics.reset(uri); - let prev_declarations = match self.documents.get(&(*uri).clone()) { + let prev_declarations = match self.documents.get(&file_id) { Some(document) => document.declarations.clone(), None => FxHashMap::default(), }; - let mut document = Document::new(uri.clone(), text); + let mut document = Document::new(uri.clone(), file_id, text); self.preprocess_document(&mut document); self.add_sourcemod_include(&mut document); self.parse(&mut document, parser) .expect("Couldn't parse document"); if !self.first_parse { // Don't try to find references yet, all the tokens might not be referenced. - self.find_references(uri); + self.resolve_file_references(&file_id); self.sync_references(&mut document, prev_declarations); } log::trace!("Done opening file {:?}", uri); @@ -229,8 +289,8 @@ impl Store { fn add_sourcemod_include(&mut self, document: &mut Document) { let mut sourcemod_path = "sourcemod".to_string(); - if let Some(uri) = self.resolve_import(&mut sourcemod_path, &document.uri, false) { - add_include(document, uri, sourcemod_path, Range::default()); + if let Some(include_id) = self.resolve_import(&mut sourcemod_path, &document.uri, false) { + self.add_include(document, include_id, sourcemod_path, Range::default()); } } @@ -248,31 +308,31 @@ impl Store { for sub_doc in self.documents.values() { for item in added_declarations.values() { if sub_doc.unresolved_tokens.contains(&item.read().name()) { - to_reload.push(sub_doc.uri.clone()); + to_reload.push(sub_doc.file_id); break; } } } - for uri_to_reload in to_reload.iter() { + for file_id in to_reload { // resolve includes - if let Some(doc_to_reload) = self.documents.get_mut(uri_to_reload) { + if let Some(doc_to_reload) = self.documents.get_mut(&file_id) { for (mut missing_inc_path, range) in doc_to_reload.missing_includes.clone() { - // FIXME: The false in this method call may be problematic. + // TODO: The false in this method call may be problematic. 
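The TODO above concerns `resolve_import`'s `quoted` flag: quoted includes (`#include "x"`) may resolve relative to the including file, while `<x>` includes go straight to the include directories. A simplified, hypothetical model of that search order, assuming exactly that behaviour:

```rust
use std::path::{Path, PathBuf};

/// Simplified model: quoted includes try the including file's directory
/// first; both forms then fall back to the configured include directories.
fn resolve_include(
    include: &str,
    including_file: &Path,
    include_dirs: &[PathBuf],
    quoted: bool,
) -> Option<PathBuf> {
    if quoted {
        if let Some(candidate) = including_file.parent().map(|dir| dir.join(include)) {
            if candidate.exists() {
                return Some(candidate);
            }
        }
    }
    include_dirs
        .iter()
        .map(|dir| dir.join(include))
        .find(|candidate| candidate.exists())
}

fn main() {
    let found = resolve_include(
        "sourcemod.inc",
        Path::new("/project/scripting/main.sp"),
        &[PathBuf::from("/project/scripting/include")],
        false,
    );
    println!("{found:?}"); // None unless the path exists on this machine.
}
```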
if let Some(include_uri) = self.resolve_import(&mut missing_inc_path, &document.uri, false) { - add_include(document, include_uri, missing_inc_path, range); + self.add_include(document, include_uri, missing_inc_path, range); } } } - self.find_references(uri_to_reload); + self.resolve_file_references(&file_id); } for item in deleted_declarations.values() { let item = item.read(); let references = item.references(); if let Some(references) = references { for ref_ in references.iter() { - if let Some(ref_document) = self.documents.get_mut(&ref_.uri) { + if let Some(ref_document) = self.documents.get_mut(&ref_.file_id) { ref_document.unresolved_tokens.insert(item.name()); } } @@ -325,23 +385,24 @@ impl Store { Some(preprocessor.macros) } - pub(crate) fn preprocess_document_by_uri( + pub(crate) fn preprocess_document_by_id( &mut self, - uri: Arc, + file_id: &FileId, ) -> Option> { - log::trace!("Preprocessing document by uri {:?}", uri); - if let Some(document) = self.documents.get(&uri) { + let document_uri = Arc::new(self.path_interner.lookup(*file_id).clone()); + log::trace!("Preprocessing document by uri {:?}", document_uri); + if let Some(document) = self.documents.get(file_id) { // Don't reprocess the text if it has not changed. if !document.preprocessed_text.is_empty() || document.being_preprocessed { - log::trace!("Skipped preprocessing document by uri {:?}", uri); + log::trace!("Skipped preprocessing document by uri {:?}", document_uri); return Some(document.macros.clone()); } } - if let Some(document) = self.documents.get_mut(&uri) { + if let Some(document) = self.documents.get_mut(file_id) { document.being_preprocessed = true; } - if let Some(text) = self.get_text(&uri) { - let mut preprocessor = SourcepawnPreprocessor::new(uri.clone(), &text); + if let Some(text) = self.get_text(file_id) { + let mut preprocessor = SourcepawnPreprocessor::new(document_uri, &text); let preprocessed_text = preprocessor .preprocess_input( &mut (|macros: &mut FxHashMap, @@ -356,7 +417,7 @@ impl Store { text.clone() }); - if let Some(document) = self.documents.get_mut(&uri) { + if let Some(document) = self.documents.get_mut(file_id) { document.preprocessed_text = preprocessed_text; document.macros = preprocessor.macros.clone(); preprocessor.add_diagnostics( @@ -373,10 +434,10 @@ impl Store { } return Some(preprocessor.macros); } - if let Some(document) = self.documents.get_mut(&uri) { + if let Some(document) = self.documents.get_mut(file_id) { document.being_preprocessed = false; } - log::trace!("Done preprocessing document by uri {:?}", uri); + log::trace!("Done preprocessing document by uri {:?}", document_uri); None } @@ -388,10 +449,10 @@ impl Store { document_uri: &Url, quoted: bool, ) -> anyhow::Result<()> { - if let Some(include_uri) = + if let Some(file_id) = self.resolve_import(&mut include_text, &Arc::new(document_uri.clone()), quoted) { - if let Some(include_macros) = self.preprocess_document_by_uri(Arc::new(include_uri)) { + if let Some(include_macros) = self.preprocess_document_by_id(&file_id) { macros.extend(include_macros); } return Ok(()); @@ -422,6 +483,7 @@ impl Store { offsets: &document.offsets, source: &document.preprocessed_text, uri: document.uri.clone(), + file_id: document.file_id, }; let mut cursor = root_node.walk(); @@ -453,7 +515,9 @@ impl Store { "enum_struct" => walker.parse_enum_struct(&mut node), "comment" => { walker.push_comment(node); - let Some(item) = walker.sp_items.pop() else {continue;}; + let Some(item) = walker.sp_items.pop() else { + continue; + }; 
walker.push_inline_comment(&item); walker.sp_items.push(item); Ok(()) @@ -472,8 +536,7 @@ impl Store { root_node, self.environment.options.disable_syntax_linter, ); - self.documents - .insert(document.uri.clone(), document.clone()); + self.documents.insert(document.file_id, document.clone()); self.read_unscanned_imports(&document.includes, parser); log::trace!("Done parsing document {:?}", document.uri); @@ -488,7 +551,7 @@ impl Store { for mut include in includes_to_add { match self.resolve_import(&mut include.path, &document.uri, include.quoted) { Some(uri) => { - add_include(document, uri, include.path, include.range); + self.add_include(document, uri, include.path, include.range); } None => { document @@ -501,11 +564,13 @@ impl Store { pub(crate) fn read_unscanned_imports( &mut self, - includes: &FxHashMap, + includes: &FxHashMap, parser: &mut tree_sitter::Parser, ) { for include_uri in includes.keys() { - let document = self.get(include_uri).expect("Include does not exist."); + let document = self + .get_cloned(include_uri) + .expect("Include does not exist."); if document.parsed { continue; } @@ -516,30 +581,42 @@ impl Store { } } - pub fn find_all_references(&mut self) { - let uris: Vec = - if let Ok(Some(main_path_uri)) = self.environment.options.get_main_path_uri() { - let mut includes = FxHashSet::default(); - includes.insert(main_path_uri.clone()); - if let Some(document) = self.documents.get(&main_path_uri) { - self.get_included_files(document, &mut includes); - includes.iter().map(|uri| (*uri).clone()).collect() - } else { - self.documents.values().map(|doc| doc.uri()).collect() + /// Resolve all the references in a project, given the [file_id](FileId) of a file in the project. + /// Will not run if the main file has already been resolved at least once. + /// Returns [None] if it did not run and [Some(file_id)] if it did, with [file_id](FileId) being the id of the + /// main file. + /// + /// # Arguments + /// * `uri` - The [uri](Url) of a file in the project. Does not have to be the root. + pub fn resolve_project_references(&mut self, uri: &Url) -> Option { + let file_id = self.path_interner.get(uri)?; + let main_id = self.projects.find_root_from_id(file_id)?.file_id; + let file_ids: Vec = { + let mut includes = FxHashSet::default(); + includes.insert(main_id); + if let Some(document) = self.documents.get(&main_id) { + if document.is_resolved() { + // Main file has already been resolved, assume the rest of the project has been too. 
+ return None; } + self.get_included_files(document, &mut includes); + includes.iter().cloned().collect() } else { - self.documents.values().map(|doc| doc.uri()).collect() - }; - uris.iter().for_each(|uri| { - self.find_references(uri); + self.documents.values().map(|doc| doc.file_id).collect() + } + }; + file_ids.iter().for_each(|file_id: &FileId| { + self.resolve_file_references(file_id); }); + + Some(main_id) } pub fn get_all_files_in_folder(&self, folder_uri: &Url) -> Vec { let mut children = vec![]; - for uri in self.documents.keys() { - if uri.as_str().contains(folder_uri.as_str()) { - children.push((**uri).clone()); + for document in self.documents.values() { + if document.uri.as_str().contains(folder_uri.as_str()) { + children.push(document.uri.as_ref().clone()); } } diff --git a/crates/store/src/main_heuristic.rs b/crates/store/src/main_heuristic.rs index 62b614bf4..f90941f3b 100644 --- a/crates/store/src/main_heuristic.rs +++ b/crates/store/src/main_heuristic.rs @@ -1,6 +1,9 @@ use lsp_types::Url; -use crate::{document::Document, Store}; +use crate::{ + document::{Document, FileExtension}, + Store, +}; impl Store { /// Check if a document is a potential main file. @@ -23,7 +26,7 @@ impl Store { return None; } } - if document.extension().ok()? == "sp" && document.text.contains("OnPluginStart()") { + if document.extension() == FileExtension::Sp && document.text.contains("OnPluginStart()") { return Some(document.uri()); } diff --git a/crates/store/src/options.rs b/crates/store/src/options.rs index c0abb3a91..308d65a60 100644 --- a/crates/store/src/options.rs +++ b/crates/store/src/options.rs @@ -1,15 +1,11 @@ -use lsp_types::Url; use serde::{Deserialize, Serialize}; use std::path::PathBuf; -use crate::normalize_uri; - #[derive(Debug, Clone, Default, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] #[serde(default)] pub struct Options { pub includes_directories: Vec, - pub main_path: PathBuf, pub spcomp_path: PathBuf, pub linter_arguments: Vec, pub disable_syntax_linter: bool, @@ -17,7 +13,10 @@ pub struct Options { impl Options { /// Return all possible include folder paths. - pub fn get_all_possible_include_folders(&self) -> Vec { + /// + /// # Arguments + /// * `main_path` - [Path](PathBuf) of the main file. + pub fn get_all_possible_include_folders(&self, main_path: PathBuf) -> Vec { let mut res: Vec = vec![]; for path in self.includes_directories.iter() { if path.exists() { @@ -25,7 +24,7 @@ impl Options { } } - if let Some(scripting_folder) = self.main_path.parent() { + if let Some(scripting_folder) = main_path.parent() { if scripting_folder.exists() { res.push(scripting_folder.to_path_buf()); } @@ -38,28 +37,7 @@ impl Options { res } - /// Return the [uri](lsp_types::Url) main path. [None] if it is empty. [Err] otherwise. - pub fn get_main_path_uri(&self) -> anyhow::Result> { - if let Some(main_path_str) = self.main_path.to_str() { - if main_path_str.is_empty() { - return Ok(None); - } - } - if !self.main_path.exists() || !self.main_path.is_file() { - return Err(anyhow::anyhow!("Main path does not exist.")); - } - let main_uri = Url::from_file_path(&self.main_path); - if let Ok(mut main_uri) = main_uri { - normalize_uri(&mut main_uri); - return Ok(Some(main_uri)); - } - - Err(anyhow::anyhow!( - "Main path could not be converted to a Uri." - )) - } - - /// Returns true if the given path is a parent or one of the IncludeDirectories. + /// Returns true if the given path is a parent or one of the IncludesDirectories. 
/// /// # Arguments /// diff --git a/crates/store/src/path_interner.rs b/crates/store/src/path_interner.rs new file mode 100644 index 000000000..c7b077b3c --- /dev/null +++ b/crates/store/src/path_interner.rs @@ -0,0 +1,45 @@ +//! Maps [uris](Url) to compact integer ids. We don't care about clearing uris which +//! no longer exist -- the assumption is that the total size of the uris we ever look at is +//! not too big. + +use fxhash::FxHasher; +use lsp_types::Url; +use syntax::FileId; + +use std::hash::BuildHasherDefault; + +use indexmap::IndexSet; + +/// Structure to map between [`Url`] and [`FileId`]. +#[derive(Default, Debug, Clone)] +pub struct PathInterner { + map: IndexSet<Url, BuildHasherDefault<FxHasher>>, +} + +impl PathInterner { + /// Get the id corresponding to `path`. + /// + /// If `path` does not exist in `self`, returns [`None`]. + pub fn get(&self, uri: &Url) -> Option<FileId> { + self.map.get_index_of(uri).map(|i| FileId(i as u32)) + } + + /// Insert `path` into `self`. + /// + /// - If `path` already exists in `self`, returns its associated id; + /// - Else, returns a newly allocated id. + pub fn intern(&mut self, uri: Url) -> FileId { + let (id, _added) = self.map.insert_full(uri); + assert!(id < u32::MAX as usize); + FileId(id as u32) + } + + /// Returns the path corresponding to `id`. + /// + /// # Panics + /// + /// Panics if `id` does not exist in `self`. + pub fn lookup(&self, id: FileId) -> &Url { + self.map.get_index(id.0 as usize).unwrap() + } +} diff --git a/crates/store/src/semantics.rs b/crates/store/src/semantics.rs index e9639b313..cbe2c4b05 100644 --- a/crates/store/src/semantics.rs +++ b/crates/store/src/semantics.rs @@ -1,26 +1,39 @@ -use lsp_types::Url; -use semantic_analyzer::find_references; +use semantic_analyzer::resolve_references; +use syntax::FileId; use crate::Store; impl Store { - pub fn find_references(&mut self, uri: &Url) { - log::trace!("Resolving references for document {:?}", uri); - if !self.documents.contains_key(uri) { - log::trace!("Skipped resolving references for document {:?}", uri); + /// Resolve all references in a document given by its [file_id](FileId). + /// + /// # Arguments + /// * `file_id` - The [file_id](FileId) of the document to resolve.
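Before the new `resolve_file_references` body below, a quick sketch of the contract the interner gives the store. `PathInterner` and `FileId` are the types added in this diff; the import path and the URIs are illustrative assumptions:

```rust
use lsp_types::Url;

use store::path_interner::PathInterner; // assumed import path
use syntax::FileId;

fn main() {
    let mut interner = PathInterner::default();
    let main = Url::parse("file:///scripting/plugin.sp").unwrap();
    let util = Url::parse("file:///scripting/include/util.inc").unwrap();

    let main_id: FileId = interner.intern(main.clone());
    let util_id: FileId = interner.intern(util);

    // Interning is idempotent: the same URI always yields the same id.
    assert_eq!(main_id, interner.intern(main.clone()));
    assert_ne!(main_id, util_id);

    // `lookup` inverts `intern` and panics on ids it never handed out;
    // `get` is the non-inserting query used when a file may be unknown.
    assert_eq!(interner.lookup(main_id), &main);
    assert_eq!(interner.get(&main), Some(main_id));
}
```

Because `IndexSet` preserves insertion order, ids are dense indices into the set, which is what makes the `FileId(i as u32)`/`get_index` round-trip valid.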
+ pub fn resolve_file_references(&mut self, file_id: &FileId) { + log::trace!( + "Resolving references for document {:?}", + self.path_interner.lookup(*file_id) + ); + if !self.documents.contains_key(file_id) { + log::trace!( + "Skipped resolving references for document {:?}", + self.path_interner.lookup(*file_id) + ); return; } - let all_items = self.get_all_items(false); - let Some(document) = self.documents.get_mut(uri) else{return;}; - - if let Some(unresolved_tokens) = find_references( + let all_items = self.get_all_items(file_id, false); + let Some(document) = self.documents.get_mut(file_id) else { + return; + }; + if let Some(unresolved_tokens) = resolve_references( all_items, &document.uri, + document.file_id, &document.preprocessed_text, &mut document.tokens, &mut document.offsets, ) { document.unresolved_tokens = unresolved_tokens; } + document.mark_as_resolved(); } } diff --git a/crates/store/src/syntax.rs b/crates/store/src/syntax.rs index 1e7daf338..26c9ae8ea 100644 --- a/crates/store/src/syntax.rs +++ b/crates/store/src/syntax.rs @@ -2,51 +2,43 @@ use fxhash::FxHashSet; use lsp_types::{Position, Url}; use parking_lot::RwLock; use std::sync::Arc; -use syntax::{range_contains_pos, SPItem}; +use syntax::{range_contains_pos, FileId, SPItem}; use crate::{document::Document, Store}; impl Store { - pub fn get_all_items(&self, flat: bool) -> Vec>> { + pub fn get_all_items(&self, file_id: &FileId, flat: bool) -> Vec>> { log::debug!("Getting all items from store. flat: {}", flat); let mut all_items = vec![]; - if let Ok(Some(main_path_uri)) = self.environment.options.get_main_path_uri() { - let mut includes = FxHashSet::default(); - includes.insert(main_path_uri.clone()); - if let Some(document) = self.documents.get(&main_path_uri) { - self.get_included_files(document, &mut includes); - for include in includes.iter() { - if let Some(document) = self.documents.get(include) { - if flat { - all_items.extend(document.get_sp_items_flat()); - } else { - all_items.extend(document.get_sp_items()) - } + let Some(main_node) = self.projects.find_root_from_id(*file_id) else { + return all_items; + }; + let main_file_id = main_node.file_id; + let mut includes = FxHashSet::default(); + includes.insert(main_file_id); + if let Some(document) = self.documents.get(&main_file_id) { + self.get_included_files(document, &mut includes); + for include in includes.iter() { + if let Some(document) = self.documents.get(include) { + if flat { + all_items.extend(document.get_sp_items_flat()); + } else { + all_items.extend(document.get_sp_items()) } } } - log::trace!("Done getting {} item(s)", all_items.len()); - return all_items; - } - for document in self.documents.values() { - for item in document.sp_items.iter() { - all_items.push(item.clone()); - } } + log::trace!("Done getting {} item(s)", all_items.len()); - log::trace!( - "Done getting {} item(s) without the main path.", - all_items.len() - ); all_items } - pub(crate) fn get_included_files(&self, document: &Document, includes: &mut FxHashSet) { + pub(crate) fn get_included_files(&self, document: &Document, includes: &mut FxHashSet) { for include_uri in document.includes.keys() { if includes.contains(include_uri) { continue; } - includes.insert(include_uri.clone()); + includes.insert(*include_uri); if let Some(include_document) = self.documents.get(include_uri) { self.get_included_files(include_document, includes); } @@ -63,8 +55,11 @@ impl Store { position, uri ); + let Some(file_id) = self.path_interner.get(uri) else { + return vec![]; + }; + let all_items 
= self.get_all_items(&file_id, true); let uri = Arc::new(uri); - let all_items = self.get_all_items(true); let mut res = vec![]; for item in all_items.iter() { let item_lock = item.read(); @@ -78,7 +73,7 @@ impl Store { Some(references) => { for reference in references.iter() { if range_contains_pos(&reference.v_range, &position) - && (*reference.uri).eq(*uri) + && reference.file_id == file_id { res.push(item.clone()); break; @@ -95,9 +90,9 @@ impl Store { res } - pub fn get_item_from_key(&self, key: String) -> Option>> { + pub fn get_item_from_key(&self, key: String, file_id: FileId) -> Option>> { log::debug!("Getting item from key {:?}.", key); - let all_items = self.get_all_items(false); + let all_items = self.get_all_items(&file_id, false); let sub_keys: Vec<&str> = key.split('-').collect(); if sub_keys.is_empty() { return None; diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml index af27b2a59..ac369cfd1 100644 --- a/crates/syntax/Cargo.toml +++ b/crates/syntax/Cargo.toml @@ -20,3 +20,4 @@ tree-sitter.workspace = true log.workspace = true serde.workspace = true serde_json.workspace = true +nohash-hasher.workspace = true diff --git a/crates/syntax/src/define_item.rs b/crates/syntax/src/define_item.rs index f192429fa..45fa0db26 100644 --- a/crates/syntax/src/define_item.rs +++ b/crates/syntax/src/define_item.rs @@ -5,7 +5,7 @@ use lsp_types::{ }; use std::sync::Arc; -use crate::{description::Description, uri_to_file_name, Location}; +use crate::{description::Description, uri_to_file_name, FileId, Reference}; #[derive(Debug, Clone)] /// SPItem representation of a SourcePawn define. @@ -34,8 +34,11 @@ pub struct DefineItem { /// Uri of the file where the define is declared. pub uri: Arc, + /// [FileId](FileId) of the file where the define is declared. + pub file_id: FileId, + /// References to this define. - pub references: Vec, + pub references: Vec, } impl DefineItem { @@ -58,7 +61,7 @@ impl DefineItem { detail: None, description: uri_to_file_name(&self.uri), }), - data: Some(serde_json::Value::String(self.key())), + data: Some(serde_json::Value::String(self.completion_data())), ..Default::default() }) } @@ -120,6 +123,10 @@ impl DefineItem { self.name.clone() } + pub fn completion_data(&self) -> String { + format!("{}${}", self.key(), self.file_id) + } + /// Formatted representation of the define. /// /// # Exemple diff --git a/crates/syntax/src/enum_item.rs b/crates/syntax/src/enum_item.rs index d1298a40e..d60245297 100644 --- a/crates/syntax/src/enum_item.rs +++ b/crates/syntax/src/enum_item.rs @@ -6,7 +6,7 @@ use lsp_types::{ use parking_lot::RwLock; use std::sync::Arc; -use crate::{description::Description, uri_to_file_name, Location, SPItem}; +use crate::{description::Description, uri_to_file_name, FileId, Reference, SPItem}; #[derive(Debug, Clone)] /// SPItem representation of a SourcePawn enum. @@ -32,8 +32,11 @@ pub struct EnumItem { /// Uri of the file where the enum is declared. pub uri: Arc, + /// [FileId](FileId) of the file where the enum is declared. + pub file_id: FileId, + /// References to this enum. - pub references: Vec, + pub references: Vec, /// Children ([EnumMemberItem](super::enum_member_item::EnumMemberItem)) of this enum. 
pub children: Vec>>, @@ -64,7 +67,7 @@ impl EnumItem { None }, }), - data: Some(serde_json::Value::String(self.key())), + data: Some(serde_json::Value::String(self.completion_data())), ..Default::default() }) } @@ -138,6 +141,10 @@ impl EnumItem { self.name.clone() } + pub fn completion_data(&self) -> String { + format!("{}${}", self.key(), self.file_id) + } + /// Formatted representation of the enum. /// /// # Exemple diff --git a/crates/syntax/src/enum_member_item.rs b/crates/syntax/src/enum_member_item.rs index d6965c725..67414671e 100644 --- a/crates/syntax/src/enum_member_item.rs +++ b/crates/syntax/src/enum_member_item.rs @@ -8,7 +8,7 @@ use parking_lot::RwLock; use std::sync::{Arc, Weak}; use crate::description::Description; -use crate::{Location, SPItem}; +use crate::{FileId, Reference, SPItem}; #[derive(Debug, Clone)] /// SPItem representation of a SourcePawn enum member. @@ -31,8 +31,11 @@ pub struct EnumMemberItem { /// Uri of the file where the enum member is declared. pub uri: Arc, + /// [FileId](FileId) of the file where the enum member is declared. + pub file_id: FileId, + /// References to this enum. - pub references: Vec, + pub references: Vec, } impl EnumMemberItem { @@ -66,7 +69,7 @@ impl EnumMemberItem { } }, }), - data: Some(serde_json::Value::String(self.key())), + data: Some(serde_json::Value::String(self.completion_data())), ..Default::default() }) } @@ -128,6 +131,10 @@ impl EnumMemberItem { self.name.clone() } + pub fn completion_data(&self) -> String { + format!("{}${}", self.key(), self.file_id) + } + /// Formatted representation of the enum member. /// /// # Exemple diff --git a/crates/syntax/src/enum_struct_item.rs b/crates/syntax/src/enum_struct_item.rs index 2bf0da87d..927977b62 100644 --- a/crates/syntax/src/enum_struct_item.rs +++ b/crates/syntax/src/enum_struct_item.rs @@ -6,7 +6,7 @@ use lsp_types::{ use parking_lot::RwLock; use std::sync::Arc; -use crate::{description::Description, uri_to_file_name, Location, SPItem}; +use crate::{description::Description, uri_to_file_name, FileId, Reference, SPItem}; #[derive(Debug, Clone)] /// SPItem representation of a SourcePawn enum struct. @@ -32,8 +32,11 @@ pub struct EnumStructItem { /// Uri of the file where the enum struct is declared. pub uri: Arc, + /// [FileId](FileId) of the file where the enum struct is declared. + pub file_id: FileId, + /// References to this enum struct. - pub references: Vec, + pub references: Vec, /// Children ([FunctionItem](super::function_item::FunctionItem), /// [VariableItem](super::variable_item::VariableItem)) of this enum struct. @@ -64,7 +67,7 @@ impl EnumStructItem { None }, }), - data: Some(serde_json::Value::String(self.key())), + data: Some(serde_json::Value::String(self.completion_data())), ..Default::default() }); @@ -137,6 +140,10 @@ impl EnumStructItem { self.name.clone() } + pub fn completion_data(&self) -> String { + format!("{}${}", self.key(), self.file_id) + } + /// Formatted representation of an [EnumStructItem]. 
/// /// # Exemple diff --git a/crates/syntax/src/function_item.rs b/crates/syntax/src/function_item.rs index db6b4ff3b..12595e080 100644 --- a/crates/syntax/src/function_item.rs +++ b/crates/syntax/src/function_item.rs @@ -12,7 +12,7 @@ use std::sync::{Arc, Weak}; use crate::description::Description; use crate::parameter::Parameter; -use crate::{uri_to_file_name, Location, SPItem}; +use crate::{uri_to_file_name, FileId, Reference, SPItem}; #[derive(Debug, Clone)] /// SPItem representation of a first order SourcePawn function, which can be converted to a @@ -42,6 +42,9 @@ pub struct FunctionItem { /// Uri of the file where the function is declared. pub uri: Arc, + /// [FileId](FileId) of the file where the function is declared. + pub file_id: FileId, + /// Full function signature. pub detail: String, @@ -52,7 +55,7 @@ pub struct FunctionItem { pub definition_type: FunctionDefinitionType, /// References to this function. - pub references: Vec, + pub references: Vec, /// Parameters of the function. pub params: Vec>>, @@ -115,7 +118,7 @@ impl FunctionItem { }, }), deprecated: Some(self.is_deprecated()), - data: Some(serde_json::Value::String(self.key())), + data: Some(serde_json::Value::String(self.completion_data())), ..Default::default() }); @@ -261,7 +264,7 @@ impl FunctionItem { })), deprecated: Some(self.is_deprecated()), insert_text_format: Some(InsertTextFormat::SNIPPET), - data: Some(serde_json::Value::String(self.key())), + data: Some(serde_json::Value::String(self.completion_data())), ..Default::default() }) } @@ -384,6 +387,10 @@ impl FunctionItem { } } + pub fn completion_data(&self) -> String { + format!("{}${}", self.key(), self.file_id) + } + /// Return whether or not the method is a methodmap constructor. pub fn is_ctor(&self) -> bool { if let Some(parent) = &self.parent { diff --git a/crates/syntax/src/include_item.rs b/crates/syntax/src/include_item.rs index 4906bdfdd..89bbb39b4 100644 --- a/crates/syntax/src/include_item.rs +++ b/crates/syntax/src/include_item.rs @@ -5,6 +5,8 @@ use lsp_types::{ LocationLink, MarkedString, Position, Range, Url, }; +use crate::FileId; + #[derive(Debug, Clone)] /// SPItem representation of a SourcePawn include. pub struct IncludeItem { @@ -20,8 +22,14 @@ pub struct IncludeItem { /// Uri of the file where the include is declared. pub uri: Arc, - /// Uri of the file which the include points to. + /// [FileId](FileId) of the file where the include is declared. + pub file_id: FileId, + + /// [Uri](Url) of the file which the include points to. pub include_uri: Arc, + + /// [FileId](FileId) of the file which the include points to. + pub include_id: FileId, } impl IncludeItem { diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs index 804533604..4a0245f59 100644 --- a/crates/syntax/src/lib.rs +++ b/crates/syntax/src/lib.rs @@ -5,7 +5,7 @@ use lsp_types::{ SignatureInformation, Url, }; use parking_lot::RwLock; -use std::sync::Arc; +use std::{fmt::Display, sync::Arc}; use self::parameter::Parameter; @@ -26,28 +26,41 @@ pub mod typeset_item; pub mod utils; pub mod variable_item; +/// Handle to a file. 
+#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] +pub struct FileId(pub u32); + +/// safe because `FileId` is a newtype of `u32` +impl nohash_hasher::IsEnabled for FileId {} + +impl From for FileId { + fn from(id: u32) -> Self { + Self(id) + } +} + +impl Display for FileId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + /// Represents a location inside a resource, such as a line inside a text file. #[derive(Debug, Eq, PartialEq, Clone)] -pub struct Location { - // Uri of the location. +pub struct Reference { + /// [FileId](FileId) of the location. + pub file_id: FileId, + + /// [Uri](Url) of the location. pub uri: Arc, - // Range of the location. + /// Range of the location. pub range: Range, - // User visible range of the location. + /// User visible range of the location. pub v_range: Range, } -impl Location { - pub fn to_lsp_location(&self) -> lsp_types::Location { - lsp_types::Location { - uri: self.uri.as_ref().clone(), - range: self.v_range, - } - } -} - #[derive(Debug, Clone)] /// Generic representation of an item, which can be converted to a /// [CompletionItem](lsp_types::CompletionItem), [Location](lsp_types::Location), etc. @@ -210,7 +223,23 @@ impl SPItem { } } - pub fn references(&self) -> Option<&Vec> { + pub fn file_id(&self) -> FileId { + match self { + SPItem::Variable(item) => item.file_id, + SPItem::Function(item) => item.file_id, + SPItem::Enum(item) => item.file_id, + SPItem::EnumMember(item) => item.file_id, + SPItem::EnumStruct(item) => item.file_id, + SPItem::Define(item) => item.file_id, + SPItem::Methodmap(item) => item.file_id, + SPItem::Property(item) => item.file_id, + SPItem::Typedef(item) => item.file_id, + SPItem::Typeset(item) => item.file_id, + SPItem::Include(item) => item.file_id, + } + } + + pub fn references(&self) -> Option<&Vec> { match self { SPItem::Variable(item) => Some(&item.references), SPItem::Function(item) => Some(&item.references), @@ -237,8 +266,10 @@ impl SPItem { } } - pub fn push_reference(&mut self, reference: Location) { - if range_equals_range(&self.range(), &reference.range) && self.uri().eq(&reference.uri) { + pub fn push_reference(&mut self, reference: Reference) { + if range_equals_range(&self.range(), &reference.range) + && self.file_id() == reference.file_id + { return; } match self { @@ -267,7 +298,7 @@ impl SPItem { } } - pub fn set_new_references(&mut self, references: Vec) { + pub fn set_new_references(&mut self, references: Vec) { match self { SPItem::Variable(item) => item.references = references, SPItem::Function(item) => item.references = references, diff --git a/crates/syntax/src/methodmap_item.rs b/crates/syntax/src/methodmap_item.rs index e04cabb13..561c40320 100644 --- a/crates/syntax/src/methodmap_item.rs +++ b/crates/syntax/src/methodmap_item.rs @@ -7,7 +7,7 @@ use parking_lot::RwLock; use std::sync::Arc; use crate::description::Description; -use crate::{uri_to_file_name, Location, SPItem}; +use crate::{uri_to_file_name, FileId, Reference, SPItem}; #[derive(Debug, Clone)] /// SPItem representation of a SourcePawn methodmap. @@ -39,8 +39,11 @@ pub struct MethodmapItem { /// Uri of the file where the methodmap is declared. pub uri: Arc, + /// [FileId](FileId) of the file where the methodmap is declared. + pub file_id: FileId, + /// References to this methodmap. 
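The `IsEnabled` marker above is the hook that lets `nohash-hasher` (newly added to the workspace dependencies) skip hashing for `FileId` keys entirely: the wrapped `u32` serves as its own hash. The hunks here do not show the store's concrete map types, so the following is only a sketch of the intended pattern, with a local stand-in for `FileId`:

```rust
use nohash_hasher::{IntMap, IsEnabled};

// Stand-in with the same shape as the FileId newtype above.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
struct FileId(u32);

// Sound because hashing a FileId writes exactly one u32.
impl IsEnabled for FileId {}

fn main() {
    // IntMap<K, V> is HashMap<K, V, BuildNoHashHasher<K>>: the key's integer
    // value is used directly as the hash, so lookups do no hashing work.
    let mut documents: IntMap<FileId, &str> = IntMap::default();
    documents.insert(FileId(0), "plugin.sp");
    documents.insert(FileId(1), "util.inc");
    assert_eq!(documents.get(&FileId(0)), Some(&"plugin.sp"));
}
```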
- pub references: Vec, + pub references: Vec, /// Children ([FunctionItem](super::function_item::FunctionItem), /// [PropertyItem](super::property_item::PropertyItem)) of this methodmap. @@ -72,7 +75,7 @@ impl MethodmapItem { None }, }), - data: Some(serde_json::Value::String(self.key())), + data: Some(serde_json::Value::String(self.completion_data())), ..Default::default() }); @@ -145,6 +148,10 @@ impl MethodmapItem { self.name.clone() } + pub fn completion_data(&self) -> String { + format!("{}${}", self.key(), self.file_id) + } + /// Returns the constructor of the methodmap if it exists. pub fn ctor(&self) -> Option>> { self.children diff --git a/crates/syntax/src/property_item.rs b/crates/syntax/src/property_item.rs index f8b307de6..8572ce84c 100644 --- a/crates/syntax/src/property_item.rs +++ b/crates/syntax/src/property_item.rs @@ -7,7 +7,7 @@ use parking_lot::RwLock; use std::sync::{Arc, Weak}; use crate::description::Description; -use crate::{Location, SPItem}; +use crate::{FileId, Reference, SPItem}; #[derive(Debug, Clone)] /// SPItem representation of a SourcePawn property, which can be converted to a @@ -40,8 +40,11 @@ pub struct PropertyItem { /// Uri of the file where the property is declared. pub uri: Arc, + /// [FileId](FileId) of the file where the property is declared. + pub file_id: FileId, + /// References to this property. - pub references: Vec, + pub references: Vec, } impl PropertyItem { @@ -83,7 +86,7 @@ impl PropertyItem { )), }), deprecated: Some(self.is_deprecated()), - data: Some(serde_json::Value::String(self.key())), + data: Some(serde_json::Value::String(self.completion_data())), ..Default::default() }) } @@ -149,6 +152,10 @@ impl PropertyItem { ) } + pub fn completion_data(&self) -> String { + format!("{}${}", self.key(), self.file_id) + } + /// Formatted representation of a [PropertyItem]. /// /// # Exemple diff --git a/crates/syntax/src/typedef_item.rs b/crates/syntax/src/typedef_item.rs index 044695b23..bd05d2fbe 100644 --- a/crates/syntax/src/typedef_item.rs +++ b/crates/syntax/src/typedef_item.rs @@ -9,7 +9,7 @@ use std::sync::{Arc, Weak}; use crate::description::Description; use crate::parameter::Parameter; -use crate::{uri_to_file_name, Location, SPItem}; +use crate::{uri_to_file_name, FileId, Reference, SPItem}; #[derive(Debug, Clone)] /// SPItem representation of a SourcePawn typedef/functag, which can be converted to a @@ -39,11 +39,14 @@ pub struct TypedefItem { /// Uri of the file where the typedef is declared. pub uri: Arc, + /// [FileId](FileId) of the file where the typedef is declared. + pub file_id: FileId, + /// Full typedef text. pub detail: String, /// References to this typedef. - pub references: Vec, + pub references: Vec, /// Parameters of the typedef. pub params: Vec>>, @@ -81,7 +84,7 @@ impl TypedefItem { }, }), deprecated: Some(self.is_deprecated()), - data: Some(serde_json::Value::String(self.key())), + data: Some(serde_json::Value::String(self.completion_data())), ..Default::default() }) } @@ -190,7 +193,7 @@ impl TypedefItem { })), deprecated: Some(self.is_deprecated()), insert_text_format: Some(InsertTextFormat::SNIPPET), - data: Some(serde_json::Value::String(self.key())), + data: Some(serde_json::Value::String(self.completion_data())), ..Default::default() }) } @@ -203,6 +206,10 @@ impl TypedefItem { } } + pub fn completion_data(&self) -> String { + format!("{}${}", self.key(), self.file_id) + } + /// Formatted representation of a [TypedefItem]. 
/// /// # Exemple diff --git a/crates/syntax/src/typeset_item.rs b/crates/syntax/src/typeset_item.rs index 2096230ac..7e12e0a44 100644 --- a/crates/syntax/src/typeset_item.rs +++ b/crates/syntax/src/typeset_item.rs @@ -7,7 +7,7 @@ use parking_lot::RwLock; use std::sync::Arc; use crate::description::Description; -use crate::{uri_to_file_name, Location, SPItem}; +use crate::{uri_to_file_name, FileId, Reference, SPItem}; #[derive(Debug, Clone)] /// SPItem representation of a SourcePawn typeset/funcenum, which can be converted to a @@ -34,8 +34,11 @@ pub struct TypesetItem { /// Uri of the file where the typeset is declared. pub uri: Arc, + /// [FileId](FileId) of the file where the typeset is declared. + pub file_id: FileId, + /// References to this typeset. - pub references: Vec, + pub references: Vec, /// Parameters of the typeset. pub children: Vec>>, @@ -70,7 +73,7 @@ impl TypesetItem { }, }), deprecated: Some(self.is_deprecated()), - data: Some(serde_json::Value::String(self.key())), + data: Some(serde_json::Value::String(self.completion_data())), ..Default::default() }) } @@ -156,6 +159,10 @@ impl TypesetItem { self.name.clone() } + pub fn completion_data(&self) -> String { + format!("{}${}", self.key(), self.file_id) + } + /// Formatted representation of a [TypesetItem]. /// /// # Exemple diff --git a/crates/syntax/src/variable_item.rs b/crates/syntax/src/variable_item.rs index d47ada52d..da0e19570 100644 --- a/crates/syntax/src/variable_item.rs +++ b/crates/syntax/src/variable_item.rs @@ -7,7 +7,7 @@ use parking_lot::RwLock; use std::sync::{Arc, Weak}; use crate::description::Description; -use crate::{range_contains_pos, Location, SPItem}; +use crate::{range_contains_pos, FileId, Reference, SPItem}; #[derive(Debug, Clone)] /// SPItem representation of a SourcePawn variable. @@ -30,6 +30,9 @@ pub struct VariableItem { /// Uri of the file where the variable is declared. pub uri: Arc, + /// [FileId](FileId) of the file where the variable is declared. + pub file_id: FileId, + /// Full variable signature. pub detail: String, @@ -40,7 +43,7 @@ pub struct VariableItem { pub storage_class: Vec, /// References to this variable. - pub references: Vec, + pub references: Vec, /// Parent of this variable, if it is not global. pub parent: Option>>, @@ -85,7 +88,7 @@ impl VariableItem { detail: Some(self.type_.clone()), description: Some("local".to_string()), }), - data: Some(serde_json::Value::String(self.key())), + data: Some(serde_json::Value::String(self.completion_data())), ..Default::default() }) } @@ -102,7 +105,7 @@ impl VariableItem { detail: Some(self.type_.clone()), description: Some(format!("{}::{}", parent.name, self.name)), }), - data: Some(serde_json::Value::String(self.key())), + data: Some(serde_json::Value::String(self.completion_data())), ..Default::default() }) } @@ -119,7 +122,7 @@ impl VariableItem { detail: Some(self.type_.clone()), description: Some("global".to_string()), }), - data: Some(serde_json::Value::String(self.key())), + data: Some(serde_json::Value::String(self.completion_data())), ..Default::default() }), } @@ -191,6 +194,10 @@ impl VariableItem { } } + pub fn completion_data(&self) -> String { + format!("{}${}", self.key(), self.file_id) + } + /// Formatted representation of a [VariableItem]. 
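Every item type above now puts `completion_data()` rather than `key()` into the completion item's `data` field, packing the item key and the declaring file together as `<key>$<file_id>`. The `format!("{}${}", key, file_id)` encoding is from the diff; the decoder below is an assumed counterpart a completion-resolve handler could use (note `rsplit_once`, since keys themselves contain `-` separators per `get_item_from_key`):

```rust
// Hypothetical decoder for the "<key>$<file_id>" payload built by
// completion_data(); only the encoding side appears in this diff.
fn split_completion_data(data: &str) -> Option<(&str, u32)> {
    let (key, file_id) = data.rsplit_once('$')?;
    Some((key, file_id.parse().ok()?))
}

fn main() {
    let data = format!("{}${}", "MyMethodmap-MyMethod", 42u32);
    assert_eq!(
        split_completion_data(&data),
        Some(("MyMethodmap-MyMethod", 42))
    );
}
```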
/// /// # Exemple diff --git a/editors/code/CHANGELOG.md b/editors/code/CHANGELOG.md index 0f37252df..82e3a09c3 100644 --- a/editors/code/CHANGELOG.md +++ b/editors/code/CHANGELOG.md @@ -1,5 +1,16 @@ ## Release Notes +## [6.0.0] + +### Added + +- Added automatic mainPath detection. The mainPath setting does not exist anymore! +- Added support for Apple Silicon. + +### Chore + +- LSP version bump. + ## [5.5.5] ### Chore diff --git a/editors/code/package.json b/editors/code/package.json index 1c4f1f48d..975e28f95 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -2,7 +2,7 @@ "name": "sourcepawn-vscode", "displayName": "SourcePawn", "description": "SourcePawn highlighting, autocompletion and much more", - "version": "5.5.5", + "version": "6.0.0", "publisher": "Sarrus", "main": "./dist/spIndex.js", "icon": "sm.png", @@ -46,7 +46,6 @@ "onCommand:sourcepawn-vscode.uploadToServer", "onCommand:sourcepawn-vscode.refreshPlugins", "onCommand:sourcepawn-vscode.insertParameters", - "onCommand:sourcepawn-vscode.setFileAsMain", "onCommand:sourcepawn-vscode.createChangelog", "onCommand:sourcepawn-vscode.changeSMApi", "onCommand:sourcepawn-vscode.startServer" @@ -107,11 +106,6 @@ "title": "Insert Function Parameters", "category": "SM" }, - { - "command": "sourcepawn-vscode.setFileAsMain", - "title": "Set current file as main", - "category": "SM" - }, { "command": "sourcepawn-vscode.installSM", "title": "Install Sourcemod", @@ -142,6 +136,11 @@ "title": "Reveal preprocessed document (as seen by the server)", "category": "sourcepawn-lsp (debug command)" }, + { + "command": "sourcepawn-vscode.projectsGraphviz", + "title": "Generate Graphviz file for projects in the workspace", + "category": "sourcepawn-lsp (debug command)" + }, { "command": "sourcepawn-vscode.installLanguageServer", "title": "Install SourcePawn Language Server" @@ -164,20 +163,12 @@ { "when": "resourceLangId == sourcepawn", "command": "sourcepawn-vscode.compileSM" - }, - { - "when": "resourceLangId == sourcepawn", - "command": "sourcepawn-vscode.setFileAsMain" } ], "explorer/context": [ { "when": "resourceLangId == sourcepawn", "command": "sourcepawn-vscode.compileSM" - }, - { - "when": "resourceLangId == sourcepawn", - "command": "sourcepawn-vscode.setFileAsMain" } ], "editor/title": [ @@ -231,17 +222,6 @@ "description": "Location of the SourceMod compiler (spcomp)", "scope": "resource" }, - "sourcepawn.MainPath": { - "type": "string", - "description": "The location of the main.sp file, which allows the linter and the compiler to work correctly. You should use this setting per workspace.", - "scope": "resource", - "deprecationMessage": "Use `sourcepawn.mainPath` instead." - }, - "SourcePawnLanguageServer.mainPath": { - "type": "string", - "description": "The location of the main.sp file, which allows the linter and the compiler to work correctly. You should use this setting per workspace.", - "scope": "resource" - }, "sourcepawn.MainPathCompilation": { "type": "boolean", "default": true, @@ -280,7 +260,7 @@ "default": [], "description": "Optional additional include folders paths for the compiler and the linter. Use this if you know what you are doing. Leave blank to disable.", "scope": "resource", - "deprecationMessage": "Use `SourcePawnLanguageServer.includeDirectories` instead." + "deprecationMessage": "Use `SourcePawnLanguageServer.includesDirectories` instead." 
}, "SourcePawnLanguageServer.disableSyntaxLinter": { "type": "boolean", @@ -321,7 +301,7 @@ "default": [ { "name": "", - "includeDirectories": [], + "includesDirectories": [], "spcompPath": "", "compilerArguments": [], "linterArguments": [] diff --git a/editors/code/src/Commands/changeSMApi.ts b/editors/code/src/Commands/changeSMApi.ts index 5f545e8c9..407a02577 100644 --- a/editors/code/src/Commands/changeSMApi.ts +++ b/editors/code/src/Commands/changeSMApi.ts @@ -7,7 +7,7 @@ import { type AvailableAPIOptional = { name: string | undefined; - includeDirectories: string[] | undefined; + includesDirectories: string[] | undefined; spcompPath: string | undefined; compilerArguments: string[] | undefined; linterArguments: string[] | undefined; @@ -15,7 +15,7 @@ type AvailableAPIOptional = { type AvailableAPI = { name: string; - includeDirectories: string[]; + includesDirectories: string[]; spcompPath: string; compilerArguments: string[]; linterArguments: string[]; @@ -39,7 +39,7 @@ export async function run(args: any) { const chosenAPI = availableAPIs.find((e) => e.name === item.label); await Workspace.getConfiguration("SourcePawnLanguageServer").update( "includesDirectories", - chosenAPI.includeDirectories + chosenAPI.includesDirectories ); await Workspace.getConfiguration("SourcePawnLanguageServer").update( "spcompPath", @@ -63,8 +63,8 @@ function buildAvailableAPIFromOptional( ): AvailableAPI { const name = "name" in optional ? optional.name : ""; const spcompPath = "spcompPath" in optional ? optional.spcompPath : ""; - const includeDirectories = - "includeDirectories" in optional ? optional.includeDirectories : []; + const includesDirectories = + "includesDirectories" in optional ? optional.includesDirectories : []; const compilerArguments = "compilerArguments" in optional ? optional.compilerArguments : []; const linterArguments = @@ -73,7 +73,7 @@ function buildAvailableAPIFromOptional( return { name, spcompPath, - includeDirectories, + includesDirectories, compilerArguments, linterArguments, }; diff --git a/editors/code/src/Commands/compileSM.ts b/editors/code/src/Commands/compileSM.ts index 727e07a49..e51251e86 100644 --- a/editors/code/src/Commands/compileSM.ts +++ b/editors/code/src/Commands/compileSM.ts @@ -10,9 +10,9 @@ import { existsSync, mkdirSync } from "fs"; import { execFile } from "child_process"; import { run as uploadToServerCommand } from "./uploadToServer"; -import { findMainPath } from "../spUtils"; import { run as refreshPluginsCommand } from "./refreshPlugins"; import { ctx } from "../spIndex"; +import { ProjectMainPathParams, projectMainPath } from "../lsp_ext"; // Create an OutputChannel variable here but do not initialize yet. let output: OutputChannel; @@ -26,7 +26,6 @@ export async function run(args: URI): Promise { const uri = args === undefined ? window.activeTextEditor.document.uri : args; const workspaceFolder = Workspace.getWorkspaceFolder(uri); - const mainPath = findMainPath(uri); const alwaysCompileMainPath: boolean = Workspace.getConfiguration( "sourcepawn", workspaceFolder @@ -34,8 +33,14 @@ export async function run(args: URI): Promise { // Decide which file to compile here. 
let fileToCompilePath: string; - if (alwaysCompileMainPath && mainPath !== undefined && mainPath !== "") { - fileToCompilePath = mainPath; + if (alwaysCompileMainPath) { + const params: ProjectMainPathParams = { uri: uri.toString() }; + const mainUri = await ctx?.client.sendRequest(projectMainPath, params); + if (mainUri === undefined) { + fileToCompilePath = uri.fsPath; + } else { + fileToCompilePath = URI.parse(mainUri).fsPath; + } } else { fileToCompilePath = uri.fsPath; } @@ -146,7 +151,12 @@ export async function run(args: URI): Promise { try { ctx?.setSpcompStatus({ quiescent: false }); // Compile in child process. - let command = spcomp + "\n"; + let spcompCommand = spcomp + "\n"; + if (process.platform === "darwin" && process.arch === "arm64") { + spcompCommand = "arch"; + compilerArgs.unshift("-x86_64", spcomp); + } + let command = spcompCommand; compilerArgs.forEach((e) => { command += e + " "; if (e.length > 10) { @@ -154,7 +164,10 @@ export async function run(args: URI): Promise { } }); output.appendLine(`${command}\n`); - execFile(spcomp, compilerArgs, async (error, stdout) => { + execFile(spcompCommand, compilerArgs, async (error, stdout) => { + if (error) { + console.error(error); + } ctx?.setSpcompStatus({ quiescent: true }); output.append(stdout.toString().trim()); if ( diff --git a/editors/code/src/Commands/doctor.ts b/editors/code/src/Commands/doctor.ts index 63aa140d4..df91855c0 100644 --- a/editors/code/src/Commands/doctor.ts +++ b/editors/code/src/Commands/doctor.ts @@ -1,5 +1,4 @@ import * as vscode from "vscode"; -import { ctx } from "../spIndex"; import * as fs from "fs"; import { execFile } from "child_process"; @@ -33,11 +32,6 @@ enum DiagnosticState { } class Doctor { - // Language server - lspVersion: string | undefined = undefined; - isLSPInstalled = DiagnosticState.None; - isLSPExecutable = DiagnosticState.None; - // Settings spCompPath: string | undefined = undefined; isSPCompSet = DiagnosticState.None; @@ -45,9 +39,6 @@ class Doctor { isSPCompRunnable = DiagnosticState.None; isSMInstalled = DiagnosticState.None; - isMainPathSet = DiagnosticState.None; - isMainPathValid = DiagnosticState.None; - isMainPathCorrect = DiagnosticState.None; constructor() {} @@ -61,10 +52,6 @@ class Doctor {

       <h1>SourcePawn Doctor</h1>
 
-      <h2>Language Server</h2>
-      <ul>
-        ${this.lspDiagnosticToWebview()}
-      </ul>
       <h2>Compiler (spcomp)</h2>
       <ul>
         ${this.spCompToWebView()}
@@ -73,17 +60,12 @@ class Doctor {
         ${this.includesDirectoriesToWebView()}
       </ul>
 
-      <h2>Main Path</h2>
-      <ul>
-        ${this.mainPathToWebView()}
-      </ul>
       <h2>Additional help</h2>
       <p>If all the above are green and the extension is still not behaving as expected, try the following:</p>
       <ul>
         <li>Restart the SourcePawn Language Server (Hover your mouse on the "sourcepawn-lsp" logo on the bottom left of the screen and click on restart).</li>
         <li>Reload VSCode (CTRL+Shift+P and type "Reload Window").</li>
-
         <li>Reinstall the SourcePawn Language Server (CTRL+Shift+P and type "Install Sourcepawn Language Server").</li>
         <li>Look in the logs for errors (Hover your mouse on the "sourcepawn-lsp" logo on the bottom left of the screen and click on Open Logs). You can set the verbosity of the server to "trace" in the "sourcepawn.trace.server" setting.</li>
         <li>Try to reproduce the issue in a new project.</li>
         <li>If the extension is still not working properly, try contacting Sarrus on Discord (sarrus_).</li>
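The surviving `*ToWebView` helpers all share one shape: each check is a tiny state machine that starts unchecked and settles into OK/Warning/Error, and rendering just maps states to list items. A compact Rust rendition of that pattern (the real implementation is the TypeScript in doctor.ts; the names here are illustrative):

```rust
/// Mirrors doctor.ts's DiagnosticState: checks start in `Unchecked` and
/// settle into one of three outcomes as the async probes complete.
#[derive(Clone, Copy)]
enum DiagnosticState {
    Unchecked,
    Ok,
    Warning,
    Error,
}

fn to_webview(label: &str, state: DiagnosticState) -> String {
    let icon = match state {
        DiagnosticState::Ok => "✅",
        DiagnosticState::Warning => "⚠️",
        DiagnosticState::Error => "❌",
        DiagnosticState::Unchecked => "🩺",
    };
    format!("<li>{icon} {label}</li>")
}

fn main() {
    println!("{}", to_webview("spcomp is executable.", DiagnosticState::Ok));
    println!("{}", to_webview("Checking include directories.", DiagnosticState::Unchecked));
}
```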
@@ -93,61 +75,11 @@ class Doctor { } async runDiagnostics() { - this.checkLSP(); this.checkSettings(); this.checkIncludesDirectories(); this.checkSpComp(); } - lspDiagnosticToWebview(): string { - const diagnostics = []; - switch (this.isLSPInstalled) { - case DiagnosticState.OK: - diagnostics.push("✅ Language Server installed."); - break; - case DiagnosticState.Error: - diagnostics.push("❌ Language Server not installed."); - break; - case DiagnosticState.None: - diagnostics.push("🩺 Checking if the Language Server is installed."); - break; - } - switch (this.isLSPExecutable) { - case DiagnosticState.OK: - diagnostics.push("✅ Language Server is executable."); - break; - case DiagnosticState.Error: - diagnostics.push("❌ Language Server is not executable."); - break; - case DiagnosticState.None: - diagnostics.push("🩺 Checking if the Language Server is executable."); - break; - } - - return diagnostics.map((d) => `
- <li>${d}</li>
    • `).join("\n"); - } - - async checkLSP() { - fs.stat(ctx?.serverPath, (err, _stats) => { - if (err) { - this.isLSPInstalled = DiagnosticState.Error; - return; - } - if (!_stats?.isFile()) { - this.isLSPInstalled = DiagnosticState.Error; - return; - } - this.isLSPInstalled = DiagnosticState.OK; - }); - const version = await ctx?.getServerVersionFromBinaryAsync(); - if (version === undefined) { - this.isLSPExecutable = DiagnosticState.Error; - return; - } - this.isLSPExecutable = DiagnosticState.OK; - this.lspVersion = version; - } - spCompToWebView(): string { const diagnostics = []; switch (this.isSPCompSet) { @@ -250,22 +182,22 @@ class Doctor { switch (this.isSMInstalled) { case DiagnosticState.OK: diagnostics.push( - '✅ "SourcePawnLanguageServer.includeDirectories" contains at least one entry that contains "sourcemod.inc".' + '✅ "SourcePawnLanguageServer.includesDirectories" contains at least one entry that contains "sourcemod.inc".' ); break; case DiagnosticState.Error: diagnostics.push( - '❌ "SourcePawnLanguageServer.includeDirectories" contains at least one invalid entry".' + '❌ "SourcePawnLanguageServer.includesDirectories" contains at least one invalid entry".' ); break; case DiagnosticState.Warning: diagnostics.push( - '⚠️ "SourcePawnLanguageServer.includeDirectories" contains at least one entry that was not scanned properly.' + '⚠️ "SourcePawnLanguageServer.includesDirectories" contains at least one entry that was not scanned properly.' ); break; case DiagnosticState.None: diagnostics.push( - '🩺 Checking if "SourcePawnLanguageServer.includeDirectories" is set.' + '🩺 Checking if "SourcePawnLanguageServer.includesDirectories" is set.' ); break; } @@ -308,108 +240,9 @@ class Doctor { }); } - mainPathToWebView(): string { - const diagnostics = []; - switch (this.isMainPathSet) { - case DiagnosticState.OK: - diagnostics.push('✅ "SourcePawnLanguageServer.mainPath" is set.'); - break; - case DiagnosticState.Error: - diagnostics.push('❌ "SourcePawnLanguageServer.mainPath" is not set.'); - break; - case DiagnosticState.Warning: - diagnostics.push( - '⚠️ "SourcePawnLanguageServer.mainPath" is not set. Consider setting it for the extension to work properly.' - ); - break; - case DiagnosticState.None: - diagnostics.push( - '🩺 Checking if "SourcePawnLanguageServer.mainPath" is set.' - ); - break; - } - - switch (this.isMainPathValid) { - case DiagnosticState.OK: - diagnostics.push( - `✅ "SourcePawnLanguageServer.mainPath" points to a file (value: ${this.spCompPath}).` - ); - break; - case DiagnosticState.Error: - diagnostics.push( - `❌ "SourcePawnLanguageServer.mainPath" does not point to a file (value: ${this.spCompPath}).` - ); - break; - case DiagnosticState.None: - diagnostics.push( - '🩺 Checking if "SourcePawnLanguageServer.mainPath" points to a file.' - ); - break; - } - - switch (this.isMainPathCorrect) { - case DiagnosticState.OK: - diagnostics.push( - `✅ "SourcePawnLanguageServer.mainPath" points to a file that contains "OnPluginStart".` - ); - break; - case DiagnosticState.Warning: - diagnostics.push( - `⚠️ "SourcePawnLanguageServer.mainPath" points to a file that does not contain "OnPluginStart". This does not mean that it is incorrect.` - ); - break; - case DiagnosticState.Error: - diagnostics.push( - `❌ "SourcePawnLanguageServer.mainPath" points to a file of which the content cannot be read` - ); - break; - case DiagnosticState.None: - diagnostics.push( - '🩺 Checking if "SourcePawnLanguageServer.mainPath" points to a file that contains "OnPluginStart".' 
- ); - break; - } - - return diagnostics.map((d) => `
- <li>${d}</li>
    • `).join("\n"); - } - async checkSettings() { this.checkSpComp(); this.checkIncludesDirectories(); - const mainPath: string = vscode.workspace - .getConfiguration("SourcePawnLanguageServer") - .get("mainPath"); - if (!mainPath) { - this.isMainPathSet = DiagnosticState.Warning; - this.isMainPathValid = DiagnosticState.Warning; - this.isMainPathCorrect = DiagnosticState.Warning; - return; - } - this.isMainPathSet = DiagnosticState.OK; - fs.stat(mainPath, (err, _stats) => { - if (err) { - this.isMainPathValid = DiagnosticState.Error; - this.isMainPathCorrect = DiagnosticState.Error; - return; - } - if (!_stats?.isFile()) { - this.isMainPathValid = DiagnosticState.Error; - this.isMainPathCorrect = DiagnosticState.Error; - return; - } - this.isMainPathValid = DiagnosticState.OK; - fs.readFile(mainPath, (err, files) => { - if (err) { - this.isMainPathCorrect = DiagnosticState.Error; - return; - } - if (!files.toString().includes("OnPluginStart")) { - this.isMainPathCorrect = DiagnosticState.Warning; - return; - } - this.isMainPathCorrect = DiagnosticState.OK; - }); - }); } } diff --git a/editors/code/src/Commands/installSM.ts b/editors/code/src/Commands/installSM.ts index 560648e45..b315e5475 100644 --- a/editors/code/src/Commands/installSM.ts +++ b/editors/code/src/Commands/installSM.ts @@ -60,13 +60,13 @@ export async function run(args: any) { } function updatePath(smDir: string, spComp: string): void { - const includeDirectories = Workspace.getConfiguration( + const includesDirectories = Workspace.getConfiguration( "SourcePawnLanguageServer" ).get("includesDirectories"); - includeDirectories.push(smDir); + includesDirectories.push(smDir); Workspace.getConfiguration("SourcePawnLanguageServer").update( "includesDirectories", - Array.from(new Set(includeDirectories)), // avoid duplicates + Array.from(new Set(includesDirectories)), // avoid duplicates true ); Workspace.getConfiguration("SourcePawnLanguageServer").update( diff --git a/editors/code/src/Commands/projectsGraphviz.ts b/editors/code/src/Commands/projectsGraphviz.ts new file mode 100644 index 000000000..0e7cb8d1a --- /dev/null +++ b/editors/code/src/Commands/projectsGraphviz.ts @@ -0,0 +1,41 @@ +import * as vscode from "vscode"; +import { projectsGraphviz, ProjectsGraphvizParams } from "../lsp_ext"; +import { ctx } from "../spIndex"; + +export async function run(args: any) { + if (!vscode.extensions.getExtension("graphviz-interactive-preview.preview")) { + vscode.window + .showErrorMessage( + "The extension 'graphviz-interactive-preview' is required to run this command.", + "Install" + ) + .then((msg) => { + if (msg === "Install") { + vscode.commands.executeCommand( + "workbench.extensions.search", + "graphviz-interactive-preview" + ); + } + }); + return; + } + const params: ProjectsGraphvizParams = {}; + const doc = vscode.window.activeTextEditor?.document; + if (doc !== undefined) { + params.textDocument = + ctx?.client.code2ProtocolConverter.asTextDocumentIdentifier(doc); + } + let content = await ctx?.client.sendRequest(projectsGraphviz, params); + if (content === undefined) { + content = ""; + } + let options = { + content, + title: "Sourcepawn Dependency Graph", + }; + + vscode.commands.executeCommand( + "graphviz-interactive-preview.preview.beside", + options + ); +} diff --git a/editors/code/src/Commands/registerCommands.ts b/editors/code/src/Commands/registerCommands.ts index aac78fc9d..d0eadecab 100644 --- a/editors/code/src/Commands/registerCommands.ts +++ b/editors/code/src/Commands/registerCommands.ts @@ -8,13 +8,13 @@ 
import { run as CompileSMCommand } from "./compileSM"; import { run as UploadToServerCommand } from "./uploadToServer"; import { run as RefreshPluginsCommand } from "./refreshPlugins"; import { run as InsertParametersCommand } from "./insertParameters"; -import { run as setFileAsMainCommand } from "./setFileAsMain"; import { run as installSMCommand } from "./installSM"; import { run as createChangelogCommand } from "./createCHANGELOG"; import { run as createGitignoreCommand } from "./createGITIGNORE"; import { run as createLicenseCommand } from "./createLICENSE"; import { run as changeSMApiCommand } from "./changeSMApi"; import { run as doctorCommand } from "./doctor"; +import { run as projectsGraphvizCommand } from "./projectsGraphviz"; import { preprocessedDocumentCommand } from "./preprocessedDocument"; import { CommandFactory } from "../ctx"; @@ -24,101 +24,101 @@ import { CommandFactory } from "../ctx"; * @returns void */ export function registerSMCommands(context: vscode.ExtensionContext): void { - const createTask = vscode.commands.registerCommand( - "sourcepawn-vscode.createTask", - CreateTaskCommand.bind(undefined) - ); - context.subscriptions.push(createTask); - - const createScript = vscode.commands.registerCommand( - "sourcepawn-vscode.createScript", - CreateScriptCommand.bind(undefined) - ); - context.subscriptions.push(createScript); - - const createREADME = vscode.commands.registerCommand( - "sourcepawn-vscode.createREADME", - CreateREADMECommand.bind(undefined) - ); - context.subscriptions.push(createREADME); - - const createMaster = vscode.commands.registerCommand( - "sourcepawn-vscode.createMaster", - CreateMasterCommand.bind(undefined) - ); - context.subscriptions.push(createMaster); - - const createProject = vscode.commands.registerCommand( - "sourcepawn-vscode.createProject", - CreateProjectCommand.bind(undefined) - ); - context.subscriptions.push(createProject); - - const compileSM = vscode.commands.registerCommand( - "sourcepawn-vscode.compileSM", - CompileSMCommand.bind(undefined) - ); - context.subscriptions.push(compileSM); - - const uploadToServer = vscode.commands.registerCommand( - "sourcepawn-vscode.uploadToServer", - UploadToServerCommand.bind(undefined) - ); - context.subscriptions.push(uploadToServer); - - const refreshPlugins = vscode.commands.registerCommand( - "sourcepawn-vscode.refreshPlugins", - RefreshPluginsCommand.bind(undefined) - ); - context.subscriptions.push(refreshPlugins); - - const insertParameters = vscode.commands.registerCommand( - "sourcepawn-vscode.insertParameters", - InsertParametersCommand.bind(undefined) - ); - context.subscriptions.push(insertParameters); - - const setFileAsMain = vscode.commands.registerCommand( - "sourcepawn-vscode.setFileAsMain", - setFileAsMainCommand.bind(undefined) - ); - context.subscriptions.push(setFileAsMain); - - const installSM = vscode.commands.registerCommand( - "sourcepawn-vscode.installSM", - installSMCommand.bind(undefined) - ); - context.subscriptions.push(installSM); - - const createChangelog = vscode.commands.registerCommand( - "sourcepawn-vscode.createChangelog", - createChangelogCommand.bind(undefined) - ); - context.subscriptions.push(createChangelog); - - const createGitignore = vscode.commands.registerCommand( - "sourcepawn-vscode.createGitignore", - createGitignoreCommand.bind(undefined) - ); - context.subscriptions.push(createGitignore); - - const createLicense = vscode.commands.registerCommand( - "sourcepawn-vscode.createLicense", - createLicenseCommand.bind(undefined) - ); - 
context.subscriptions.push(createLicense); - - const changeSMApi = vscode.commands.registerCommand( - "sourcepawn-vscode.changeSMApi", - changeSMApiCommand.bind(undefined) - ); - context.subscriptions.push(changeSMApi); - - const doctor = vscode.commands.registerCommand( - "sourcepawn-vscode.doctor", - doctorCommand.bind(undefined) - ); - context.subscriptions.push(doctor); + const createTask = vscode.commands.registerCommand( + "sourcepawn-vscode.createTask", + CreateTaskCommand.bind(undefined) + ); + context.subscriptions.push(createTask); + + const createScript = vscode.commands.registerCommand( + "sourcepawn-vscode.createScript", + CreateScriptCommand.bind(undefined) + ); + context.subscriptions.push(createScript); + + const createREADME = vscode.commands.registerCommand( + "sourcepawn-vscode.createREADME", + CreateREADMECommand.bind(undefined) + ); + context.subscriptions.push(createREADME); + + const createMaster = vscode.commands.registerCommand( + "sourcepawn-vscode.createMaster", + CreateMasterCommand.bind(undefined) + ); + context.subscriptions.push(createMaster); + + const createProject = vscode.commands.registerCommand( + "sourcepawn-vscode.createProject", + CreateProjectCommand.bind(undefined) + ); + context.subscriptions.push(createProject); + + const compileSM = vscode.commands.registerCommand( + "sourcepawn-vscode.compileSM", + CompileSMCommand.bind(undefined) + ); + context.subscriptions.push(compileSM); + + const uploadToServer = vscode.commands.registerCommand( + "sourcepawn-vscode.uploadToServer", + UploadToServerCommand.bind(undefined) + ); + context.subscriptions.push(uploadToServer); + + const refreshPlugins = vscode.commands.registerCommand( + "sourcepawn-vscode.refreshPlugins", + RefreshPluginsCommand.bind(undefined) + ); + context.subscriptions.push(refreshPlugins); + + const insertParameters = vscode.commands.registerCommand( + "sourcepawn-vscode.insertParameters", + InsertParametersCommand.bind(undefined) + ); + context.subscriptions.push(insertParameters); + + const installSM = vscode.commands.registerCommand( + "sourcepawn-vscode.installSM", + installSMCommand.bind(undefined) + ); + context.subscriptions.push(installSM); + + const createChangelog = vscode.commands.registerCommand( + "sourcepawn-vscode.createChangelog", + createChangelogCommand.bind(undefined) + ); + context.subscriptions.push(createChangelog); + + const createGitignore = vscode.commands.registerCommand( + "sourcepawn-vscode.createGitignore", + createGitignoreCommand.bind(undefined) + ); + context.subscriptions.push(createGitignore); + + const createLicense = vscode.commands.registerCommand( + "sourcepawn-vscode.createLicense", + createLicenseCommand.bind(undefined) + ); + context.subscriptions.push(createLicense); + + const changeSMApi = vscode.commands.registerCommand( + "sourcepawn-vscode.changeSMApi", + changeSMApiCommand.bind(undefined) + ); + context.subscriptions.push(changeSMApi); + + const projectsGraphviz = vscode.commands.registerCommand( + "sourcepawn-vscode.projectsGraphviz", + projectsGraphvizCommand.bind(undefined) + ); + context.subscriptions.push(projectsGraphviz); + + const doctor = vscode.commands.registerCommand( + "sourcepawn-vscode.doctor", + doctorCommand.bind(undefined) + ); + context.subscriptions.push(doctor); } /** @@ -126,34 +126,34 @@ export function registerSMCommands(context: vscode.ExtensionContext): void { * @returns Record */ export function createServerCommands(): Record { - return { - startServer: { - enabled: (ctx) => async () => { - await ctx.restart(); - 
}, - disabled: (ctx) => async () => { - await ctx.start(); - }, - }, - stopServer: { - enabled: (ctx) => async () => { - await ctx.stopAndDispose(); - ctx.setServerStatus({ - health: "stopped", - }); - }, - disabled: (_) => async () => {}, - }, - openLogs: { - enabled: (ctx) => async () => { - if (ctx.client.outputChannel) { - ctx.client.outputChannel.show(); - } - }, - disabled: (_) => async () => {}, - }, - preprocessedDocument: { - enabled: preprocessedDocumentCommand, - }, - }; + return { + startServer: { + enabled: (ctx) => async () => { + await ctx.restart(); + }, + disabled: (ctx) => async () => { + await ctx.start(); + }, + }, + stopServer: { + enabled: (ctx) => async () => { + await ctx.stopAndDispose(); + ctx.setServerStatus({ + health: "stopped", + }); + }, + disabled: (_) => async () => {}, + }, + openLogs: { + enabled: (ctx) => async () => { + if (ctx.client.outputChannel) { + ctx.client.outputChannel.show(); + } + }, + disabled: (_) => async () => {}, + }, + preprocessedDocument: { + enabled: preprocessedDocumentCommand, + }, + }; } diff --git a/editors/code/src/Commands/setFileAsMain.ts b/editors/code/src/Commands/setFileAsMain.ts deleted file mode 100644 index 6117f4888..000000000 --- a/editors/code/src/Commands/setFileAsMain.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { workspace as Workspace, window } from "vscode"; -import { URI } from "vscode-uri"; - -/** - * Callback for the Set Current File As Main command. - * @param {URI} args URI of the document to be set as main. - * @returns Promise - */ -export async function run(args: URI): Promise { - if (args === undefined) { - if (window.activeTextEditor === undefined) { - return 1; - } - args = window.activeTextEditor.document.uri; - } - const workspaceFolder = Workspace.getWorkspaceFolder(args); - await Workspace.getConfiguration( - "SourcePawnLanguageServer", - workspaceFolder - ).update("mainPath", args.fsPath); - - return 0; -} diff --git a/editors/code/src/Commands/uploadToServer.ts b/editors/code/src/Commands/uploadToServer.ts index 0fb151056..423fee6e3 100644 --- a/editors/code/src/Commands/uploadToServer.ts +++ b/editors/code/src/Commands/uploadToServer.ts @@ -2,7 +2,9 @@ import { join } from "path"; import { run as refreshPluginsCommand } from "./refreshPlugins"; -import { findMainPath } from "../spUtils"; +import { ctx } from "../spIndex"; +import { ProjectMainPathParams, projectMainPath } from "../lsp_ext"; +import { URI } from "vscode-uri"; const FTPDeploy = require("ftp-deploy"); export async function run(args: any) { @@ -45,7 +47,11 @@ export async function run(args: any) { config["deleteRemote"] = false; if (config["localRoot"] === "${mainPath}") { - config["localRoot"] = findMainPath(); + const params: ProjectMainPathParams = { + uri: vscode.window.activeTextEditor.document.uri.toString(), + }; + const mainUri = await ctx?.client.sendRequest(projectMainPath, params); + config["localRoot"] = URI.parse(mainUri).fsPath; } if (config["isRootRelative"]) { diff --git a/editors/code/src/lsp_ext.ts b/editors/code/src/lsp_ext.ts index fc9e168e1..2de99c059 100644 --- a/editors/code/src/lsp_ext.ts +++ b/editors/code/src/lsp_ext.ts @@ -14,6 +14,26 @@ export type PreprocessedDocumentParams = { textDocument?: lc.TextDocumentIdentifier; }; +export const projectMainPath = new lc.RequestType< + ProjectMainPathParams, + lc.URI, + void +>("sourcepawn-lsp/projectMainPath"); + +export type ProjectMainPathParams = { + uri?: lc.URI; +}; + +export const projectsGraphviz = new lc.RequestType< + ProjectsGraphvizParams, + string, + void 
+>("sourcepawn-lsp/projectsGraphviz"); + +export type ProjectsGraphvizParams = { + textDocument?: lc.TextDocumentIdentifier; +}; + export const serverStatus = new lc.NotificationType( "sourcepawn-lsp/serverStatus" ); diff --git a/editors/code/src/spIndex.ts b/editors/code/src/spIndex.ts index d3de3d3b4..f4e8876b1 100644 --- a/editors/code/src/spIndex.ts +++ b/editors/code/src/spIndex.ts @@ -7,7 +7,6 @@ import { import { SMDocumentFormattingEditProvider } from "./Formatters/spFormat"; import { KVDocumentFormattingEditProvider } from "./Formatters/kvFormat"; -import { migrateSettings } from "./spUtils"; import { Ctx } from "./ctx"; import { registerKVLinter } from "./Keyvalues/registerKVLinter"; import { buildDoctorStatusBar } from "./Commands/doctor"; @@ -15,7 +14,6 @@ import { buildDoctorStatusBar } from "./Commands/doctor"; export let ctx: Ctx | undefined; export async function activate(context: vscode.ExtensionContext) { - migrateSettings(); ctx = new Ctx(context, createServerCommands()); ctx.start(); diff --git a/editors/code/src/spUtils.ts b/editors/code/src/spUtils.ts index 0c54a0603..e79002115 100644 --- a/editors/code/src/spUtils.ts +++ b/editors/code/src/spUtils.ts @@ -1,82 +1,3 @@ -import { workspace as Workspace } from "vscode"; -import { URI } from "vscode-uri"; -import { existsSync } from "fs"; -import { resolve } from "path"; - -/** - * Find the MainPath setting for a given URI. - * Will return an empty string if the mainpath setting doesn't point to an - * existing location, and will return undefined if nothing is found. - * @param {Uri} uri? The URI we are looking up the MainPath for. - * @returns string | undefined - */ -export function findMainPath(uri?: URI): string | undefined { - const workspaceFolders = Workspace.workspaceFolders; - const workspaceFolder = - uri === undefined ? undefined : Workspace.getWorkspaceFolder(uri); - let mainPath: string = - Workspace.getConfiguration("SourcePawnLanguageServer", workspaceFolder).get( - "mainPath" - ) || ""; - if (mainPath === "") { - return undefined; - } - // Check if it exists, meaning it's an absolute path. - if (!existsSync(mainPath) && workspaceFolders !== undefined) { - // If it doesn't, loop over the workspace folders until one matches. - for (const wk of workspaceFolders) { - mainPath = resolve(wk.uri.fsPath, mainPath); - if (existsSync(mainPath)) { - return mainPath; - } - } - return ""; - } else { - return mainPath; - } -} - -/** - * If needed, migrate the settings of the user to use the LanguageServer. 
- */ -export function migrateSettings() { - const smHome: string = - Workspace.getConfiguration("sourcepawn").get("SourcemodHome"); - const optionalIncludeDirsPaths: string[] = Workspace.getConfiguration( - "sourcepawn" - ).get("optionalIncludeDirsPaths"); - - const includesDirectories: string[] = Workspace.getConfiguration( - "SourcePawnLanguageServer" - ).get("includesDirectories"); - - const oldSpcompPath: string = - Workspace.getConfiguration("sourcepawn").get("SpcompPath"); - - const newSpcompPath: string = Workspace.getConfiguration( - "SourcePawnLanguageServer" - ).get("spcompPath"); - - if ( - (includesDirectories.length == 0 && smHome) || - (!newSpcompPath && oldSpcompPath) - ) { - Workspace.getConfiguration("SourcePawnLanguageServer").update( - "includesDirectories", - Array.from(new Set([smHome].concat(optionalIncludeDirsPaths))), - true - ); - - if (oldSpcompPath && !newSpcompPath) { - Workspace.getConfiguration("SourcePawnLanguageServer").update( - "spcompPath", - oldSpcompPath, - true - ); - } - } -} - -export function sleep(ms: number) { +export function sleep(ms: number) { return new Promise((resolve) => setTimeout(resolve, ms)); }
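
Taken together, the `uploadToServer.ts` hunk and the deletion of `findMainPath`/`setFileAsMain.ts` show the new pattern: instead of reading a `mainPath` setting, the client asks the server over the new `sourcepawn-lsp/projectMainPath` request and converts the returned URI to a filesystem path. A minimal sketch of a reusable helper built from the pieces added in this diff (the `resolveMainPath` name and its module location next to the other commands are hypothetical; `ctx` and the `lsp_ext` exports are the ones introduced above):

```typescript
import * as vscode from "vscode";
import { URI } from "vscode-uri";

import { ctx } from "../spIndex";
import { ProjectMainPathParams, projectMainPath } from "../lsp_ext";

// Hypothetical helper (not part of this diff): resolve the project's main
// file by asking the language server, the same way uploadToServer.ts now does.
export async function resolveMainPath(): Promise<string | undefined> {
  const editor = vscode.window.activeTextEditor;
  if (editor === undefined || ctx === undefined) {
    return undefined;
  }
  const params: ProjectMainPathParams = {
    uri: editor.document.uri.toString(),
  };
  // The server resolves which project the given document belongs to and
  // answers with the URI of that project's main file.
  const mainUri = await ctx.client.sendRequest(projectMainPath, params);
  return URI.parse(mainUri).fsPath;
}
```

Unlike the deleted `findMainPath`, which probed the workspace folders on disk, this returns `undefined` when there is no active editor or the client is not running, so callers should handle that case explicitly.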
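The `sourcepawn-lsp/projectsGraphviz` request is only declared in this diff, with no consumer shown. A plausible command callback is sketched below; the `run` name follows the convention of the other command modules, and the assumption that the server answers with Graphviz DOT source is not confirmed here (the diff only types the response as `string`):

```typescript
import * as vscode from "vscode";

import { ctx } from "../spIndex";
import { ProjectsGraphvizParams, projectsGraphviz } from "../lsp_ext";

// Hypothetical command callback (not part of this diff): fetch the server's
// view of the projects graph for the current document.
export async function run(): Promise<void> {
  const editor = vscode.window.activeTextEditor;
  if (editor === undefined || ctx === undefined) {
    return;
  }
  const params: ProjectsGraphvizParams = {
    textDocument: { uri: editor.document.uri.toString() },
  };
  const dot = await ctx.client.sendRequest(projectsGraphviz, params);
  // Open the returned graph source in an untitled editor so it can be
  // inspected or rendered with an external Graphviz tool.
  const doc = await vscode.workspace.openTextDocument({ content: dot });
  await vscode.window.showTextDocument(doc);
}
```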