diff --git a/Cargo.lock b/Cargo.lock index c711c7ae826..20a7082ea74 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -219,6 +219,12 @@ dependencies = [ "syn 2.0.28", ] +[[package]] +name = "async-weighted-semaphore" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0c3c4e4764855498d3ee6554748ff2d9376bc4c937692e266a56dd4ac098cfb" + [[package]] name = "atomic-polyfill" version = "0.1.11" @@ -6539,10 +6545,10 @@ checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" [[package]] name = "tower-lsp" version = "0.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b38fb0e6ce037835174256518aace3ca621c4f96383c56bb846cfc11b341910" +source = "git+https://github.com/ebkalderon/tower-lsp?branch=support-mutable-methods#8fe0f21e9d386b6bf2561ee2bf9590d89724cf5a" dependencies = [ "async-trait", + "async-weighted-semaphore", "auto_impl", "bytes", "dashmap", @@ -6562,8 +6568,7 @@ dependencies = [ [[package]] name = "tower-lsp-macros" version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34723c06344244474fdde365b76aebef8050bf6be61a935b91ee9ff7c4e91157" +source = "git+https://github.com/ebkalderon/tower-lsp?branch=support-mutable-methods#8fe0f21e9d386b6bf2561ee2bf9590d89724cf5a" dependencies = [ "proc-macro2", "quote", diff --git a/forc-plugins/forc-fmt/src/main.rs b/forc-plugins/forc-fmt/src/main.rs index 845941e8e95..efb41a7de64 100644 --- a/forc-plugins/forc-fmt/src/main.rs +++ b/forc-plugins/forc-fmt/src/main.rs @@ -256,7 +256,7 @@ fn format_pkg_at_dir(app: &App, dir: &Path, formatter: &mut Formatter) -> Result Some(path) => { let manifest_path = path.clone(); let manifest_file = manifest_path.join(constants::MANIFEST_FILE_NAME); - let files = get_sway_files(path); + let files = get_sway_files(&path); let mut contains_edits = false; for file in files { diff --git a/sway-lsp/Cargo.toml b/sway-lsp/Cargo.toml index e96ba244644..90d73c2102d 100644 --- a/sway-lsp/Cargo.toml +++ b/sway-lsp/Cargo.toml @@ -34,7 +34,7 @@ tempfile = "3" thiserror = "1.0.30" tokio = { version = "1.3", features = ["io-std", "io-util", "macros", "net", "rt-multi-thread", "sync", "time"] } toml_edit = "0.19" -tower-lsp = { version = "0.19", features = ["proposed"] } +tower-lsp = { git = "https://github.com/ebkalderon/tower-lsp", branch = "support-mutable-methods" } tracing = "0.1" urlencoding = "2.1.2" diff --git a/sway-lsp/benches/lsp_benchmarks/mod.rs b/sway-lsp/benches/lsp_benchmarks/mod.rs index eb140de96b2..f3e0a83147a 100644 --- a/sway-lsp/benches/lsp_benchmarks/mod.rs +++ b/sway-lsp/benches/lsp_benchmarks/mod.rs @@ -3,18 +3,18 @@ pub mod requests; pub mod token_map; use lsp_types::Url; -use std::{path::PathBuf, sync::Arc}; +use std::path::PathBuf; use sway_lsp::core::session::{self, Session}; -pub fn compile_test_project() -> (Url, Arc) { - let session = Session::new(); +pub fn compile_test_project() -> (Url, Session) { + let mut session = Session::new(); // Load the test project let uri = Url::from_file_path(benchmark_dir().join("src/main.sw")).unwrap(); session.handle_open_file(&uri); // Compile the project and write the parse result to the session let parse_result = session::parse_project(&uri).unwrap(); session.write_parse_result(parse_result); - (uri, Arc::new(session)) + (uri, session) } pub fn sway_workspace_dir() -> PathBuf { diff --git a/sway-lsp/benches/lsp_benchmarks/requests.rs b/sway-lsp/benches/lsp_benchmarks/requests.rs index 649db22059e..1bb9b021b70 100644 --- 
a/sway-lsp/benches/lsp_benchmarks/requests.rs +++ b/sway-lsp/benches/lsp_benchmarks/requests.rs @@ -6,14 +6,14 @@ use lsp_types::{ use sway_lsp::{capabilities, lsp_ext::OnEnterParams, utils::keyword_docs::KeywordDocs}; fn benchmarks(c: &mut Criterion) { - let (uri, session) = black_box(super::compile_test_project()); + let (uri, ref session) = black_box(super::compile_test_project()); let config = sway_lsp::config::Config::default(); let keyword_docs = KeywordDocs::new(); let position = Position::new(1717, 24); let range = Range::new(Position::new(1628, 0), Position::new(1728, 0)); c.bench_function("semantic_tokens", |b| { - b.iter(|| capabilities::semantic_tokens::semantic_tokens_full(session.clone(), &uri)) + b.iter(|| capabilities::semantic_tokens::semantic_tokens_full(session, &uri)) }); c.bench_function("document_symbol", |b| { @@ -34,13 +34,11 @@ fn benchmarks(c: &mut Criterion) { }); c.bench_function("hover", |b| { - b.iter(|| { - capabilities::hover::hover_data(session.clone(), &keyword_docs, uri.clone(), position) - }) + b.iter(|| capabilities::hover::hover_data(session, &keyword_docs, uri.clone(), position)) }); c.bench_function("highlight", |b| { - b.iter(|| capabilities::highlight::get_highlights(session.clone(), uri.clone(), position)) + b.iter(|| capabilities::highlight::get_highlights(session, uri.clone(), position)) }); c.bench_function("goto_definition", |b| { @@ -49,23 +47,18 @@ fn benchmarks(c: &mut Criterion) { c.bench_function("inlay_hints", |b| { b.iter(|| { - capabilities::inlay_hints::inlay_hints( - session.clone(), - &uri, - &range, - &config.inlay_hints, - ) + capabilities::inlay_hints::inlay_hints(session, &uri, &range, &config.inlay_hints) }) }); c.bench_function("prepare_rename", |b| { - b.iter(|| capabilities::rename::prepare_rename(session.clone(), uri.clone(), position)) + b.iter(|| capabilities::rename::prepare_rename(session, uri.clone(), position)) }); c.bench_function("rename", |b| { b.iter(|| { capabilities::rename::rename( - session.clone(), + session, "new_token_name".to_string(), uri.clone(), position, @@ -75,11 +68,11 @@ fn benchmarks(c: &mut Criterion) { c.bench_function("code_action", |b| { let range = Range::new(Position::new(4, 10), Position::new(4, 10)); - b.iter(|| capabilities::code_actions::code_actions(session.clone(), &range, &uri, &uri)) + b.iter(|| capabilities::code_actions::code_actions(session, &range, &uri, &uri)) }); c.bench_function("code_lens", |b| { - b.iter(|| capabilities::code_lens::code_lens(&session, &uri.clone())) + b.iter(|| capabilities::code_lens::code_lens(session, &uri.clone())) }); c.bench_function("on_enter", |b| { @@ -91,7 +84,7 @@ fn benchmarks(c: &mut Criterion) { text: "\n".to_string(), }], }; - b.iter(|| capabilities::on_enter::on_enter(&config.on_enter, &session, &uri, ¶ms)) + b.iter(|| capabilities::on_enter::on_enter(&config.on_enter, session, &uri, ¶ms)) }); } diff --git a/sway-lsp/benches/lsp_benchmarks/token_map.rs b/sway-lsp/benches/lsp_benchmarks/token_map.rs index 5ae1ff64b3d..69d8fc63eaf 100644 --- a/sway-lsp/benches/lsp_benchmarks/token_map.rs +++ b/sway-lsp/benches/lsp_benchmarks/token_map.rs @@ -3,7 +3,7 @@ use lsp_types::Position; fn benchmarks(c: &mut Criterion) { let (uri, session) = black_box(super::compile_test_project()); - let engines = session.engines.read(); + let engines = &session.engines; let position = Position::new(1716, 24); c.bench_function("tokens_for_file", |b| { diff --git a/sway-lsp/src/capabilities/code_actions/mod.rs b/sway-lsp/src/capabilities/code_actions/mod.rs index 
48abda9da65..4263a6fb616 100644 --- a/sway-lsp/src/capabilities/code_actions/mod.rs +++ b/sway-lsp/src/capabilities/code_actions/mod.rs @@ -20,7 +20,7 @@ use lsp_types::{ CodeActionResponse, Position, Range, TextEdit, Url, WorkspaceEdit, }; use serde_json::Value; -use std::{collections::HashMap, sync::Arc}; +use std::collections::HashMap; use sway_core::{language::ty, Engines}; use sway_types::Spanned; @@ -37,18 +37,17 @@ pub(crate) struct CodeActionContext<'a> { } pub fn code_actions( - session: Arc, + session: &Session, range: &Range, uri: &Url, temp_uri: &Url, ) -> Option { - let engines = session.engines.read(); let (_, token) = session .token_map() .token_at_position(temp_uri, range.start)?; let ctx = CodeActionContext { - engines: &engines, + engines: &session.engines, tokens: session.token_map(), token: &token, uri, diff --git a/sway-lsp/src/capabilities/code_lens.rs b/sway-lsp/src/capabilities/code_lens.rs index d4c92c52648..68f7c2e73f9 100644 --- a/sway-lsp/src/capabilities/code_lens.rs +++ b/sway-lsp/src/capabilities/code_lens.rs @@ -1,10 +1,10 @@ -use std::{path::PathBuf, sync::Arc}; +use std::path::PathBuf; use lsp_types::{CodeLens, Url}; use crate::core::session::Session; -pub fn code_lens(session: &Arc, url: &Url) -> Vec { +pub fn code_lens(session: &Session, url: &Url) -> Vec { let url_path = PathBuf::from(url.path()); // Construct code lenses for runnable functions diff --git a/sway-lsp/src/capabilities/diagnostic.rs b/sway-lsp/src/capabilities/diagnostic.rs index 69ff60624b1..80518977b3c 100644 --- a/sway-lsp/src/capabilities/diagnostic.rs +++ b/sway-lsp/src/capabilities/diagnostic.rs @@ -61,7 +61,6 @@ pub fn get_diagnostics( .push(diagnostic); } } - diagnostics } diff --git a/sway-lsp/src/capabilities/highlight.rs b/sway-lsp/src/capabilities/highlight.rs index 63949d101c4..c0e13246956 100644 --- a/sway-lsp/src/capabilities/highlight.rs +++ b/sway-lsp/src/capabilities/highlight.rs @@ -1,9 +1,8 @@ use crate::core::session::Session; use lsp_types::{DocumentHighlight, Position, Url}; -use std::sync::Arc; pub fn get_highlights( - session: Arc, + session: &Session, url: Url, position: Position, ) -> Option> { diff --git a/sway-lsp/src/capabilities/hover/hover_link_contents.rs b/sway-lsp/src/capabilities/hover/hover_link_contents.rs index 4618702361d..72534c2ba35 100644 --- a/sway-lsp/src/capabilities/hover/hover_link_contents.rs +++ b/sway-lsp/src/capabilities/hover/hover_link_contents.rs @@ -2,7 +2,6 @@ use crate::{ core::{session::Session, token::get_range_from_span}, utils::document::get_url_from_span, }; -use std::sync::Arc; use sway_core::{ language::{ ty::{TyDecl, TyTraitDecl}, @@ -26,12 +25,12 @@ pub struct RelatedType { pub struct HoverLinkContents<'a> { pub related_types: Vec, pub implementations: Vec, - session: Arc, + session: &'a Session, engines: &'a Engines, } impl<'a> HoverLinkContents<'a> { - pub fn new(session: Arc, engines: &'a Engines) -> Self { + pub fn new(session: &'a Session, engines: &'a Engines) -> Self { Self { related_types: Vec::new(), implementations: Vec::new(), @@ -82,7 +81,7 @@ impl<'a> HoverLinkContents<'a> { /// Adds all implementations of the given [TyTraitDecl] to the list of implementations. 
pub fn add_implementations_for_trait(&mut self, trait_decl: &TyTraitDecl) { if let Some(namespace) = self.session.namespace() { - let call_path = CallPath::from(trait_decl.name.clone()).to_fullpath(&namespace); + let call_path = CallPath::from(trait_decl.name.clone()).to_fullpath(namespace); let impl_spans = namespace.get_impl_spans_for_trait_name(&call_path); self.add_implementations(&trait_decl.span(), impl_spans); } diff --git a/sway-lsp/src/capabilities/hover/mod.rs b/sway-lsp/src/capabilities/hover/mod.rs index e70de60f59d..613aa504981 100644 --- a/sway-lsp/src/capabilities/hover/mod.rs +++ b/sway-lsp/src/capabilities/hover/mod.rs @@ -9,7 +9,6 @@ use crate::{ attributes::doc_comment_attributes, keyword_docs::KeywordDocs, markdown, markup::Markup, }, }; -use std::sync::Arc; use sway_core::{ language::{ty, Visibility}, Engines, TypeId, @@ -22,7 +21,7 @@ use self::hover_link_contents::HoverLinkContents; /// Extracts the hover information for a token at the current position. pub fn hover_data( - session: Arc, + session: &Session, keyword_docs: &KeywordDocs, url: Url, position: Position, @@ -47,8 +46,7 @@ pub fn hover_data( }); } - let engines = session.engines.read(); - let (decl_ident, decl_token) = match token.declared_token_ident(&engines) { + let (decl_ident, decl_token) = match token.declared_token_ident(&session.engines) { Some(decl_ident) => { let decl_token = session .token_map() @@ -62,7 +60,12 @@ pub fn hover_data( None => (ident.clone(), token), }; - let contents = hover_format(session.clone(), &engines, &decl_token, &decl_ident.name); + let contents = hover_format( + session.clone(), + &session.engines, + &decl_token, + &decl_ident.name, + ); Some(lsp_types::Hover { contents, range: Some(range), @@ -120,7 +123,7 @@ fn markup_content(markup: Markup) -> lsp_types::MarkupContent { } fn hover_format( - session: Arc, + session: &Session, engines: &Engines, token: &Token, ident_name: &str, diff --git a/sway-lsp/src/capabilities/inlay_hints.rs b/sway-lsp/src/capabilities/inlay_hints.rs index 3c363d091d6..8a288414279 100644 --- a/sway-lsp/src/capabilities/inlay_hints.rs +++ b/sway-lsp/src/capabilities/inlay_hints.rs @@ -6,7 +6,6 @@ use crate::{ }, }; use lsp_types::{self, Range, Url}; -use std::sync::Arc; use sway_core::{language::ty::TyDecl, type_system::TypeInfo}; use sway_types::Spanned; @@ -24,7 +23,7 @@ pub struct InlayHint { } pub fn inlay_hints( - session: Arc, + session: &Session, uri: &Url, range: &Range, config: &InlayHintsConfig, @@ -38,9 +37,6 @@ pub fn inlay_hints( return None; } - let engines = session.engines.read(); - let type_engine = engines.te(); - let hints: Vec = session .token_map() .tokens_for_file(uri) @@ -63,7 +59,7 @@ pub fn inlay_hints( }) }) .filter_map(|var| { - let type_info = type_engine.get(var.type_ascription.type_id); + let type_info = session.engines.te().get(var.type_ascription.type_id); match type_info { TypeInfo::Unknown | TypeInfo::UnknownGeneric { .. 
} => None, _ => Some(var), @@ -72,7 +68,7 @@ pub fn inlay_hints( .map(|var| { let range = get_range_from_span(&var.name.span()); let kind = InlayKind::TypeHint; - let label = format!("{}", engines.help_out(var.type_ascription)); + let label = format!("{}", session.engines.help_out(var.type_ascription)); let inlay_hint = InlayHint { range, kind, label }; self::inlay_hint(config.render_colons, inlay_hint) }) diff --git a/sway-lsp/src/capabilities/on_enter.rs b/sway-lsp/src/capabilities/on_enter.rs index d2c14af7bd2..0f90315a9d1 100644 --- a/sway-lsp/src/capabilities/on_enter.rs +++ b/sway-lsp/src/capabilities/on_enter.rs @@ -3,7 +3,6 @@ use crate::{ core::{document::TextDocument, session::Session}, lsp_ext::OnEnterParams, }; -use std::sync::Arc; use tower_lsp::lsp_types::{ DocumentChanges, OneOf, OptionalVersionedTextDocumentIdentifier, Position, Range, TextDocumentEdit, TextEdit, Url, WorkspaceEdit, @@ -17,7 +16,7 @@ const DOC_COMMENT_START: &str = "///"; /// with the appropriate comment start pattern (// or ///). pub fn on_enter( config: &OnEnterConfig, - session: &Arc, + session: &Session, temp_uri: &Url, params: &OnEnterParams, ) -> Option { @@ -31,11 +30,11 @@ pub fn on_enter( .expect("could not get text document"); if config.continue_doc_comments.unwrap_or(false) { - workspace_edit = get_comment_workspace_edit(DOC_COMMENT_START, params, &text_document); + workspace_edit = get_comment_workspace_edit(DOC_COMMENT_START, params, text_document); } if config.continue_comments.unwrap_or(false) && workspace_edit.is_none() { - workspace_edit = get_comment_workspace_edit(COMMENT_START, params, &text_document); + workspace_edit = get_comment_workspace_edit(COMMENT_START, params, text_document); } workspace_edit diff --git a/sway-lsp/src/capabilities/rename.rs b/sway-lsp/src/capabilities/rename.rs index 504ab7db404..db48763a7d9 100644 --- a/sway-lsp/src/capabilities/rename.rs +++ b/sway-lsp/src/capabilities/rename.rs @@ -8,14 +8,14 @@ use crate::{ utils::document::get_url_from_path, }; use lsp_types::{Position, PrepareRenameResponse, TextEdit, Url, WorkspaceEdit}; -use std::{collections::HashMap, sync::Arc}; +use std::collections::HashMap; use sway_core::{language::ty, Engines}; use sway_types::SourceEngine; const RAW_IDENTIFIER: &str = "r#"; pub fn rename( - session: Arc, + session: &Session, new_name: String, url: Url, position: Position, @@ -48,18 +48,16 @@ pub fn rename( )); } - let engines = session.engines.read(); - // If the token is a function, find the parent declaration // and collect idents for all methods of ABI Decl, Trait Decl, and Impl Trait let map_of_changes: HashMap> = (if token.kind == SymbolKind::Function { - find_all_methods_for_decl(&session, &engines, &url, position)? + find_all_methods_for_decl(session, &session.engines, &url, position)? } else { // otherwise, just find all references of the token in the token map session .token_map() .iter() - .all_references_of_token(&token, &engines) + .all_references_of_token(&token, &session.engines) .map(|(ident, _)| ident) .collect::>() }) @@ -100,7 +98,7 @@ pub fn rename( } pub fn prepare_rename( - session: Arc, + session: &Session, url: Url, position: Position, ) -> Result { @@ -109,11 +107,9 @@ pub fn prepare_rename( .token_at_position(&url, position) .ok_or(RenameError::TokenNotFound)?; - let engines = session.engines.read(); - // Only let through tokens that are in the users workspace. // tokens that are external to the users workspace cannot be renamed. 
-    let _ = is_token_in_workspace(&session, &engines, &token)?;
+    let _ = is_token_in_workspace(session, &session.engines, &token)?;
 
     // Make sure we don't allow renaming of tokens that
     // are keywords or intrinsics.
@@ -141,7 +137,7 @@ fn formatted_name(ident: &TokenIdent) -> String {
 
 /// Checks if the token is in the users workspace.
 fn is_token_in_workspace(
-    session: &Arc<Session>,
+    session: &Session,
     engines: &Engines,
     token: &Token,
 ) -> Result<bool, LanguageServerError> {
diff --git a/sway-lsp/src/capabilities/semantic_tokens.rs b/sway-lsp/src/capabilities/semantic_tokens.rs
index 0b4327fd5e5..fd7e42ec938 100644
--- a/sway-lsp/src/capabilities/semantic_tokens.rs
+++ b/sway-lsp/src/capabilities/semantic_tokens.rs
@@ -6,13 +6,10 @@ use lsp_types::{
     Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens,
     SemanticTokensResult, Url,
 };
-use std::sync::{
-    atomic::{AtomicU32, Ordering},
-    Arc,
-};
+use std::sync::atomic::{AtomicU32, Ordering};
 
 // https://github.com/microsoft/vscode-extension-samples/blob/5ae1f7787122812dcc84e37427ca90af5ee09f14/semantic-tokens-sample/vscode.proposed.d.ts#L71
-pub fn semantic_tokens_full(session: Arc<Session>, url: &Url) -> Option<SemanticTokensResult> {
+pub fn semantic_tokens_full(session: &Session, url: &Url) -> Option<SemanticTokensResult> {
     // The tokens need sorting by their span so each token is sequential
     // If this step isn't done, then the bit offsets used for the lsp_types::SemanticToken are incorrect.
     let mut tokens_sorted: Vec<_> = session.token_map().tokens_for_file(url).collect();
diff --git a/sway-lsp/src/core/session.rs b/sway-lsp/src/core/session.rs
index 5b4d09d55d5..b1babc525b9 100644
--- a/sway-lsp/src/core/session.rs
+++ b/sway-lsp/src/core/session.rs
@@ -16,15 +16,14 @@ use crate::{
         dependency, lexed_tree, parsed_tree::ParsedTree, typed_tree::TypedTree, ParseContext,
     },
 };
-use dashmap::DashMap;
 use forc_pkg as pkg;
 use lsp_types::{
     CompletionItem, GotoDefinitionResponse, Location, Position, Range, SymbolInformation,
     TextDocumentContentChangeEvent, TextEdit, Url,
 };
-use parking_lot::RwLock;
 use pkg::{manifest::ManifestFile, BuildPlan};
 use std::{
+    collections::HashMap,
     fs::File,
     io::Write,
     ops::Deref,
@@ -33,7 +32,6 @@ use std::{
     vec,
 };
 use sway_core::{
-    decl_engine::DeclEngine,
     language::{
         lexed::LexedProgram,
         parsed::{AstNode, ParseProgram},
@@ -42,13 +40,13 @@ use sway_core::{
     BuildTarget, Engines, Namespace, Programs,
 };
 use sway_error::{error::CompileError, warning::CompileWarning};
-use sway_types::{SourceEngine, Spanned};
+use sway_types::Spanned;
 use sway_utils::helpers::get_sway_files;
-use tokio::sync::Semaphore;
+use tokio::sync::{RwLock, Semaphore};
 
 use super::token::get_range_from_span;
 
-pub type Documents = DashMap<String, TextDocument>;
+pub type Documents = HashMap<String, TextDocument>;
 pub type ProjectDirectory = PathBuf;
 
 #[derive(Default, Debug)]
@@ -78,9 +76,9 @@ pub struct ParseResult {
 pub struct Session {
     token_map: TokenMap,
     pub documents: Documents,
-    pub runnables: DashMap<PathBuf, Vec<Box<dyn Runnable>>>,
-    pub compiled_program: RwLock<CompiledProgram>,
-    pub engines: RwLock<Engines>,
+    pub runnables: HashMap<PathBuf, Vec<Box<dyn Runnable>>>,
+    pub compiled_program: CompiledProgram,
+    pub engines: Engines,
     pub sync: SyncWorkspace,
     // Limit the number of threads that can wait to parse at the same time. One thread can be parsing
     // and one thread can be waiting to start parsing. All others will return the cached diagnostics.
@@ -99,9 +97,9 @@ impl Session { pub fn new() -> Self { Session { token_map: TokenMap::new(), - documents: DashMap::new(), - runnables: DashMap::new(), - compiled_program: RwLock::new(Default::default()), + documents: HashMap::new(), + runnables: HashMap::new(), + compiled_program: Default::default(), engines: <_>::default(), sync: SyncWorkspace::new(), parse_permits: Arc::new(Semaphore::new(2)), @@ -109,7 +107,7 @@ impl Session { } } - pub fn init(&self, uri: &Url) -> Result { + pub fn init(&mut self, uri: &Url) -> Result<&ProjectDirectory, LanguageServerError> { let manifest_dir = PathBuf::from(uri.path()); // Create a new temp dir that clones the current workspace // and store manifest and temp paths @@ -121,12 +119,12 @@ impl Session { self.sync.manifest_dir().map_err(Into::into) } - pub fn shutdown(&self) { + pub fn shutdown(&mut self) { // Set the should_end flag to true self.sync.should_end.store(true, Ordering::Relaxed); // Wait for the thread to finish - let mut join_handle_option = self.sync.notify_join_handle.write(); + let join_handle_option = &mut self.sync.notify_join_handle; if let Some(join_handle) = std::mem::take(&mut *join_handle_option) { let _ = join_handle.join(); } @@ -141,39 +139,34 @@ impl Session { } /// Wait for the cached [DiagnosticMap] to be unlocked after parsing and return a copy. - pub fn wait_for_parsing(&self) -> DiagnosticMap { - self.diagnostics.read().clone() + pub async fn wait_for_parsing(&self) -> DiagnosticMap { + self.diagnostics.read().await.clone() } /// Write the result of parsing to the session. /// This function should only be called after successfully parsing. - pub fn write_parse_result(&self, res: ParseResult) { + pub fn write_parse_result(&mut self, res: ParseResult) { self.token_map.clear(); self.runnables.clear(); - *self.engines.write() = res.engines; + self.engines = res.engines; res.token_map.deref().iter().for_each(|item| { let (s, t) = item.pair(); self.token_map.insert(s.clone(), t.clone()); }); - self.create_runnables( - &res.typed, - self.engines.read().de(), - self.engines.read().se(), - ); - self.compiled_program.write().lexed = Some(res.lexed); - self.compiled_program.write().parsed = Some(res.parsed); - self.compiled_program.write().typed = Some(res.typed); + self.create_runnables(&res.typed); + self.compiled_program.lexed = Some(res.lexed); + self.compiled_program.parsed = Some(res.parsed); + self.compiled_program.typed = Some(res.typed); } pub fn token_ranges(&self, url: &Url, position: Position) -> Option> { let (_, token) = self.token_map.token_at_position(url, position)?; - let engines = self.engines.read(); let mut token_ranges: Vec<_> = self .token_map .tokens_for_file(url) - .all_references_of_token(&token, &engines) + .all_references_of_token(&token, &self.engines) .map(|(ident, _)| ident.range) .collect(); @@ -186,10 +179,9 @@ impl Session { uri: Url, position: Position, ) -> Option { - let engines = self.engines.read(); self.token_map .token_at_position(&uri, position) - .and_then(|(_, token)| token.declared_token_ident(&engines)) + .and_then(|(_, token)| token.declared_token_ident(&self.engines)) .and_then(|decl_ident| { decl_ident.path.and_then(|path| { // We use ok() here because we don't care about propagating the error from from_file_path @@ -212,18 +204,16 @@ impl Session { line: position.line, character: position.character - trigger_char.len() as u32 - 1, }; - let engines = self.engines.read(); let (ident_to_complete, _) = self.token_map.token_at_position(uri, shifted_position)?; let fn_tokens = 
self.token_map - .tokens_at_position(engines.se(), uri, shifted_position, Some(true)); + .tokens_at_position(self.engines.se(), uri, shifted_position, Some(true)); let (_, fn_token) = fn_tokens.first()?; - let compiled_program = &*self.compiled_program.read(); if let Some(TypedAstToken::TypedFunctionDeclaration(fn_decl)) = fn_token.typed.clone() { - let program = compiled_program.typed.clone()?; + let program = self.compiled_program.typed.clone()?; return Some(capabilities::completion::to_completion_items( &program.root.namespace, - &self.engines.read(), + &self.engines, &ident_to_complete, &fn_decl, position, @@ -233,10 +223,11 @@ impl Session { } /// Returns the [Namespace] from the compiled program if it exists. - pub fn namespace(&self) -> Option { - let compiled_program = &*self.compiled_program.read(); - let program = compiled_program.typed.clone()?; - Some(program.root.namespace) + pub fn namespace(&self) -> Option<&Namespace> { + match &self.compiled_program.typed { + Some(typed) => Some(&typed.root.namespace), + None => None, + } } pub fn symbol_information(&self, url: &Url) -> Option> { @@ -247,19 +238,18 @@ impl Session { } pub fn format_text(&self, url: &Url) -> Result, LanguageServerError> { - let document = self - .documents - .try_get(url.path()) - .try_unwrap() - .ok_or_else(|| DocumentError::DocumentNotFound { - path: url.path().to_string(), - })?; + let document = + self.documents + .get(url.path()) + .ok_or_else(|| DocumentError::DocumentNotFound { + path: url.path().to_string(), + })?; get_page_text_edit(Arc::from(document.get_text()), &mut <_>::default()) .map(|page_text_edit| vec![page_text_edit]) } - pub fn handle_open_file(&self, uri: &Url) { + pub fn handle_open_file(&mut self, uri: &Url) { if !self.documents.contains_key(uri.path()) { if let Ok(text_document) = TextDocument::build_from_path(uri.path()) { let _ = self.store_document(text_document); @@ -269,7 +259,7 @@ impl Session { /// Writes the changes to the file and updates the document. pub fn write_changes_to_file( - &self, + &mut self, uri: &Url, changes: Vec, ) -> Result<(), LanguageServerError> { @@ -291,45 +281,39 @@ impl Session { } /// Get the document at the given [Url]. - pub fn get_text_document(&self, url: &Url) -> Result { + pub fn get_text_document(&self, url: &Url) -> Result<&TextDocument, DocumentError> { self.documents - .try_get(url.path()) - .try_unwrap() + .get(url.path()) .ok_or_else(|| DocumentError::DocumentNotFound { path: url.path().to_string(), }) - .map(|document| document.clone()) } /// Update the document at the given [Url] with the Vec of changes returned by the client. pub fn update_text_document( - &self, + &mut self, url: &Url, changes: Vec, ) -> Option { - self.documents - .try_get_mut(url.path()) - .try_unwrap() - .map(|mut document| { - changes.iter().for_each(|change| { - document.apply_change(change); - }); - document.get_text() - }) + self.documents.get_mut(url.path()).map(|document| { + changes.iter().for_each(|change| { + document.apply_change(change); + }); + document.get_text() + }) } /// Remove the text document from the session. - pub fn remove_document(&self, url: &Url) -> Result { + pub fn remove_document(&mut self, url: &Url) -> Result { self.documents .remove(url.path()) .ok_or_else(|| DocumentError::DocumentNotFound { path: url.path().to_string(), }) - .map(|(_, text_document)| text_document) } /// Store the text document in the session. 
- fn store_document(&self, text_document: TextDocument) -> Result<(), DocumentError> { + fn store_document(&mut self, text_document: TextDocument) -> Result<(), DocumentError> { let uri = text_document.get_uri().to_string(); self.documents .insert(uri.clone(), text_document) @@ -339,21 +323,16 @@ impl Session { } /// Create runnables if the `TyProgramKind` of the `TyProgram` is a script. - fn create_runnables( - &self, - typed_program: &ty::TyProgram, - decl_engine: &DeclEngine, - source_engine: &SourceEngine, - ) { + fn create_runnables(&mut self, typed_program: &ty::TyProgram) { // Insert runnable test functions. - for (decl, _) in typed_program.test_fns(decl_engine) { + for (decl, _) in typed_program.test_fns(self.engines.de()) { // Get the span of the first attribute if it exists, otherwise use the span of the function name. let span = decl .attributes .first() .map_or_else(|| decl.name.span(), |(_, attr)| attr.span.clone()); if let Some(source_id) = span.source_id() { - let path = source_engine.get_path(source_id); + let path = self.engines.se().get_path(source_id); let runnable = Box::new(RunnableTestFn { range: get_range_from_span(&span.clone()), tree_type: typed_program.kind.tree_type(), @@ -373,7 +352,7 @@ impl Session { { let span = main_function.name.span(); if let Some(source_id) = span.source_id() { - let path = source_engine.get_path(source_id); + let path = self.engines.se().get_path(source_id); let runnable = Box::new(RunnableMainFn { range: get_range_from_span(&span.clone()), tree_type: typed_program.kind.tree_type(), @@ -387,7 +366,7 @@ impl Session { } /// Populate [Documents] with sway files found in the workspace. - fn store_sway_files(&self) -> Result<(), LanguageServerError> { + fn store_sway_files(&mut self) -> Result<(), LanguageServerError> { let temp_dir = self.sync.temp_dir()?; // Store the documents. 
for path in get_sway_files(temp_dir).iter().filter_map(|fp| fp.to_str()) { @@ -548,21 +527,21 @@ mod tests { #[test] fn store_document_returns_empty_tuple() { - let session = Session::new(); + let mut session = Session::new(); let path = get_absolute_path("sway-lsp/tests/fixtures/cats.txt"); let document = TextDocument::build_from_path(&path).unwrap(); - let result = Session::store_document(&session, document); + let result = Session::store_document(&mut session, document); assert!(result.is_ok()); } #[test] fn store_document_returns_document_already_stored_error() { - let session = Session::new(); + let mut session = Session::new(); let path = get_absolute_path("sway-lsp/tests/fixtures/cats.txt"); let document = TextDocument::build_from_path(&path).unwrap(); - Session::store_document(&session, document).expect("expected successfully stored"); + Session::store_document(&mut session, document).expect("expected successfully stored"); let document = TextDocument::build_from_path(&path).unwrap(); - let result = Session::store_document(&session, document) + let result = Session::store_document(&mut session, document) .expect_err("expected DocumentAlreadyStored"); assert_eq!(result, DocumentError::DocumentAlreadyStored { path }); } diff --git a/sway-lsp/src/core/sync.rs b/sway-lsp/src/core/sync.rs index 0ca8a032bb8..1261d55f1d9 100644 --- a/sway-lsp/src/core/sync.rs +++ b/sway-lsp/src/core/sync.rs @@ -2,12 +2,10 @@ use crate::{ error::{DirectoryError, DocumentError, LanguageServerError}, utils::document::{get_path_from_url, get_url_from_path, get_url_from_span}, }; -use dashmap::DashMap; use forc_pkg::{manifest::Dependency, PackageManifestFile}; use lsp_types::Url; use notify::RecursiveMode; use notify_debouncer_mini::new_debouncer; -use parking_lot::RwLock; use std::{ collections::HashMap, fs::{self, File}, @@ -28,10 +26,10 @@ pub enum Directory { #[derive(Debug)] pub struct SyncWorkspace { - pub directories: DashMap, - pub notify_join_handle: RwLock>>, + pub directories: HashMap, + pub notify_join_handle: Option>, // if we should shutdown the thread watching the manifest file - pub should_end: Arc, + pub should_end: AtomicBool, } impl SyncWorkspace { @@ -39,9 +37,9 @@ impl SyncWorkspace { pub(crate) fn new() -> Self { Self { - directories: DashMap::new(), - notify_join_handle: RwLock::new(None), - should_end: Arc::new(AtomicBool::new(false)), + directories: HashMap::new(), + notify_join_handle: None, + should_end: AtomicBool::new(false), } } @@ -69,7 +67,7 @@ impl SyncWorkspace { } pub(crate) fn create_temp_dir_from_workspace( - &self, + &mut self, manifest_dir: &Path, ) -> Result<(), LanguageServerError> { let manifest = PackageManifestFile::from_dir(manifest_dir).map_err(|_| { @@ -186,7 +184,7 @@ impl SyncWorkspace { } /// Watch the manifest directory and check for any save events on Forc.toml - pub(crate) fn watch_and_sync_manifest(&self) { + pub(crate) fn watch_and_sync_manifest(&mut self) { let _ = self .manifest_path() .and_then(|manifest_path| PackageManifestFile::from_dir(&manifest_path).ok()) @@ -219,33 +217,26 @@ impl SyncWorkspace { }); // Store the join handle so we can clean up the thread on shutdown - { - let mut join_handle = self.notify_join_handle.write(); - *join_handle = Some(handle); - } + self.notify_join_handle = Some(handle); } }); } /// Return the path to the projects manifest directory. 
- pub(crate) fn manifest_dir(&self) -> Result { + pub(crate) fn manifest_dir(&self) -> Result<&PathBuf, DirectoryError> { self.directories - .try_get(&Directory::Manifest) - .try_unwrap() - .map(|item| item.value().clone()) + .get(&Directory::Manifest) .ok_or(DirectoryError::ManifestDirNotFound) } /// Return the path to the temporary directory that was created for the current session. - pub(crate) fn temp_dir(&self) -> Result { + pub(crate) fn temp_dir(&self) -> Result<&PathBuf, DirectoryError> { self.directories - .try_get(&Directory::Temp) - .try_unwrap() - .map(|item| item.value().clone()) + .get(&Directory::Temp) .ok_or(DirectoryError::TempDirNotFound) } - fn convert_url(&self, uri: &Url, from: PathBuf, to: PathBuf) -> Result { + fn convert_url(&self, uri: &Url, from: &Path, to: &PathBuf) -> Result { let path = from.join( PathBuf::from(uri.path()) .strip_prefix(to) diff --git a/sway-lsp/src/error.rs b/sway-lsp/src/error.rs index 0a31d6e0d1c..3f0c49e0265 100644 --- a/sway-lsp/src/error.rs +++ b/sway-lsp/src/error.rs @@ -22,6 +22,8 @@ pub enum LanguageServerError { FormatError(FormatterError), #[error("Unable to acquire a semaphore permit for parsing")] UnableToAcquirePermit, + #[error("Session has not been initialized")] + SessionNotFound, } #[derive(Debug, Error, PartialEq, Eq)] diff --git a/sway-lsp/src/handlers/notification.rs b/sway-lsp/src/handlers/notification.rs index f874687f740..19f792452e7 100644 --- a/sway-lsp/src/handlers/notification.rs +++ b/sway-lsp/src/handlers/notification.rs @@ -1,66 +1,93 @@ //! This module is responsible for implementing handlers for Language Server //! Protocol. This module specifically handles notification messages sent by the Client. -use crate::{error::LanguageServerError, server_state::ServerState}; +use crate::{ + error::LanguageServerError, + server_state::{self, ServerState}, +}; use lsp_types::{ DidChangeTextDocumentParams, DidChangeWatchedFilesParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, FileChangeType, }; pub async fn handle_did_open_text_document( - state: &ServerState, + state: &mut ServerState, params: DidOpenTextDocumentParams, ) -> Result<(), LanguageServerError> { let (uri, session) = state .sessions - .uri_and_session_from_workspace(¶ms.text_document.uri)?; + .uri_and_mut_session_from_workspace(¶ms.text_document.uri)?; session.handle_open_file(&uri); // If the token map is empty, then we need to parse the project. // Otherwise, don't recompile the project when a new file in the project is opened // as the workspace is already compiled. 
if session.token_map().is_empty() { - state - .parse_project(uri, params.text_document.uri, session.clone()) - .await; + let parse_result = server_state::parse_project(uri.clone(), session).await?; + session.write_parse_result(parse_result); + server_state::publish_diagnostics( + &state.config, + &state.client, + uri, + params.text_document.uri, + session, + ) + .await; } Ok(()) } pub async fn handle_did_change_text_document( - state: &ServerState, + state: &mut ServerState, params: DidChangeTextDocumentParams, ) -> Result<(), LanguageServerError> { let (uri, session) = state .sessions - .uri_and_session_from_workspace(¶ms.text_document.uri)?; + .uri_and_mut_session_from_workspace(¶ms.text_document.uri)?; session.write_changes_to_file(&uri, params.content_changes)?; - state - .parse_project(uri, params.text_document.uri, session.clone()) - .await; + let parse_result = server_state::parse_project(uri.clone(), session).await?; + session.write_parse_result(parse_result); + server_state::publish_diagnostics( + &state.config, + &state.client, + uri, + params.text_document.uri, + session, + ) + .await; Ok(()) } pub(crate) async fn handle_did_save_text_document( - state: &ServerState, + state: &mut ServerState, params: DidSaveTextDocumentParams, ) -> Result<(), LanguageServerError> { let (uri, session) = state .sessions - .uri_and_session_from_workspace(¶ms.text_document.uri)?; + .uri_and_mut_session_from_workspace(¶ms.text_document.uri)?; session.sync.resync()?; - state - .parse_project(uri, params.text_document.uri, session.clone()) - .await; + let parse_result = server_state::parse_project(uri.clone(), session).await?; + session.write_parse_result(parse_result); + server_state::publish_diagnostics( + &state.config, + &state.client, + uri, + params.text_document.uri, + session, + ) + .await; Ok(()) } pub(crate) fn handle_did_change_watched_files( - state: &ServerState, + state: &mut ServerState, params: DidChangeWatchedFilesParams, ) { for event in params.changes { if event.typ == FileChangeType::DELETED { - match state.sessions.uri_and_session_from_workspace(&event.uri) { + match state + .sessions + .uri_and_mut_session_from_workspace(&event.uri) + { Ok((uri, session)) => { let _ = session.remove_document(&uri); } diff --git a/sway-lsp/src/handlers/request.rs b/sway-lsp/src/handlers/request.rs index 8fc75e166d6..3377437eab0 100644 --- a/sway-lsp/src/handlers/request.rs +++ b/sway-lsp/src/handlers/request.rs @@ -18,20 +18,18 @@ use tower_lsp::jsonrpc::Result; use tracing::metadata::LevelFilter; pub fn handle_initialize( - state: &ServerState, + state: &mut ServerState, params: lsp_types::InitializeParams, ) -> Result { if let Some(initialization_options) = ¶ms.initialization_options { - let mut config = state.config.write(); - *config = serde_json::from_value(initialization_options.clone()) + state.config = serde_json::from_value(initialization_options.clone()) .ok() .unwrap_or_default(); } // Initalizing tracing library based on the user's config - let config = state.config.read(); - if config.logging.level != LevelFilter::OFF { + if state.config.logging.level != LevelFilter::OFF { let tracing_options = TracingSubscriberOptions { - log_level: Some(config.logging.level), + log_level: Some(state.config.logging.level), writer_mode: Some(TracingWriterMode::Stderr), ..Default::default() }; @@ -45,7 +43,7 @@ pub fn handle_initialize( }) } -pub fn handle_document_symbol( +pub async fn handle_document_symbol( state: &ServerState, params: lsp_types::DocumentSymbolParams, ) -> Result> { @@ -54,7 +52,7 
@@ pub fn handle_document_symbol( .uri_and_session_from_workspace(¶ms.text_document.uri) { Ok((uri, session)) => { - let _ = session.wait_for_parsing(); + let _ = session.wait_for_parsing().await; Ok(session .symbol_information(&uri) .map(DocumentSymbolResponse::Flat)) @@ -244,7 +242,7 @@ pub fn handle_code_lens( .sessions .uri_and_session_from_workspace(¶ms.text_document.uri) { - Ok((url, session)) => Ok(Some(capabilities::code_lens::code_lens(&session, &url))), + Ok((url, session)) => Ok(Some(capabilities::code_lens::code_lens(session, &url))), Err(err) => { tracing::error!("{}", err.to_string()); Ok(None) @@ -252,7 +250,7 @@ pub fn handle_code_lens( } } -pub fn handle_semantic_tokens_full( +pub async fn handle_semantic_tokens_full( state: &ServerState, params: SemanticTokensParams, ) -> Result> { @@ -261,7 +259,7 @@ pub fn handle_semantic_tokens_full( .uri_and_session_from_workspace(¶ms.text_document.uri) { Ok((uri, session)) => { - let _ = session.wait_for_parsing(); + let _ = session.wait_for_parsing().await; Ok(capabilities::semantic_tokens::semantic_tokens_full( session, &uri, )) @@ -273,7 +271,7 @@ pub fn handle_semantic_tokens_full( } } -pub(crate) fn handle_inlay_hints( +pub(crate) async fn handle_inlay_hints( state: &ServerState, params: InlayHintParams, ) -> Result>> { @@ -282,8 +280,8 @@ pub(crate) fn handle_inlay_hints( .uri_and_session_from_workspace(¶ms.text_document.uri) { Ok((uri, session)) => { - let _ = session.wait_for_parsing(); - let config = &state.config.read().inlay_hints; + let _ = session.wait_for_parsing().await; + let config = &state.config.inlay_hints; Ok(capabilities::inlay_hints::inlay_hints( session, &uri, @@ -337,13 +335,16 @@ pub fn handle_show_ast( // Returns true if the current path matches the path of a submodule let path_is_submodule = |ident: &Ident, path: &Option| -> bool { - let engines = session.engines.read(); - ident.span().source_id().map(|p| engines.se().get_path(p)) == *path + ident + .span() + .source_id() + .map(|p| session.engines.se().get_path(p)) + == *path }; let ast_path = PathBuf::from(params.save_path.path()); { - let program = session.compiled_program.read(); + let program = &session.compiled_program; match params.ast_kind.as_str() { "lexed" => { Ok(program.lexed.as_ref().and_then(|lexed_program| { @@ -377,14 +378,14 @@ pub fn handle_show_ast( // Initialize the string with the AST from the root let mut formatted_ast = debug::print_decl_engine_types( &typed_program.root.all_nodes, - session.engines.read().de(), + session.engines.de(), ); for (ident, submodule) in &typed_program.root.submodules { if path_is_submodule(ident, &path) { // overwrite the root AST with the submodule AST formatted_ast = debug::print_decl_engine_types( &submodule.module.all_nodes, - session.engines.read().de(), + session.engines.de(), ); } } @@ -407,14 +408,14 @@ pub(crate) fn on_enter( state: &ServerState, params: lsp_ext::OnEnterParams, ) -> Result> { - let config = &state.config.read().on_enter; + let config = &state.config.on_enter; match state .sessions .uri_and_session_from_workspace(¶ms.text_document.uri) { Ok((uri, session)) => { // handle on_enter capabilities if they are enabled - Ok(capabilities::on_enter(config, &session, &uri, ¶ms)) + Ok(capabilities::on_enter(config, session, &uri, ¶ms)) } Err(err) => { tracing::error!("{}", err.to_string()); diff --git a/sway-lsp/src/server.rs b/sway-lsp/src/server.rs index 15aa5b635dd..96d83a5074d 100644 --- a/sway-lsp/src/server.rs +++ b/sway-lsp/src/server.rs @@ -18,39 +18,39 @@ use lsp_types::{ }; use 
tower_lsp::{jsonrpc::Result, LanguageServer};
 
-#[tower_lsp::async_trait]
+#[tower_lsp::async_trait(?Send)]
 impl LanguageServer for ServerState {
-    async fn initialize(&self, params: InitializeParams) -> Result<InitializeResult> {
+    async fn initialize(&mut self, params: InitializeParams) -> Result<InitializeResult> {
         request::handle_initialize(self, params)
     }
 
-    async fn initialized(&self, _: InitializedParams) {
+    async fn initialized(&mut self, _: InitializedParams) {
         tracing::info!("Sway Language Server Initialized");
     }
 
-    async fn shutdown(&self) -> Result<()> {
+    async fn shutdown(&mut self) -> Result<()> {
         self.shutdown_server()
     }
 
-    async fn did_open(&self, params: DidOpenTextDocumentParams) {
+    async fn did_open(&mut self, params: DidOpenTextDocumentParams) {
         if let Err(err) = notification::handle_did_open_text_document(self, params).await {
             tracing::error!("{}", err.to_string());
         }
     }
 
-    async fn did_change(&self, params: DidChangeTextDocumentParams) {
+    async fn did_change(&mut self, params: DidChangeTextDocumentParams) {
         if let Err(err) = notification::handle_did_change_text_document(self, params).await {
             tracing::error!("{}", err.to_string());
         }
     }
 
-    async fn did_save(&self, params: DidSaveTextDocumentParams) {
+    async fn did_save(&mut self, params: DidSaveTextDocumentParams) {
         if let Err(err) = notification::handle_did_save_text_document(self, params).await {
             tracing::error!("{}", err.to_string());
         }
     }
 
-    async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) {
+    async fn did_change_watched_files(&mut self, params: DidChangeWatchedFilesParams) {
         notification::handle_did_change_watched_files(self, params);
     }
@@ -74,14 +74,14 @@ impl LanguageServer for ServerState {
         &self,
         params: DocumentSymbolParams,
     ) -> Result<Option<DocumentSymbolResponse>> {
-        request::handle_document_symbol(self, params)
+        request::handle_document_symbol(self, params).await
     }
 
     async fn semantic_tokens_full(
         &self,
         params: SemanticTokensParams,
     ) -> Result<Option<SemanticTokensResult>> {
-        request::handle_semantic_tokens_full(self, params)
+        request::handle_semantic_tokens_full(self, params).await
     }
 
     async fn document_highlight(
         &self,
@@ -102,7 +102,7 @@ impl LanguageServer for ServerState {
         request::handle_formatting(self, params)
     }
 
-    async fn rename(&self, params: RenameParams) -> Result<Option<WorkspaceEdit>> {
+    async fn rename(&mut self, params: RenameParams) -> Result<Option<WorkspaceEdit>> {
         request::handle_rename(self, params)
     }
 
@@ -114,7 +114,7 @@ impl LanguageServer for ServerState {
     }
 
     async fn inlay_hint(&self, params: InlayHintParams) -> Result<Option<Vec<InlayHint>>> {
-        request::handle_inlay_hints(self, params)
+        request::handle_inlay_hints(self, params).await
     }
 }
diff --git a/sway-lsp/src/server_state.rs b/sway-lsp/src/server_state.rs
index a9d8700be40..896c6e56551 100644
--- a/sway-lsp/src/server_state.rs
+++ b/sway-lsp/src/server_state.rs
@@ -3,33 +3,31 @@ use crate::{
     capabilities::diagnostic::get_diagnostics,
     config::{Config, Warnings},
-    core::session::{self, Session},
+    core::session::{self, ParseResult, Session},
     error::{DirectoryError, DocumentError, LanguageServerError},
     utils::debug,
     utils::keyword_docs::KeywordDocs,
 };
-use dashmap::DashMap;
 use forc_pkg::PackageManifestFile;
 use lsp_types::{Diagnostic, Url};
-use parking_lot::RwLock;
-use std::{path::PathBuf, sync::Arc};
+use std::{collections::HashMap, path::PathBuf, sync::Arc};
 use tower_lsp::{jsonrpc, Client};
 
 /// `ServerState` is the primary mutable state of the language server
 pub struct ServerState {
     pub(crate) client: Option<Client>,
-    pub(crate) config: Arc<RwLock<Config>>,
+    pub(crate) config: Arc<Config>,
     pub(crate) keyword_docs: Arc<KeywordDocs>,
-    pub(crate) sessions: Arc<Sessions>,
+    pub(crate) sessions: Sessions,
 }
 
 impl
Default for ServerState { fn default() -> Self { ServerState { client: None, - config: Arc::new(RwLock::new(Default::default())), + config: Arc::new(Default::default()), keyword_docs: Arc::new(KeywordDocs::new()), - sessions: Arc::new(Sessions(DashMap::new())), + sessions: Sessions(HashMap::new()), } } } @@ -42,87 +40,92 @@ impl ServerState { } } - pub fn shutdown_server(&self) -> jsonrpc::Result<()> { + pub fn shutdown_server(&mut self) -> jsonrpc::Result<()> { tracing::info!("Shutting Down the Sway Language Server"); - let _ = self.sessions.iter().map(|item| { - let session = item.value(); + let _ = self.sessions.iter_mut().map(|(_, session)| { session.shutdown(); }); Ok(()) } +} - pub(crate) fn diagnostics(&self, uri: &Url, session: Arc) -> Vec { - let mut diagnostics_to_publish = vec![]; - let config = &self.config.read(); - let tokens = session.token_map().tokens_for_file(uri); - match config.debug.show_collected_tokens_as_warnings { - // If collected_tokens_as_warnings is Parsed or Typed, - // take over the normal error and warning display behavior - // and instead show the either the parsed or typed tokens as warnings. - // This is useful for debugging the lsp parser. - Warnings::Parsed => { - diagnostics_to_publish = debug::generate_warnings_for_parsed_tokens(tokens) - } - Warnings::Typed => { - diagnostics_to_publish = debug::generate_warnings_for_typed_tokens(tokens) - } - Warnings::Default => { - let diagnostics_map = session.wait_for_parsing(); - if let Some(diagnostics) = diagnostics_map.get(&PathBuf::from(uri.path())) { - if config.diagnostic.show_warnings { - diagnostics_to_publish.extend(diagnostics.warnings.clone()); - } - if config.diagnostic.show_errors { - diagnostics_to_publish.extend(diagnostics.errors.clone()); - } - } - } +pub(crate) async fn diagnostics(config: &Config, uri: &Url, session: &Session) -> Vec { + let mut diagnostics_to_publish = vec![]; + let tokens = session.token_map().tokens_for_file(uri); + match config.debug.show_collected_tokens_as_warnings { + // If collected_tokens_as_warnings is Parsed or Typed, + // take over the normal error and warning display behavior + // and instead show the either the parsed or typed tokens as warnings. + // This is useful for debugging the lsp parser. + Warnings::Parsed => { + diagnostics_to_publish = debug::generate_warnings_for_parsed_tokens(tokens) } - diagnostics_to_publish - } - - pub(crate) async fn parse_project(&self, uri: Url, workspace_uri: Url, session: Arc) { - match run_blocking_parse_project(uri.clone(), session.clone()).await { - Ok(_) => { - // Note: Even if the computed diagnostics vec is empty, we still have to push the empty Vec - // in order to clear former diagnostics. Newly pushed diagnostics always replace previously pushed diagnostics. 
- if let Some(client) = self.client.as_ref() { - client - .publish_diagnostics( - workspace_uri.clone(), - self.diagnostics(&uri, session), - None, - ) - .await; + Warnings::Typed => { + diagnostics_to_publish = debug::generate_warnings_for_typed_tokens(tokens) + } + Warnings::Default => { + let diagnostics_map = session.wait_for_parsing().await; + if let Some(diagnostics) = diagnostics_map.get(&PathBuf::from(uri.path())) { + if config.diagnostic.show_warnings { + diagnostics_to_publish.extend(diagnostics.warnings.clone()); } - } - Err(err) => { - if matches!(err, LanguageServerError::FailedToParse) { - tracing::error!("Error parsing project: {:?}", err); + if config.diagnostic.show_errors { + diagnostics_to_publish.extend(diagnostics.errors.clone()); } } } } + diagnostics_to_publish } -/// Runs parse_project in a blocking thread, because parsing is not async. -async fn run_blocking_parse_project( +pub(crate) async fn publish_diagnostics( + config: &Config, + client: &Option, uri: Url, - session: Arc, -) -> Result<(), LanguageServerError> { + workspace_uri: Url, + session: &Session, +) { + // Note: Even if the computed diagnostics vec is empty, we still have to push the empty Vec + // in order to clear former diagnostics. Newly pushed diagnostics always replace previously pushed diagnostics. + if let Some(client) = client.as_ref() { + client + .publish_diagnostics( + workspace_uri.clone(), + diagnostics(config, &uri, session).await, + None, + ) + .await; + } +} + +pub(crate) async fn parse_project( + uri: Url, + session: &Session, +) -> Result { // Acquire a permit to parse the project. If there are none available, return false. This way, // we avoid publishing the same diagnostics multiple times. + try_acquire_parse_permit(session)?; + + // Lock the diagnostics result to prevent multiple threads from parsing the project at the same time. + let mut diagnostics = session.diagnostics.write().await; + let parse_result = run_blocking_parse_project(uri).await?; + let (errors, warnings) = parse_result.diagnostics.clone(); + *diagnostics = get_diagnostics(&warnings, &errors, parse_result.engines.se()); + Ok(parse_result) +} + +fn try_acquire_parse_permit(session: &Session) -> Result<(), LanguageServerError> { if session.parse_permits.try_acquire().is_err() { return Err(LanguageServerError::UnableToAcquirePermit); } + Ok(()) +} + +/// Runs parse_project in a blocking thread, because parsing is not async. +async fn run_blocking_parse_project(uri: Url) -> Result { tokio::task::spawn_blocking(move || { - // Lock the diagnostics result to prevent multiple threads from parsing the project at the same time. - let mut diagnostics = session.diagnostics.write(); let parse_result = session::parse_project(&uri)?; - let (errors, warnings) = parse_result.diagnostics.clone(); - session.write_parse_result(parse_result); - *diagnostics = get_diagnostics(&warnings, &errors, session.engines.read().se()); - Ok(()) + Ok(parse_result) }) .await .unwrap_or_else(|_| Err(LanguageServerError::FailedToParse)) @@ -130,60 +133,83 @@ async fn run_blocking_parse_project( /// `Sessions` is a collection of [Session]s, each of which represents a project /// that has been opened in the users workspace. 
-pub(crate) struct Sessions(DashMap<PathBuf, Arc<Session>>);
+pub(crate) struct Sessions(HashMap<PathBuf, Session>);
 
 impl Sessions {
-    fn init(&self, uri: &Url) -> Result<(), LanguageServerError> {
-        let session = Arc::new(Session::new());
+    fn init(&mut self, uri: &Url) -> Result<(), LanguageServerError> {
+        let mut session = Session::new();
         let project_name = session.init(uri)?;
-        self.insert(project_name, session);
+        self.insert(project_name.clone(), session);
         Ok(())
     }
 
-    /// Constructs and returns a tuple of `(Url, Arc<Session>)` from a given workspace URI.
+    /// Constructs and returns a tuple of `(Url, &Session)` from a given workspace URI.
     /// The returned URL represents the temp directory workspace.
     pub(crate) fn uri_and_session_from_workspace(
         &self,
         workspace_uri: &Url,
-    ) -> Result<(Url, Arc<Session>), LanguageServerError> {
+    ) -> Result<(Url, &Session), LanguageServerError> {
         let session = self.url_to_session(workspace_uri)?;
         let uri = session.sync.workspace_to_temp_url(workspace_uri)?;
         Ok((uri, session))
     }
 
-    fn url_to_session(&self, uri: &Url) -> Result<Arc<Session>, LanguageServerError> {
-        let path = PathBuf::from(uri.path());
-        let manifest = PackageManifestFile::from_dir(&path).map_err(|_| {
-            DocumentError::ManifestFileNotFound {
-                dir: path.to_string_lossy().to_string(),
-            }
-        })?;
+    pub(crate) fn uri_and_mut_session_from_workspace(
+        &mut self,
+        workspace_uri: &Url,
+    ) -> Result<(Url, &mut Session), LanguageServerError> {
+        let session = self.url_to_session_mut(workspace_uri)?;
+        let uri = session.sync.workspace_to_temp_url(workspace_uri)?;
+        Ok((uri, session))
+    }
 
-        // strip Forc.toml from the path to get the manifest directory
-        let manifest_dir = manifest
-            .path()
-            .parent()
-            .ok_or(DirectoryError::ManifestDirNotFound)?
-            .to_path_buf();
-
-        let session = match self.try_get(&manifest_dir).try_unwrap() {
-            Some(item) => item.value().clone(),
-            None => {
-                // If no session can be found, then we need to call init and inserst a new session into the map
-                self.init(uri)?;
-                self.try_get(&manifest_dir)
-                    .try_unwrap()
-                    .map(|item| item.value().clone())
-                    .expect("no session found even though it was just inserted into the map")
-            }
-        };
+    fn url_to_session(&self, uri: &Url) -> Result<&Session, LanguageServerError> {
+        let manifest_dir = get_manifest_dir_from_uri(uri)?;
+        let session = self
+            .get(&manifest_dir)
+            .ok_or(LanguageServerError::SessionNotFound)?;
+        Ok(session)
+    }
+
+    fn url_to_session_mut(&mut self, uri: &Url) -> Result<&mut Session, LanguageServerError> {
+        let manifest_dir = get_manifest_dir_from_uri(uri)?;
+        if self.get(&manifest_dir).is_none() {
+            // If no session can be found, then we need to call init and insert a new session into the map
+            self.init(uri)?;
+        }
+        let session = self
+            .get_mut(&manifest_dir)
+            .expect("no session found even though it was just inserted into the map");
         Ok(session)
     }
 }
 
 impl std::ops::Deref for Sessions {
-    type Target = DashMap<PathBuf, Arc<Session>>;
+    type Target = HashMap<PathBuf, Session>;
     fn deref(&self) -> &Self::Target {
         &self.0
     }
 }
+
+impl std::ops::DerefMut for Sessions {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.0
+    }
+}
+
+fn get_manifest_dir_from_uri(uri: &Url) -> Result<PathBuf, LanguageServerError> {
+    let path = PathBuf::from(uri.path());
+    let manifest =
+        PackageManifestFile::from_dir(&path).map_err(|_| DocumentError::ManifestFileNotFound {
+            dir: path.to_string_lossy().to_string(),
+        })?;
+
+    // Strip Forc.toml from the path to get the manifest directory
+    let manifest_dir = manifest
+        .path()
+        .parent()
+        .ok_or(DirectoryError::ManifestDirNotFound)?
+ .to_path_buf(); + + Ok(manifest_dir) +} diff --git a/sway-lsp/tests/integration/code_actions.rs b/sway-lsp/tests/integration/code_actions.rs index 30bfaab8523..d4fb58324a6 100644 --- a/sway-lsp/tests/integration/code_actions.rs +++ b/sway-lsp/tests/integration/code_actions.rs @@ -42,7 +42,7 @@ fn create_code_action_params(uri: Url, range: Range) -> CodeActionParams { } } -pub(crate) fn code_action_abi_request(server: &ServerState, uri: &Url) { +pub(crate) async fn code_action_abi_request(server: &ServerState, uri: &Url) { let params = create_code_action_params( uri.clone(), Range { @@ -83,7 +83,7 @@ pub(crate) fn code_action_abi_request(server: &ServerState, uri: &Url) { assert_eq!(res.unwrap().unwrap(), expected); } -pub(crate) fn code_action_function_request(server: &ServerState, uri: &Url) { +pub(crate) async fn code_action_function_request(server: &ServerState, uri: &Url) { let params = create_code_action_params( uri.clone(), Range { @@ -121,7 +121,7 @@ pub(crate) fn code_action_function_request(server: &ServerState, uri: &Url) { assert_eq!(res.unwrap().unwrap(), expected); } -pub(crate) fn code_action_trait_fn_request(server: &ServerState, uri: &Url) { +pub(crate) async fn code_action_trait_fn_request(server: &ServerState, uri: &Url) { let params = create_code_action_params( uri.clone(), Range { @@ -160,7 +160,7 @@ pub(crate) fn code_action_trait_fn_request(server: &ServerState, uri: &Url) { assert_eq!(res.unwrap().unwrap(), expected); } -pub(crate) fn code_action_struct_request(server: &ServerState, uri: &Url) { +pub(crate) async fn code_action_struct_request(server: &ServerState, uri: &Url) { let params = create_code_action_params( uri.clone(), Range { @@ -248,7 +248,7 @@ pub(crate) fn code_action_struct_request(server: &ServerState, uri: &Url) { assert_eq!(res.unwrap().unwrap(), expected); } -pub(crate) fn code_action_struct_type_params_request(server: &ServerState, uri: &Url) { +pub(crate) async fn code_action_struct_type_params_request(server: &ServerState, uri: &Url) { let params = create_code_action_params( uri.clone(), Range { @@ -340,7 +340,7 @@ pub(crate) fn code_action_struct_type_params_request(server: &ServerState, uri: assert_eq!(res.unwrap().unwrap(), expected); } -pub(crate) fn code_action_struct_existing_impl_request(server: &ServerState, uri: &Url) { +pub(crate) async fn code_action_struct_existing_impl_request(server: &ServerState, uri: &Url) { let params = create_code_action_params( uri.clone(), Range { diff --git a/sway-lsp/tests/integration/lsp.rs b/sway-lsp/tests/integration/lsp.rs index 458560313cd..30473082770 100644 --- a/sway-lsp/tests/integration/lsp.rs +++ b/sway-lsp/tests/integration/lsp.rs @@ -141,32 +141,36 @@ pub(crate) async fn show_ast_request( assert_eq!(expected, response.unwrap().unwrap()); } -pub(crate) fn semantic_tokens_request(server: &ServerState, uri: &Url) { +pub(crate) async fn semantic_tokens_request(server: &ServerState, uri: &Url) { let params = SemanticTokensParams { text_document: TextDocumentIdentifier { uri: uri.clone() }, work_done_progress_params: Default::default(), partial_result_params: Default::default(), }; - let response = request::handle_semantic_tokens_full(server, params.clone()).unwrap(); + let response = request::handle_semantic_tokens_full(server, params.clone()) + .await + .unwrap(); eprintln!("{:#?}", response); if let Some(SemanticTokensResult::Tokens(tokens)) = response { assert!(!tokens.data.is_empty()); } } -pub(crate) fn document_symbol_request(server: &ServerState, uri: &Url) { +pub(crate) async fn 
document_symbol_request(server: &ServerState, uri: &Url) { let params = DocumentSymbolParams { text_document: TextDocumentIdentifier { uri: uri.clone() }, work_done_progress_params: Default::default(), partial_result_params: Default::default(), }; - let response = request::handle_document_symbol(server, params.clone()).unwrap(); + let response = request::handle_document_symbol(server, params.clone()) + .await + .unwrap(); if let Some(DocumentSymbolResponse::Flat(res)) = response { assert!(!res.is_empty()); } } -pub(crate) fn format_request(server: &ServerState, uri: &Url) { +pub(crate) async fn format_request(server: &ServerState, uri: &Url) { let params = DocumentFormattingParams { text_document: TextDocumentIdentifier { uri: uri.clone() }, options: FormattingOptions { @@ -180,7 +184,7 @@ pub(crate) fn format_request(server: &ServerState, uri: &Url) { assert!(!response.unwrap().is_empty()); } -pub(crate) fn highlight_request(server: &ServerState, uri: &Url) { +pub(crate) async fn highlight_request(server: &ServerState, uri: &Url) { let params = DocumentHighlightParams { text_document_position_params: TextDocumentPositionParams { text_document: TextDocumentIdentifier { uri: uri.clone() }, @@ -224,7 +228,7 @@ pub(crate) fn highlight_request(server: &ServerState, uri: &Url) { assert_eq!(expected, response.unwrap()); } -pub(crate) fn code_lens_empty_request(server: &ServerState, uri: &Url) { +pub(crate) async fn code_lens_empty_request(server: &ServerState, uri: &Url) { let params = CodeLensParams { text_document: TextDocumentIdentifier { uri: uri.clone() }, work_done_progress_params: Default::default(), @@ -234,7 +238,7 @@ pub(crate) fn code_lens_empty_request(server: &ServerState, uri: &Url) { assert_eq!(response.unwrap().len(), 0); } -pub(crate) fn code_lens_request(server: &ServerState, uri: &Url) { +pub(crate) async fn code_lens_request(server: &ServerState, uri: &Url) { let params = CodeLensParams { text_document: TextDocumentIdentifier { uri: uri.clone() }, work_done_progress_params: Default::default(), @@ -304,7 +308,7 @@ pub(crate) fn code_lens_request(server: &ServerState, uri: &Url) { assert_eq!(expected, response.unwrap()); } -pub(crate) fn completion_request(server: &ServerState, uri: &Url) { +pub(crate) async fn completion_request(server: &ServerState, uri: &Url) { let params = CompletionParams { text_document_position: TextDocumentPositionParams { text_document: TextDocumentIdentifier { uri: uri.clone() }, diff --git a/sway-lsp/tests/lib.rs b/sway-lsp/tests/lib.rs index 4a796c7a482..4c82b58258a 100644 --- a/sway-lsp/tests/lib.rs +++ b/sway-lsp/tests/lib.rs @@ -42,7 +42,7 @@ pub(crate) struct Rename<'a> { new_name: &'a str, } -async fn open(server: &ServerState, entry_point: PathBuf) -> Url { +async fn open(server: &mut ServerState, entry_point: PathBuf) -> Url { let (uri, sway_program) = load_sway_example(entry_point); let params = DidOpenTextDocumentParams { text_document: TextDocumentItem { @@ -72,18 +72,18 @@ async fn shutdown_and_exit(service: &mut LspService) { #[tokio::test] async fn initialize() { - let server = ServerState::default(); + let mut server = ServerState::default(); let params = InitializeParams { initialization_options: None, ..Default::default() }; - let _ = request::handle_initialize(&server, params); + let _ = request::handle_initialize(&mut server, params); } #[tokio::test] async fn did_open() { - let server = ServerState::default(); - let _ = open(&server, e2e_test_dir().join("src/main.sw")).await; + let mut server = ServerState::default(); + let _ = 
open(&mut server, e2e_test_dir().join("src/main.sw")).await; let _ = server.shutdown_server(); } @@ -108,17 +108,17 @@ async fn lsp_syncs_with_workspace_edits() { def_end_char: 11, def_path: uri.as_str(), }; - lsp::definition_check(service.inner(), &go_to); + lsp::definition_check(&service.inner(), &go_to); let _ = lsp::did_change_request(&mut service, &uri).await; go_to.def_line = 20; - lsp::definition_check_with_req_offset(service.inner(), &mut go_to, 45, 24); + lsp::definition_check_with_req_offset(&service.inner(), &mut go_to, 45, 24); shutdown_and_exit(&mut service).await; } #[tokio::test] async fn show_ast() { - let server = ServerState::default(); - let uri = open(&server, e2e_test_dir().join("src/main.sw")).await; + let mut server = ServerState::default(); + let uri = open(&mut server, e2e_test_dir().join("src/main.sw")).await; lsp::show_ast_request(&server, &uri, "typed", None).await; let _ = server.shutdown_server(); } @@ -127,8 +127,8 @@ async fn show_ast() { #[tokio::test] async fn go_to_definition() { - let server = ServerState::default(); - let uri = open(&server, doc_comments_dir().join("src/main.sw")).await; + let mut server = ServerState::default(); + let uri = open(&mut server, doc_comments_dir().join("src/main.sw")).await; let go_to = GotoDefinition { req_uri: &uri, req_line: 44, @@ -144,9 +144,9 @@ async fn go_to_definition() { #[tokio::test] async fn go_to_definition_for_fields() { - let server = ServerState::default(); + let mut server = ServerState::default(); let uri = open( - &server, + &mut server, test_fixtures_dir().join("tokens/fields/src/main.sw"), ) .await; @@ -198,9 +198,9 @@ async fn go_to_definition_for_fields() { #[tokio::test] async fn go_to_definition_inside_turbofish() { - let server = ServerState::default(); + let mut server = ServerState::default(); let uri = open( - &server, + &mut server, test_fixtures_dir().join("tokens/turbofish/src/main.sw"), ) .await; @@ -246,9 +246,9 @@ async fn go_to_definition_inside_turbofish() { #[tokio::test] async fn go_to_definition_for_matches() { - let server = ServerState::default(); + let mut server = ServerState::default(); let uri = open( - &server, + &mut server, test_fixtures_dir().join("tokens/matches/src/main.sw"), ) .await; @@ -357,9 +357,9 @@ async fn go_to_definition_for_matches() { #[tokio::test] async fn go_to_definition_for_modules() { - let server = ServerState::default(); + let mut server = ServerState::default(); let uri = open( - &server, + &mut server, test_fixtures_dir().join("tokens/modules/src/lib.sw"), ) .await; @@ -378,9 +378,9 @@ async fn go_to_definition_for_modules() { let _ = server.shutdown_server(); - let server = ServerState::default(); + let mut server = ServerState::default(); let uri = open( - &server, + &mut server, test_fixtures_dir().join("tokens/modules/src/test_mod.sw"), ) .await; @@ -402,9 +402,9 @@ async fn go_to_definition_for_modules() { #[tokio::test] async fn go_to_definition_for_paths() { - let server = ServerState::default(); + let mut server = ServerState::default(); let uri = open( - &server, + &mut server, test_fixtures_dir().join("tokens/paths/src/main.sw"), ) .await; @@ -774,9 +774,9 @@ async fn go_to_definition_for_paths() { #[tokio::test] async fn go_to_definition_for_traits() { - let server = ServerState::default(); + let mut server = ServerState::default(); let uri = open( - &server, + &mut server, test_fixtures_dir().join("tokens/traits/src/main.sw"), ) .await; @@ -803,9 +803,9 @@ async fn go_to_definition_for_traits() { #[tokio::test] async fn 
go_to_definition_for_variables() { - let server = ServerState::default(); + let mut server = ServerState::default(); let uri = open( - &server, + &mut server, test_fixtures_dir().join("tokens/variables/src/main.sw"), ) .await; @@ -894,9 +894,9 @@ async fn go_to_definition_for_variables() { #[tokio::test] async fn go_to_definition_for_consts() { - let server = ServerState::default(); + let mut server = ServerState::default(); let uri = open( - &server, + &mut server, test_fixtures_dir().join("tokens/consts/src/main.sw"), ) .await; @@ -969,9 +969,9 @@ async fn go_to_definition_for_consts() { #[tokio::test] async fn go_to_definition_for_functions() { - let server = ServerState::default(); + let mut server = ServerState::default(); let uri = open( - &server, + &mut server, test_fixtures_dir().join("tokens/functions/src/main.sw"), ) .await; @@ -1019,9 +1019,9 @@ async fn go_to_definition_for_functions() { #[tokio::test] async fn go_to_definition_for_structs() { - let server = ServerState::default(); + let mut server = ServerState::default(); let uri = open( - &server, + &mut server, test_fixtures_dir().join("tokens/structs/src/main.sw"), ) .await; @@ -1072,9 +1072,9 @@ async fn go_to_definition_for_structs() { #[tokio::test] async fn go_to_definition_for_impls() { - let server = ServerState::default(); + let mut server = ServerState::default(); let uri = open( - &server, + &mut server, test_fixtures_dir().join("tokens/impls/src/main.sw"), ) .await; @@ -1109,9 +1109,9 @@ async fn go_to_definition_for_impls() { #[tokio::test] async fn go_to_definition_for_where_clause() { - let server = ServerState::default(); + let mut server = ServerState::default(); let uri = open( - &server, + &mut server, test_fixtures_dir().join("tokens/where_clause/src/main.sw"), ) .await; @@ -1168,9 +1168,9 @@ async fn go_to_definition_for_where_clause() { #[tokio::test] async fn go_to_definition_for_enums() { - let server = ServerState::default(); + let mut server = ServerState::default(); let uri = open( - &server, + &mut server, test_fixtures_dir().join("tokens/enums/src/main.sw"), ) .await; @@ -1211,8 +1211,12 @@ async fn go_to_definition_for_enums() { #[tokio::test] async fn go_to_definition_for_abi() { - let server = ServerState::default(); - let uri = open(&server, test_fixtures_dir().join("tokens/abi/src/main.sw")).await; + let mut server = ServerState::default(); + let uri = open( + &mut server, + test_fixtures_dir().join("tokens/abi/src/main.sw"), + ) + .await; let mut go_to = GotoDefinition { req_uri: &uri, @@ -1236,9 +1240,9 @@ async fn go_to_definition_for_abi() { #[tokio::test] async fn go_to_definition_for_storage() { - let server = ServerState::default(); + let mut server = ServerState::default(); let uri = open( - &server, + &mut server, test_fixtures_dir().join("tokens/storage/src/main.sw"), ) .await; @@ -1326,9 +1330,9 @@ async fn go_to_definition_for_storage() { #[tokio::test] async fn hover_docs_for_consts() { - let server = ServerState::default(); + let mut server = ServerState::default(); let uri = open( - &server, + &mut server, test_fixtures_dir().join("tokens/consts/src/main.sw"), ) .await; @@ -1349,9 +1353,9 @@ async fn hover_docs_for_consts() { #[tokio::test] async fn hover_docs_for_functions() { - let server = ServerState::default(); + let mut server = ServerState::default(); let uri = open( - &server, + &mut server, test_fixtures_dir().join("tokens/functions/src/main.sw"), ) .await; @@ -1368,9 +1372,9 @@ async fn hover_docs_for_functions() { #[tokio::test] async fn hover_docs_for_structs() { 
- let server = ServerState::default(); + let mut server = ServerState::default(); let uri = open( - &server, + &mut server, test_fixtures_dir().join("tokens/structs/src/main.sw"), ) .await; @@ -1405,9 +1409,9 @@ async fn hover_docs_for_structs() { #[tokio::test] async fn hover_docs_for_enums() { - let server = ServerState::default(); + let mut server = ServerState::default(); let uri = open( - &server, + &mut server, test_fixtures_dir().join("tokens/enums/src/main.sw"), ) .await; @@ -1432,8 +1436,12 @@ async fn hover_docs_for_enums() { #[tokio::test] async fn hover_docs_for_abis() { - let server = ServerState::default(); - let uri = open(&server, test_fixtures_dir().join("tokens/abi/src/main.sw")).await; + let mut server = ServerState::default(); + let uri = open( + &mut server, + test_fixtures_dir().join("tokens/abi/src/main.sw"), + ) + .await; let hover = HoverDocumentation { req_uri: &uri, @@ -1447,9 +1455,9 @@ async fn hover_docs_for_abis() { #[tokio::test] async fn hover_docs_for_variables() { - let server = ServerState::default(); + let mut server = ServerState::default(); let uri = open( - &server, + &mut server, test_fixtures_dir().join("tokens/variables/src/main.sw"), ) .await; @@ -1466,8 +1474,8 @@ async fn hover_docs_for_variables() { #[tokio::test] async fn hover_docs_with_code_examples() { - let server = ServerState::default(); - let uri = open(&server, doc_comments_dir().join("src/main.sw")).await; + let mut server = ServerState::default(); + let uri = open(&mut server, doc_comments_dir().join("src/main.sw")).await; let hover = HoverDocumentation { req_uri: &uri, @@ -1481,8 +1489,12 @@ async fn hover_docs_with_code_examples() { #[tokio::test] async fn hover_docs_for_self_keywords() { - let server = ServerState::default(); - let uri = open(&server, test_fixtures_dir().join("completion/src/main.sw")).await; + let mut server = ServerState::default(); + let uri = open( + &mut server, + test_fixtures_dir().join("completion/src/main.sw"), + ) + .await; let mut hover = HoverDocumentation { req_uri: &uri, @@ -1500,9 +1512,9 @@ async fn hover_docs_for_self_keywords() { #[tokio::test] async fn hover_docs_for_boolean_keywords() { - let server = ServerState::default(); + let mut server = ServerState::default(); let uri = open( - &server, + &mut server, test_fixtures_dir().join("tokens/storage/src/main.sw"), ) .await; @@ -1524,8 +1536,12 @@ async fn hover_docs_for_boolean_keywords() { #[tokio::test] async fn rename() { - let server = ServerState::default(); - let uri = open(&server, test_fixtures_dir().join("renaming/src/main.sw")).await; + let mut server = ServerState::default(); + let uri = open( + &mut server, + test_fixtures_dir().join("renaming/src/main.sw"), + ) + .await; // Struct expression variable let rename = Rename { @@ -1667,11 +1683,11 @@ async fn publish_diagnostics_multi_file() { // The capability argument is an async function. macro_rules! test_lsp_capability { ($entry_point:expr, $capability:expr) => {{ - let server = ServerState::default(); - let uri = open(&server, $entry_point).await; + let mut server = ServerState::default(); + let uri = open(&mut server, $entry_point).await; // Call the specific LSP capability function that was passed in. - let _ = $capability(&server, &uri); + let _ = $capability(&server, &uri).await; let _ = server.shutdown_server(); }}; } @@ -1779,7 +1795,7 @@ async fn write_all_example_asts() { // ordering is required the entries should be explicitly sorted. 
     entries.sort();
 
-    let server = ServerState::default();
+    let mut server = ServerState::default();
 
     for entry in entries {
         let manifest_dir = entry;
@@ -1794,7 +1810,7 @@ async fn write_all_example_asts() {
             Err(_) => continue,
         }
 
-        let uri = open(&server, manifest_dir.join("src/main.sw")).await;
+        let uri = open(&mut server, manifest_dir.join("src/main.sw")).await;
         let example_dir = Some(Url::from_file_path(example_dir).unwrap());
         lsp::show_ast_request(&server, &uri, "lexed", example_dir.clone()).await;
         lsp::show_ast_request(&server, &uri, "parsed", example_dir.clone()).await;
diff --git a/sway-lsp/tests/utils/Cargo.toml b/sway-lsp/tests/utils/Cargo.toml
index 13fd96796ed..678c63b9d63 100644
--- a/sway-lsp/tests/utils/Cargo.toml
+++ b/sway-lsp/tests/utils/Cargo.toml
@@ -17,4 +17,4 @@ serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0.60"
 tokio = { version = "1.3", features = ["io-std", "io-util", "macros", "net", "rt-multi-thread", "sync", "time"] }
 tower = { version = "0.4.12", default-features = false, features = ["util"] }
-tower-lsp = { version = "0.19", features = ["proposed"] }
\ No newline at end of file
+tower-lsp = { git = "https://github.com/ebkalderon/tower-lsp", branch = "support-mutable-methods" }
diff --git a/sway-utils/src/helpers.rs b/sway-utils/src/helpers.rs
index 2b3ec244fbb..a51ec5f3309 100644
--- a/sway-utils/src/helpers.rs
+++ b/sway-utils/src/helpers.rs
@@ -4,9 +4,9 @@ use std::ffi::OsStr;
 use std::fs;
 use std::path::{Path, PathBuf};
 
-pub fn get_sway_files(path: PathBuf) -> Vec<PathBuf> {
+pub fn get_sway_files(path: &Path) -> Vec<PathBuf> {
     let mut files = vec![];
-    let mut dir_entries = vec![path];
+    let mut dir_entries = vec![path.to_path_buf()];
     while let Some(next_dir) = dir_entries.pop() {
         if let Ok(read_dir) = fs::read_dir(next_dir) {