This repository was archived by the owner on Dec 29, 2022. It is now read-only.
Merged
21 changes: 20 additions & 1 deletion src/actions/mod.rs
@@ -32,7 +32,7 @@ use crate::lsp_data::*;
 use crate::project_model::{ProjectModel, RacerFallbackModel, RacerProjectModel};
 use crate::server::Output;
 
-use std::collections::HashMap;
+use std::collections::{HashMap, HashSet};
 use std::io;
 use std::path::{Path, PathBuf};
 use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
@@ -143,6 +143,7 @@ pub struct InitActionContext
 
     previous_build_results: Arc<Mutex<BuildResults>>,
     build_queue: BuildQueue,
+    file_to_crates: Arc<Mutex<HashMap<PathBuf, HashSet<Crate>>>>,
     // Keep a record of builds/post-build tasks currently in flight so that
     // mutating actions can block until the data is ready.
     active_build_count: Arc<AtomicUsize>,
@@ -212,6 +213,7 @@ impl InitActionContext
             project_model: Arc::default(),
             previous_build_results: Arc::default(),
             build_queue,
+            file_to_crates: Arc::default(),
             active_build_count: Arc::new(AtomicUsize::new(0)),
             shown_cargo_error: Arc::new(AtomicBool::new(false)),
             quiescent: Arc::new(AtomicBool::new(false)),
@@ -288,6 +290,22 @@ impl InitActionContext
         FmtConfig::from(&self.current_project)
     }
 
+    fn file_edition(&self, file: PathBuf) -> Option<Edition> {
+        let files_to_crates = self.file_to_crates.lock().unwrap();
+
+        let editions: HashSet<_> = files_to_crates
+            .get(&file)?
+            .iter()
+            .map(|c| c.edition)
+            .collect();
+
+        let mut iter = editions.into_iter();
+        match (iter.next(), iter.next()) {
+            (ret @ Some(_), None) => ret,
+            _ => None,
+        }
+    }
+
     fn init<O: Output>(&self, init_options: &InitializationOptions, out: &O) {
         let current_project = self.current_project.clone();
         let config = self.config.clone();
@@ -318,6 +336,7 @@
             analysis: self.analysis.clone(),
             analysis_queue: self.analysis_queue.clone(),
             previous_build_results: self.previous_build_results.clone(),
+            file_to_crates: self.file_to_crates.clone(),
             project_path: project_path.to_owned(),
             show_warnings: config.show_warnings,
             related_information_support: self.client_capabilities.related_information_support,
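
The new `file_edition` helper resolves a file's edition only when it is unambiguous: pulling two items from the set's iterator distinguishes a single edition from both the empty case and the multi-edition case (the same file can be compiled into several crates, for example a module shared between a library and a binary target). A standalone sketch of that idiom; the `unique` helper is illustrative, not from this PR:

```rust
use std::collections::HashSet;

// Returns the set's sole element, or `None` when the set is empty
// or holds more than one element: the same shape as `file_edition`.
fn unique<T>(set: HashSet<T>) -> Option<T> {
    let mut iter = set.into_iter();
    match (iter.next(), iter.next()) {
        (ret @ Some(_), None) => ret,
        _ => None,
    }
}

fn main() {
    let one: HashSet<u32> = [2018].iter().copied().collect();
    let many: HashSet<u32> = [2015, 2018].iter().copied().collect();
    assert_eq!(unique(one), Some(2018));
    assert_eq!(unique(many), None); // ambiguous, so no edition is reported
}
```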
14 changes: 11 additions & 3 deletions src/actions/post_build.rs
@@ -13,17 +13,18 @@
 #![allow(missing_docs)]
 
 use std::collections::hash_map::DefaultHasher;
-use std::collections::HashMap;
+use std::collections::{HashMap, HashSet};
 use std::hash::{Hash, Hasher};
 use std::panic::RefUnwindSafe;
 use std::path::{Path, PathBuf};
 use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
 use std::sync::{Arc, Mutex};
 use std::thread::{self, Thread};
+use std::ops::Deref;
 
 use crate::actions::diagnostics::{parse_diagnostics, Diagnostic, ParsedDiagnostics, Suggestion};
 use crate::actions::progress::DiagnosticsNotifier;
-use crate::build::BuildResult;
+use crate::build::{BuildResult, Crate};
 use crate::concurrency::JobToken;
 use crate::lsp_data::{Range, PublishDiagnosticsParams};
 
@@ -41,6 +42,7 @@ pub struct PostBuildHandler
     pub analysis: Arc<AnalysisHost>,
     pub analysis_queue: Arc<AnalysisQueue>,
     pub previous_build_results: Arc<Mutex<BuildResults>>,
+    pub file_to_crates: Arc<Mutex<HashMap<PathBuf, HashSet<Crate>>>>,
     pub project_path: PathBuf,
     pub show_warnings: bool,
     pub use_black_list: bool,
@@ -55,14 +57,20 @@ pub struct PostBuildHandler
 impl PostBuildHandler {
     pub fn handle(self, result: BuildResult) {
         match result {
-            BuildResult::Success(cwd, messages, new_analysis, _) => {
+            BuildResult::Success(cwd, messages, new_analysis, input_files, _) => {
                 trace!("build - Success");
                 self.notifier.notify_begin_diagnostics();
 
                 // Emit appropriate diagnostics using the ones from build.
                 self.handle_messages(&cwd, &messages);
                 let analysis_queue = self.analysis_queue.clone();
 
+                {
+                    let mut files_to_crates = self.file_to_crates.lock().unwrap();
+                    *files_to_crates = input_files;
+                    trace!("Files to crates: {:#?}", files_to_crates.deref());
+                }
+
                 let job = Job::new(self, new_analysis, cwd);
                 analysis_queue.enqueue(job);
             }
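
Each successful build recomputes the complete file-to-crates mapping, so the handler replaces the whole map rather than merging into it, and the lock guard is scoped to its own block so the borrow of `self` ends before `self` is moved into `Job::new`. A minimal sketch of that pattern, with a hypothetical `Crate` stand-in for the real type from `src/build/plan.rs`:

```rust
use std::collections::{HashMap, HashSet};
use std::path::PathBuf;
use std::sync::{Arc, Mutex};

// Hypothetical stand-in for the real `Crate` type.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct Crate {
    name: String,
}

fn main() {
    let cache: Arc<Mutex<HashMap<PathBuf, HashSet<Crate>>>> = Arc::default();

    let mut fresh = HashMap::new();
    fresh.insert(
        PathBuf::from("src/lib.rs"),
        [Crate { name: "foo".into() }].iter().cloned().collect::<HashSet<_>>(),
    );

    {
        // Scoped like the block in `PostBuildHandler::handle`: the guard
        // drops at the end of the block, releasing the lock immediately.
        let mut guard = cache.lock().unwrap();
        *guard = fresh;
    }

    assert_eq!(cache.lock().unwrap().len(), 1);
}
```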
25 changes: 23 additions & 2 deletions src/actions/requests.rs
@@ -12,12 +12,12 @@
 
 use crate::actions::InitActionContext;
 use itertools::Itertools;
-use log::{debug, trace};
+use log::{debug, trace, warn};
 use racer;
 use rls_data as data;
 use rls_span as span;
 use rls_vfs::FileContents;
-use rustfmt_nightly::{FileLines, FileName, Range as RustfmtRange};
+use rustfmt_nightly::{Edition as RustfmtEdition, FileLines, FileName, Range as RustfmtRange};
 use serde_derive::{Deserialize, Serialize};
 use serde_json;
 use url::Url;
@@ -26,6 +26,7 @@ use crate::actions::hover;
 use crate::actions::run::collect_run_actions;
 use crate::actions::work_pool;
 use crate::actions::work_pool::WorkDescription;
+use crate::build::Edition;
 use crate::lsp_data;
 use crate::lsp_data::*;
 use crate::server;
@@ -754,6 +755,26 @@ fn reformat(
     if !config.was_set().tab_spaces() {
         config.set().tab_spaces(opts.tab_size as usize);
     }
+    if !config.was_set().edition() {
+        match ctx.file_edition(path.clone()) {
+            Some(edition) => {
+                let edition = match edition {
+                    Edition::Edition2015 => RustfmtEdition::Edition2015,
+                    Edition::Edition2018 => RustfmtEdition::Edition2018,
+                };
+                config.set().edition(edition);
+                trace!("Detected edition {:?} for file `{}`", edition, path.display());
+            },
+            None => {
+                warn!("Reformat failed: ambiguous edition for `{}`", path.display());
+
+                return Err(ResponseError::Message(
+                    ErrorCode::InternalError,
+                    "Reformat failed to complete successfully".into(),
+                ));
+            }
+        }
+    }
 
     if let Some(r) = selection {
         let range_of_rls = ls_util::range_to_rls(r).one_indexed();
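
With this change the reformat request fails loudly when the edition cannot be pinned down, instead of formatting with a possibly wrong default. The two-arm `match` is the entire conversion surface between RLS's `Edition` and rustfmt's; the sketch below expresses the same mapping as a `From` impl over stand-in enums (the PR keeps the inline `match`; the enums here are illustrative copies, both types had exactly these two variants at the time):

```rust
// Stand-ins for `rls::build::Edition` and `rustfmt_nightly::Edition`.
#[derive(Debug, Clone, Copy, PartialEq)]
enum Edition {
    Edition2015,
    Edition2018,
}

#[derive(Debug, Clone, Copy, PartialEq)]
enum RustfmtEdition {
    Edition2015,
    Edition2018,
}

impl From<Edition> for RustfmtEdition {
    fn from(e: Edition) -> Self {
        match e {
            Edition::Edition2015 => RustfmtEdition::Edition2015,
            Edition::Edition2018 => RustfmtEdition::Edition2018,
        }
    }
}

fn main() {
    assert_eq!(RustfmtEdition::from(Edition::Edition2018), RustfmtEdition::Edition2018);
}
```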
50 changes: 39 additions & 11 deletions src/build/cargo.rs
@@ -24,7 +24,7 @@ use serde_json;
 use crate::actions::progress::ProgressUpdate;
 use crate::build::cargo_plan::CargoPlan;
 use crate::build::environment::{self, Environment, EnvironmentLock};
-use crate::build::plan::BuildPlan;
+use crate::build::plan::{BuildPlan, Crate};
 use crate::build::{BufWriter, BuildResult, CompilationContext, Internals, PackageArg};
 use crate::config::Config;
 use crate::lsp_data::{Position, Range};
@@ -58,6 +58,8 @@ pub(super) fn cargo(
     let diagnostics_clone = diagnostics.clone();
     let analysis = Arc::new(Mutex::new(vec![]));
     let analysis_clone = analysis.clone();
+    let input_files = Arc::new(Mutex::new(HashMap::new()));
+    let input_files_clone = input_files.clone();
     let out = Arc::new(Mutex::new(vec![]));
     let out_clone = out.clone();
 
@@ -74,6 +76,7 @@
             env_lock,
             diagnostics,
             analysis,
+            input_files,
             out,
             progress_sender,
         )
@@ -93,7 +96,11 @@
                 .unwrap()
                 .into_inner()
                 .unwrap();
-            BuildResult::Success(cwd.clone(), diagnostics, analysis, true)
+            let input_files = Arc::try_unwrap(input_files_clone)
+                .unwrap()
+                .into_inner()
+                .unwrap();
+            BuildResult::Success(cwd.clone(), diagnostics, analysis, input_files, true)
         }
         Err(error) => {
             let stdout = String::from_utf8(out_clone.lock().unwrap().to_owned()).unwrap();
@@ -123,6 +130,7 @@ fn run_cargo(
     env_lock: Arc<EnvironmentLock>,
     compiler_messages: Arc<Mutex<Vec<String>>>,
     analysis: Arc<Mutex<Vec<Analysis>>>,
+    input_files: Arc<Mutex<HashMap<PathBuf, HashSet<Crate>>>>,
     out: Arc<Mutex<Vec<u8>>>,
     progress_sender: Sender<ProgressUpdate>,
 ) -> Result<PathBuf, failure::Error> {
@@ -163,6 +171,7 @@
         vfs,
         compiler_messages,
         analysis,
+        input_files,
         progress_sender,
         inner_lock,
         restore_env,
@@ -180,6 +189,7 @@ fn run_cargo_ws(
     vfs: Arc<Vfs>,
     compiler_messages: Arc<Mutex<Vec<String>>>,
     analysis: Arc<Mutex<Vec<Analysis>>>,
+    input_files: Arc<Mutex<HashMap<PathBuf, HashSet<Crate>>>>,
     progress_sender: Sender<ProgressUpdate>,
     inner_lock: environment::InnerLock,
     mut restore_env: Environment<'_>,
@@ -275,6 +285,7 @@ fn run_cargo_ws(
         vfs,
         compiler_messages,
         analysis,
+        input_files,
         progress_sender,
         reached_primary.clone(),
     );
@@ -321,6 +332,7 @@ struct RlsExecutor {
     /// Packages which are direct members of the workspace, for which
     /// analysis and diagnostics will be provided
     member_packages: Mutex<HashSet<PackageId>>,
+    input_files: Arc<Mutex<HashMap<PathBuf, HashSet<Crate>>>>,
     /// JSON compiler messages emitted for each primary compiled crate
     compiler_messages: Arc<Mutex<Vec<String>>>,
     progress_sender: Mutex<Sender<ProgressUpdate>>,
@@ -341,6 +353,7 @@ impl RlsExecutor {
         vfs: Arc<Vfs>,
         compiler_messages: Arc<Mutex<Vec<String>>>,
         analysis: Arc<Mutex<Vec<Analysis>>>,
+        input_files: Arc<Mutex<HashMap<PathBuf, HashSet<Crate>>>>,
         progress_sender: Sender<ProgressUpdate>,
         reached_primary: Arc<AtomicBool>,
     ) -> RlsExecutor {
@@ -352,6 +365,7 @@ impl RlsExecutor {
             env_lock,
             vfs,
             analysis,
+            input_files,
             member_packages: Mutex::new(member_packages),
             compiler_messages,
             progress_sender: Mutex::new(progress_sender),
@@ -559,18 +573,32 @@ impl Executor for RlsExecutor {
             cx.build_dir.clone().unwrap()
         };
 
-        if let BuildResult::Success(_, mut messages, mut analysis, success) = super::rustc::rustc(
-            &self.vfs,
-            &args,
-            &envs,
-            cargo_cmd.get_cwd(),
-            &build_dir,
-            Arc::clone(&self.config),
-            &self.env_lock.as_facade(),
-        ) {
+        if let BuildResult::Success(_, mut messages, mut analysis, input_files, success) =
+            super::rustc::rustc(
+                &self.vfs,
+                &args,
+                &envs,
+                cargo_cmd.get_cwd(),
+                &build_dir,
+                Arc::clone(&self.config),
+                &self.env_lock.as_facade(),
+            ) {
            self.compiler_messages.lock().unwrap().append(&mut messages);
            self.analysis.lock().unwrap().append(&mut analysis);
 
+            // Cache calculated input files for a given rustc invocation
+            {
+                let mut cx = self.compilation_cx.lock().unwrap();
+                let plan = cx.build_plan.as_cargo_mut().unwrap();
+                let input_files = input_files.keys().cloned().collect();
+                plan.cache_input_files(id, target, mode, input_files, cargo_cmd.get_cwd());
+            }
+
+            let mut self_input_files = self.input_files.lock().unwrap();
+            for (file, inputs) in input_files {
+                self_input_files.entry(file).or_default().extend(inputs);
+            }
+
             if !success {
                 return Err(format_err!("Build error"));
             }
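
A single build runs rustc once per unit, and the same file can appear in several invocations, so the executor merges each invocation's map into the shared one with `entry(...).or_default().extend(...)` rather than overwriting earlier entries. A minimal sketch of that merge, using `&str` crate names in place of the real `Crate` type:

```rust
use std::collections::{HashMap, HashSet};
use std::path::PathBuf;

// Merge one rustc invocation's file -> crates map into the accumulated map,
// unioning crate sets for files already seen in earlier invocations.
fn merge(
    acc: &mut HashMap<PathBuf, HashSet<&'static str>>,
    new: HashMap<PathBuf, HashSet<&'static str>>,
) {
    for (file, crates) in new {
        acc.entry(file).or_default().extend(crates);
    }
}

fn main() {
    let mut acc = HashMap::new();
    let lib: HashSet<_> = ["foo"].iter().copied().collect();
    let test: HashSet<_> = ["foo_test"].iter().copied().collect();

    merge(&mut acc, vec![(PathBuf::from("src/lib.rs"), lib)].into_iter().collect());
    merge(&mut acc, vec![(PathBuf::from("src/lib.rs"), test)].into_iter().collect());

    // `src/lib.rs` now maps to both crates that compiled it.
    assert_eq!(acc[&PathBuf::from("src/lib.rs")].len(), 2);
}
```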
38 changes: 38 additions & 0 deletions src/build/cargo_plan.rs
@@ -39,6 +39,7 @@ use url::Url;
 
 use crate::build::PackageArg;
 use crate::build::plan::{BuildKey, BuildGraph, JobQueue, WorkStatus};
+use crate::build::rustc::src_path;
 use crate::lsp_data::parse_file_path;
 
 /// Main key type by which `Unit`s will be distinguished in the build plan.
@@ -58,6 +59,9 @@ crate struct CargoPlan {
     crate rev_dep_graph: HashMap<UnitKey, HashSet<UnitKey>>,
     /// Cached compiler calls used when creating a compiler call queue.
     crate compiler_jobs: HashMap<UnitKey, ProcessBuilder>,
+    /// Calculated input files that each unit depends on.
+    crate input_files: HashMap<UnitKey, Vec<PathBuf>>,
+    crate file_key_mapping: HashMap<PathBuf, HashSet<UnitKey>>,
     // An object for finding the package which a file belongs to and thus inferring
     // a package argument.
     package_map: Option<PackageMap>,
@@ -101,6 +105,40 @@ impl CargoPlan {
         self.compiler_jobs.insert(unit_key, cmd.clone());
     }
 
+    crate fn cache_input_files(
+        &mut self,
+        id: &PackageId,
+        target: &Target,
+        mode: CompileMode,
+        input_files: Vec<PathBuf>,
+        cwd: Option<&Path>,
+    ) {
+        let input_files: Vec<_> = input_files
+            .iter()
+            .filter_map(|file| src_path(cwd, file))
+            .filter_map(|file| match std::fs::canonicalize(&file) {
+                Ok(file) => Some(file),
+                Err(err) => {
+                    error!("Couldn't canonicalize `{}`: {}", file.display(), err);
+                    None
+                }
+            })
+            .collect();
+
+        let unit_key = (id.clone(), target.clone(), mode);
+        trace!("Caching these files: {:#?} for {:?} key", &input_files, &unit_key);
+
+        // Create reverse file -> unit mapping (to be used for dirty unit calculation)
+        for file in &input_files {
+            self.file_key_mapping
+                .entry(file.to_path_buf())
+                .or_default()
+                .insert(unit_key.clone());
+        }
+
+        self.input_files.insert(unit_key, input_files);
+    }
+
     /// Emplace a given `Unit`, along with its `Unit` dependencies (recursively)
     /// into the dependency graph as long as the passed `Unit` isn't filtered
     /// out by the `filter` closure.
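
`cache_input_files` maintains two views of the same data: `input_files` answers "which files does this unit read?", while `file_key_mapping` answers the inverse, "which units are invalidated when this file changes?". The dirty-unit query itself is outside this diff; a hedged sketch of how the reverse map would serve it, with a hypothetical function name and `UnitKey` simplified to a string:

```rust
use std::collections::{HashMap, HashSet};
use std::path::{Path, PathBuf};

// Simplified stand-in for the real `UnitKey = (PackageId, Target, CompileMode)`.
type UnitKey = String;

// Hypothetical dirty-unit query: which units does a modified file invalidate?
fn dirty_units(
    file_key_mapping: &HashMap<PathBuf, HashSet<UnitKey>>,
    modified: &Path,
) -> HashSet<UnitKey> {
    file_key_mapping.get(modified).cloned().unwrap_or_default()
}

fn main() {
    let mut map: HashMap<PathBuf, HashSet<UnitKey>> = HashMap::new();
    map.entry(PathBuf::from("src/lib.rs"))
        .or_default()
        .insert("foo-lib".to_string());

    assert_eq!(dirty_units(&map, Path::new("src/lib.rs")).len(), 1);
    assert!(dirty_units(&map, Path::new("src/other.rs")).is_empty());
}
```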
13 changes: 2 additions & 11 deletions src/build/external.rs
@@ -31,6 +31,7 @@ use std::process::{Command, Stdio};
 
 use crate::build::BuildResult;
 use crate::build::plan::{BuildKey, BuildGraph, JobQueue, WorkStatus};
+use crate::build::rustc::src_path;
 
 use cargo::util::{process, ProcessBuilder};
 use log::trace;
@@ -96,7 +97,7 @@ pub(super) fn build_with_external_cmd<S: AsRef<str>>(
     };
 
     let plan = plan_from_analysis(&analyses, &build_dir);
-    (BuildResult::Success(build_dir, vec![], analyses, false), plan)
+    (BuildResult::Success(build_dir, vec![], analyses, HashMap::default(), false), plan)
 }
 
 /// Reads and deserializes given save-analysis JSON files into corresponding
@@ -453,16 +454,6 @@ fn guess_rustc_src_path(build_dir: &Path, cmd: &ProcessBuilder) -> Option<PathBuf> {
     src_path(cwd, file)
 }
 
-fn src_path(cwd: Option<&Path>, path: impl AsRef<Path>) -> Option<PathBuf> {
-    let path = path.as_ref();
-
-    Some(match (cwd, path.is_absolute()) {
-        (_, true) => path.to_owned(),
-        (Some(cwd), _) => cwd.join(path),
-        (None, _) => std::env::current_dir().ok()?.join(path)
-    })
-}
-
 #[cfg(test)]
 mod tests {
     use super::*;
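
`src_path` moves out of `external.rs` into `src/build/rustc.rs` so that `cargo_plan.rs` (see the import added above) can reuse it when canonicalizing input files; the helper itself is unchanged by the move. A self-contained copy with its behavior spelled out (the asserts assume Unix-style paths):

```rust
use std::path::{Path, PathBuf};

// Body of the relocated helper, as removed above.
fn src_path(cwd: Option<&Path>, path: impl AsRef<Path>) -> Option<PathBuf> {
    let path = path.as_ref();

    Some(match (cwd, path.is_absolute()) {
        (_, true) => path.to_owned(),
        (Some(cwd), _) => cwd.join(path),
        (None, _) => std::env::current_dir().ok()?.join(path)
    })
}

fn main() {
    // Relative paths are resolved against the recorded rustc cwd...
    assert_eq!(
        src_path(Some(Path::new("/work")), "src/lib.rs"),
        Some(PathBuf::from("/work/src/lib.rs"))
    );
    // ...absolute paths pass through untouched...
    assert_eq!(
        src_path(Some(Path::new("/work")), "/abs/main.rs"),
        Some(PathBuf::from("/abs/main.rs"))
    );
    // ...and with no cwd recorded, the process's current dir is used.
    assert!(src_path(None, "src/lib.rs").is_some());
}
```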