Auto merge of #101220 - JohnTitor:rollup-ov7upr7, r=JohnTitor
Rollup of 10 pull requests

Successful merges:

 - #100804 (Fix search results color on hover for ayu theme)
 - #100892 (Add `AsFd` implementations for stdio types on WASI.)
 - #100927 (Adding new Fuchsia rustup docs... reworking walkthrough)
 - #101088 (Set DebuginfoKind::Pdb in msvc_base)
 - #101159 (add tracking issue number to const_slice_split_at_not_mut)
 - #101192 (Remove path string)
 - #101193 (Avoid zeroing large stack buffers in stdio on Windows)
 - #101197 (:arrow_up: rust-analyzer)
 - #101200 (Add test for issue #85872)
 - #101219 (Update books)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
bors committed Aug 31, 2022
2 parents 79ebac7 + 572f5a3 commit 28243a1
Showing 74 changed files with 2,005 additions and 865 deletions.
81 changes: 54 additions & 27 deletions Cargo.lock

Some generated files are not rendered by default.

40 changes: 28 additions & 12 deletions crates/base-db/src/input.rs
@@ -9,10 +9,11 @@
use std::{fmt, ops, panic::RefUnwindSafe, str::FromStr, sync::Arc};

use cfg::CfgOptions;
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_hash::FxHashMap;
use stdx::hash::{NoHashHashMap, NoHashHashSet};
use syntax::SmolStr;
use tt::Subtree;
use vfs::{file_set::FileSet, FileId, VfsPath};
use vfs::{file_set::FileSet, AnchoredPath, FileId, VfsPath};

/// Files are grouped into source roots. A source root is a directory on the
/// file systems which is watched for changes. Typically it corresponds to a
@@ -31,22 +31,30 @@ pub struct SourceRoot {
/// Libraries are considered mostly immutable, this assumption is used to
/// optimize salsa's query structure
pub is_library: bool,
pub(crate) file_set: FileSet,
file_set: FileSet,
}

impl SourceRoot {
pub fn new_local(file_set: FileSet) -> SourceRoot {
SourceRoot { is_library: false, file_set }
}

pub fn new_library(file_set: FileSet) -> SourceRoot {
SourceRoot { is_library: true, file_set }
}

pub fn path_for_file(&self, file: &FileId) -> Option<&VfsPath> {
self.file_set.path_for_file(file)
}

pub fn file_for_path(&self, path: &VfsPath) -> Option<&FileId> {
self.file_set.file_for_path(path)
}

pub fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
self.file_set.resolve_path(path)
}

pub fn iter(&self) -> impl Iterator<Item = FileId> + '_ {
self.file_set.iter()
}
@@ -72,12 +81,19 @@ impl SourceRoot {
/// <https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/architecture.md#serialization>
#[derive(Debug, Clone, Default /* Serialize, Deserialize */)]
pub struct CrateGraph {
arena: FxHashMap<CrateId, CrateData>,
arena: NoHashHashMap<CrateId, CrateData>,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct CrateId(pub u32);

impl stdx::hash::NoHashHashable for CrateId {}
impl std::hash::Hash for CrateId {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.0.hash(state);
}
}
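
The switch from `FxHashMap`/`FxHashSet` to `NoHashHashMap`/`NoHashHashSet` relies on `CrateId` already being a well-distributed `u32`, so running it through a real hash function is wasted work. A minimal standalone sketch of the idea, an identity hasher (illustration only, not the actual `stdx::hash` implementation):

```rust
use std::collections::HashMap;
use std::hash::{BuildHasherDefault, Hasher};

// Identity "hasher": the key is already a small unique integer, so just pass
// it through instead of mixing it with a real hash function.
#[derive(Default)]
struct IdentityHasher(u64);

impl Hasher for IdentityHasher {
    fn finish(&self) -> u64 {
        self.0
    }
    fn write(&mut self, _bytes: &[u8]) {
        // Only integer keys are expected; arbitrary byte slices are not supported.
        unimplemented!("identity hasher only supports integer keys")
    }
    fn write_u32(&mut self, n: u32) {
        self.0 = u64::from(n);
    }
}

// A HashMap that skips hashing, analogous in spirit to stdx's NoHashHashMap.
type NoHashMap<K, V> = HashMap<K, V, BuildHasherDefault<IdentityHasher>>;

fn main() {
    let mut deps: NoHashMap<u32, &str> = NoHashMap::default();
    deps.insert(42, "core");
    assert_eq!(deps.get(&42), Some(&"core"));
}
```

The manual `Hash` impl above forwards to `self.0.hash(state)`, which boils down to a single `write_u32` call, exactly the shape such a hasher expects.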

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CrateName(SmolStr);

@@ -342,7 +358,7 @@ impl CrateGraph {
// Check if adding a dep from `from` to `to` creates a cycle. To figure
// that out, look for a path in the *opposite* direction, from `to` to
// `from`.
if let Some(path) = self.find_path(&mut FxHashSet::default(), dep.crate_id, from) {
if let Some(path) = self.find_path(&mut NoHashHashSet::default(), dep.crate_id, from) {
let path = path.into_iter().map(|it| (it, self[it].display_name.clone())).collect();
let err = CyclicDependenciesError { path };
assert!(err.from().0 == from && err.to().0 == dep.crate_id);
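
For reference, a standalone sketch of the reverse-path check described in the comment above: before adding an edge `from -> to`, look for an existing path from `to` back to `from`; finding one means the new edge would close a cycle. The adjacency-map graph here is hypothetical, not the real `CrateGraph` API:

```rust
use std::collections::{HashMap, HashSet};

// Depth-first search for a path `from -> ... -> to` over a plain adjacency map.
fn find_path(
    graph: &HashMap<u32, Vec<u32>>,
    visited: &mut HashSet<u32>,
    from: u32,
    to: u32,
) -> Option<Vec<u32>> {
    if !visited.insert(from) {
        return None; // already explored from this node
    }
    if from == to {
        return Some(vec![to]);
    }
    for &dep in graph.get(&from).into_iter().flatten() {
        if let Some(mut path) = find_path(graph, visited, dep, to) {
            path.push(from);
            return Some(path); // path is accumulated in reverse order
        }
    }
    None
}

fn main() {
    // 1 -> 2 -> 3; adding an edge 3 -> 1 would create a cycle because 1 already reaches 3.
    let graph = HashMap::from([(1, vec![2]), (2, vec![3]), (3, vec![])]);
    assert!(find_path(&graph, &mut HashSet::new(), 1, 3).is_some());
}
```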
@@ -365,7 +381,7 @@ impl CrateGraph {
/// including the crate itself.
pub fn transitive_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> {
let mut worklist = vec![of];
let mut deps = FxHashSet::default();
let mut deps = NoHashHashSet::default();

while let Some(krate) = worklist.pop() {
if !deps.insert(krate) {
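
As a standalone sketch of the worklist traversal used here (hypothetical adjacency-map graph, not the real `CrateGraph` API): pop a crate, skip it if already seen, otherwise record it and enqueue its direct dependencies.

```rust
use std::collections::{HashMap, HashSet};

// The result includes the starting crate itself, matching the doc comment above.
fn transitive_deps(graph: &HashMap<u32, Vec<u32>>, of: u32) -> HashSet<u32> {
    let mut worklist = vec![of];
    let mut deps = HashSet::new();

    while let Some(krate) = worklist.pop() {
        if !deps.insert(krate) {
            continue; // already visited
        }
        worklist.extend(graph.get(&krate).into_iter().flatten().copied());
    }
    deps
}

fn main() {
    let graph = HashMap::from([(1, vec![2]), (2, vec![3]), (3, vec![])]);
    assert_eq!(transitive_deps(&graph, 1), HashSet::from([1, 2, 3]));
}
```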
@@ -382,10 +398,10 @@ impl CrateGraph {
/// including the crate itself.
pub fn transitive_rev_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> {
let mut worklist = vec![of];
let mut rev_deps = FxHashSet::default();
let mut rev_deps = NoHashHashSet::default();
rev_deps.insert(of);

let mut inverted_graph = FxHashMap::<_, Vec<_>>::default();
let mut inverted_graph = NoHashHashMap::<_, Vec<_>>::default();
self.arena.iter().for_each(|(&krate, data)| {
data.dependencies
.iter()
@@ -409,7 +425,7 @@ impl CrateGraph {
/// come before the crate itself).
pub fn crates_in_topological_order(&self) -> Vec<CrateId> {
let mut res = Vec::new();
let mut visited = FxHashSet::default();
let mut visited = NoHashHashSet::default();

for krate in self.arena.keys().copied() {
go(self, &mut visited, &mut res, krate);
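
A standalone sketch of the post-order depth-first search used here (hypothetical graph shape, not the real `CrateGraph` API): a crate is pushed to the result only after all of its dependencies, so dependencies always precede the crates that use them.

```rust
use std::collections::{HashMap, HashSet};

fn crates_in_topological_order(graph: &HashMap<u32, Vec<u32>>) -> Vec<u32> {
    fn go(
        graph: &HashMap<u32, Vec<u32>>,
        visited: &mut HashSet<u32>,
        res: &mut Vec<u32>,
        source: u32,
    ) {
        if !visited.insert(source) {
            return; // already handled
        }
        for &dep in graph.get(&source).into_iter().flatten() {
            go(graph, visited, res, dep);
        }
        res.push(source); // post-order: all dependencies are already in `res`
    }

    let mut res = Vec::new();
    let mut visited = HashSet::new();
    for &krate in graph.keys() {
        go(graph, &mut visited, &mut res, krate);
    }
    res
}

fn main() {
    let graph = HashMap::from([(1, vec![2]), (2, vec![3]), (3, vec![])]);
    assert_eq!(crates_in_topological_order(&graph), vec![3, 2, 1]);
}
```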
@@ -419,7 +435,7 @@ impl CrateGraph {

fn go(
graph: &CrateGraph,
visited: &mut FxHashSet<CrateId>,
visited: &mut NoHashHashSet<CrateId>,
res: &mut Vec<CrateId>,
source: CrateId,
) {
@@ -459,7 +475,7 @@ impl CrateGraph {

fn find_path(
&self,
visited: &mut FxHashSet<CrateId>,
visited: &mut NoHashHashSet<CrateId>,
from: CrateId,
to: CrateId,
) -> Option<Vec<CrateId>> {
12 changes: 6 additions & 6 deletions crates/base-db/src/lib.rs
@@ -8,7 +8,7 @@ pub mod fixture

use std::{panic, sync::Arc};

use rustc_hash::FxHashSet;
use stdx::hash::NoHashHashSet;
use syntax::{ast, Parse, SourceFile, TextRange, TextSize};

pub use crate::{
@@ -58,7 +58,7 @@ pub trait FileLoader {
/// Text of the file.
fn file_text(&self, file_id: FileId) -> Arc<String>;
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId>;
fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>>;
fn relevant_crates(&self, file_id: FileId) -> Arc<NoHashHashSet<CrateId>>;
}

/// Database which stores all significant input facts: source code and project
@@ -94,10 +94,10 @@ pub trait SourceDatabaseExt: SourceDatabase {
#[salsa::input]
fn source_root(&self, id: SourceRootId) -> Arc<SourceRoot>;

fn source_root_crates(&self, id: SourceRootId) -> Arc<FxHashSet<CrateId>>;
fn source_root_crates(&self, id: SourceRootId) -> Arc<NoHashHashSet<CrateId>>;
}

fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<FxHashSet<CrateId>> {
fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<NoHashHashSet<CrateId>> {
let graph = db.crate_graph();
let res = graph
.iter()
@@ -120,10 +120,10 @@ impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
// FIXME: this *somehow* should be platform agnostic...
let source_root = self.0.file_source_root(path.anchor);
let source_root = self.0.source_root(source_root);
source_root.file_set.resolve_path(path)
source_root.resolve_path(path)
}

fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
fn relevant_crates(&self, file_id: FileId) -> Arc<NoHashHashSet<CrateId>> {
let _p = profile::span("relevant_crates");
let source_root = self.0.file_source_root(file_id);
self.0.source_root_crates(source_root)
10 changes: 6 additions & 4 deletions crates/flycheck/src/lib.rs
@@ -125,6 +125,7 @@ pub enum Progress {
DidCheckCrate(String),
DidFinish(io::Result<()>),
DidCancel,
DidFailToRestart(String),
}

enum Restart {
@@ -193,10 +194,11 @@ impl FlycheckActor {
self.progress(Progress::DidStart);
}
Err(error) => {
tracing::error!(
command = ?self.check_command(),
%error, "failed to restart flycheck"
);
self.progress(Progress::DidFailToRestart(format!(
"Failed to run the following command: {:?} error={}",
self.check_command(),
error
)));
}
}
}
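
The new `DidFailToRestart` variant turns a restart failure that was previously only written to the trace log into a progress message the client can surface to the user. A minimal, self-contained sketch of a consumer (the enum is trimmed and the reporting function is hypothetical, not the actual rust-analyzer main loop):

```rust
// Trimmed-down stand-in for flycheck's Progress enum.
#[derive(Debug)]
enum Progress {
    DidStart,
    DidFailToRestart(String),
}

fn report(progress: &Progress) {
    match progress {
        Progress::DidStart => eprintln!("flycheck started"),
        // In rust-analyzer this message would become a user-visible error
        // notification instead of a log line buried in the trace output.
        Progress::DidFailToRestart(message) => eprintln!("flycheck error: {message}"),
    }
}

fn main() {
    report(&Progress::DidFailToRestart(
        "Failed to run the following command: \"cargo check\" error=No such file or directory".to_string(),
    ));
}
```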