From dbf04a5ee29101afbd1db665369bb1d21224efb5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Mon, 20 Mar 2023 08:31:01 +0200 Subject: [PATCH] :arrow_up: rust-analyzer --- Cargo.lock | 16 +- bench_data/numerous_macro_rules | 4 +- crates/hir-def/src/adt.rs | 19 +- crates/hir-def/src/body.rs | 42 +- crates/hir-def/src/body/lower.rs | 2 + crates/hir-def/src/body/pretty.rs | 10 +- crates/hir-def/src/data.rs | 33 +- crates/hir-def/src/db.rs | 4 +- crates/hir-def/src/expr.rs | 1 + crates/hir-def/src/macro_expansion_tests.rs | 2 +- crates/hir-def/src/nameres.rs | 14 +- crates/hir-def/src/nameres/collector.rs | 12 +- crates/hir-def/src/test_db.rs | 8 +- crates/hir-def/src/visibility.rs | 28 +- crates/hir-expand/src/attrs.rs | 12 +- crates/hir-expand/src/builtin_attr_macro.rs | 8 +- crates/hir-expand/src/builtin_derive_macro.rs | 28 +- crates/hir-expand/src/builtin_fn_macro.rs | 56 +-- crates/hir-expand/src/db.rs | 51 +-- crates/hir-expand/src/eager.rs | 8 +- crates/hir-expand/src/fixup.rs | 3 +- crates/hir-expand/src/hygiene.rs | 18 +- crates/hir-expand/src/lib.rs | 64 +-- crates/hir-expand/src/mod_path.rs | 10 +- crates/hir-expand/src/proc_macro.rs | 19 +- crates/hir-ty/Cargo.toml | 8 +- crates/hir-ty/src/chalk_ext.rs | 15 +- crates/hir-ty/src/diagnostics.rs | 6 + crates/hir-ty/src/infer/expr.rs | 73 ++-- crates/hir-ty/src/infer/pat.rs | 37 +- crates/hir-ty/src/method_resolution.rs | 92 ++++- crates/hir-ty/src/mir/lower.rs | 20 +- crates/hir-ty/src/test_db.rs | 8 +- crates/hir-ty/src/tests.rs | 2 +- crates/hir-ty/src/tests/method_resolution.rs | 11 +- crates/hir-ty/src/tests/regression.rs | 32 ++ crates/hir-ty/src/tests/simple.rs | 60 +-- crates/hir-ty/src/tests/traits.rs | 71 +++- crates/hir/src/db.rs | 2 +- crates/hir/src/diagnostics.rs | 5 +- crates/hir/src/lib.rs | 35 +- crates/hir/src/semantics.rs | 8 +- crates/hir/src/source_analyzer.rs | 7 +- .../src/handlers/generate_function.rs | 2 +- .../ide-assists/src/handlers/inline_call.rs | 40 +- crates/ide-assists/src/handlers/remove_dbg.rs | 2 +- crates/ide-assists/src/lib.rs | 5 +- crates/ide-completion/src/completions.rs | 19 +- crates/ide-completion/src/completions/dot.rs | 6 +- crates/ide-completion/src/context.rs | 2 + crates/ide-completion/src/context/analysis.rs | 62 ++- crates/ide-completion/src/context/tests.rs | 12 + crates/ide-completion/src/render/pattern.rs | 26 +- crates/ide-completion/src/tests/pattern.rs | 1 + crates/ide-completion/src/tests/record.rs | 60 +++ crates/ide-completion/src/tests/special.rs | 1 + crates/ide-db/src/apply_change.rs | 2 +- crates/ide-db/src/defs.rs | 9 +- crates/ide-db/src/lib.rs | 8 +- .../src/handlers/incoherent_impl.rs | 77 ++++ .../src/handlers/incorrect_case.rs | 2 +- .../src/handlers/missing_fields.rs | 2 +- .../src/handlers/missing_unsafe.rs | 381 +++++++++++++++++- .../src/handlers/mutability_errors.rs | 24 ++ .../src/handlers/no_such_field.rs | 2 +- .../src/handlers/private_field.rs | 20 + .../replace_filter_map_next_with_find_map.rs | 2 +- .../src/handlers/type_mismatch.rs | 2 +- .../src/handlers/unresolved_field.rs | 2 +- .../src/handlers/unresolved_method.rs | 2 +- .../src/handlers/unresolved_module.rs | 2 +- crates/ide-diagnostics/src/lib.rs | 2 + crates/ide/src/goto_implementation.rs | 1 + crates/ide/src/goto_type_definition.rs | 2 +- crates/ide/src/inlay_hints/adjustment.rs | 46 ++- crates/ide/src/inlay_hints/chaining.rs | 12 +- crates/ide/src/signature_help.rs | 263 +++++++++++- crates/parser/src/grammar/patterns.rs | 15 +- crates/project-model/src/build_scripts.rs 
| 5 +- crates/project-model/src/cargo_workspace.rs | 6 +- crates/project-model/src/lib.rs | 2 +- crates/project-model/src/tests.rs | 6 +- crates/project-model/src/workspace.rs | 224 +++++----- .../rust-analyzer/src/cli/analysis_stats.rs | 6 +- crates/rust-analyzer/src/cli/diagnostics.rs | 4 +- crates/rust-analyzer/src/cli/lsif.rs | 4 +- crates/rust-analyzer/src/cli/scip.rs | 4 +- crates/rust-analyzer/src/cli/ssr.rs | 4 +- crates/rust-analyzer/src/config.rs | 20 +- crates/rust-analyzer/src/dispatch.rs | 36 ++ crates/rust-analyzer/src/handlers.rs | 12 +- crates/rust-analyzer/src/lsp_utils.rs | 40 +- crates/rust-analyzer/src/main_loop.rs | 19 +- crates/rust-analyzer/src/reload.rs | 41 +- crates/syntax/src/ast/expr_ext.rs | 43 +- crates/syntax/src/ast/node_ext.rs | 6 - crates/test-utils/src/minicore.rs | 2 + editors/code/package.json | 16 + editors/code/src/client.ts | 13 +- editors/code/src/commands.ts | 29 +- editors/code/src/config.ts | 29 +- editors/code/src/ctx.ts | 69 +++- editors/code/src/lsp_ext.ts | 1 + editors/code/src/main.ts | 1 + editors/code/src/rust_project.ts | 91 +++++ editors/code/src/util.ts | 17 + 106 files changed, 2220 insertions(+), 610 deletions(-) create mode 100644 crates/ide-diagnostics/src/handlers/incoherent_impl.rs create mode 100644 editors/code/src/rust_project.ts diff --git a/Cargo.lock b/Cargo.lock index fc77515b63bfd..25242c6028a47 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -169,9 +169,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chalk-derive" -version = "0.88.0" +version = "0.89.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4df80a3fbc1f0e59f560eeeebca94bf655566a8ad3023c210a109deb6056455a" +checksum = "ea176c50987dc4765961aa165001e8eb5a722a26308c5797a47303ea91686aab" dependencies = [ "proc-macro2", "quote", @@ -181,9 +181,9 @@ dependencies = [ [[package]] name = "chalk-ir" -version = "0.88.0" +version = "0.89.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f39e5272016916956298cceea5147006f897972c274a768ed4d6e074efe5d3fb" +checksum = "473b480241695428c14e8f84f1c9a47ef232450a50faf3a4041e5c9dc11e0a3b" dependencies = [ "bitflags", "chalk-derive", @@ -192,9 +192,9 @@ dependencies = [ [[package]] name = "chalk-recursive" -version = "0.88.0" +version = "0.89.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9d60b42ad7478d3e027e2f9ea4e99fbbb8fdee0c8c3cf068be269f57e603618" +checksum = "6764b4fe67cac3a3758185084efbfbd39bf0352795824ba849ddd2b64cd4bb28" dependencies = [ "chalk-derive", "chalk-ir", @@ -205,9 +205,9 @@ dependencies = [ [[package]] name = "chalk-solve" -version = "0.88.0" +version = "0.89.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab30620ea5b36819525eaab2204f4b8e1842fc7ee36826424a28bef59ae7fecf" +checksum = "55a7e6160966eceb6e7dcc2f479a2af4c477aaf5bccbc640d82515995ab1a6cc" dependencies = [ "chalk-derive", "chalk-ir", diff --git a/bench_data/numerous_macro_rules b/bench_data/numerous_macro_rules index bf89ed594f707..7610a3ae1e3cb 100644 --- a/bench_data/numerous_macro_rules +++ b/bench_data/numerous_macro_rules @@ -341,8 +341,8 @@ macro_rules! __ra_macro_fixture339 {($name : ident )=>{ impl Clone for $name macro_rules! __ra_macro_fixture340 {([$($stack : tt )*])=>{$($stack )* }; ([$($stack : tt )*]{$($tail : tt )* })=>{$($stack )* { remove_sections_inner ! ([]$($tail )*); }}; ([$($stack : tt )*]$t : tt $($tail : tt )*)=>{ remove_sections ! 
([$($stack )* $t ]$($tail )*); }; } macro_rules! __ra_macro_fixture341 {($t : ty ,$z : expr )=>{ impl Zero for $t { fn zero ()-> Self {$z as $t } fn is_zero (& self )-> bool { self == & Self :: zero ()}}}; } macro_rules! __ra_macro_fixture342 {($($ident : ident ),* $(,)?)=>{$(# [ allow ( bad_style )] pub const $ident : super :: Name = super :: Name :: new_inline ( stringify ! ($ident )); )* }; } -macro_rules! __ra_macro_fixture343 {($($trait : ident =>$expand : ident ),* )=>{# [ derive ( Debug , Clone , Copy , PartialEq , Eq , Hash )] pub enum BuiltinDeriveExpander {$($trait ),* } impl BuiltinDeriveExpander { pub fn expand (& self , db : & dyn AstDatabase , id : LazyMacroId , tt : & tt :: Subtree , )-> Result < tt :: Subtree , mbe :: ExpandError > { let expander = match * self {$(BuiltinDeriveExpander ::$trait =>$expand , )* }; expander ( db , id , tt )} fn find_by_name ( name : & name :: Name )-> Option < Self > { match name {$(id if id == & name :: name ! [$trait ]=> Some ( BuiltinDeriveExpander ::$trait ), )* _ => None , }}}}; } -macro_rules! __ra_macro_fixture344 {( LAZY : $(($name : ident , $kind : ident )=>$expand : ident ),* , EAGER : $(($e_name : ident , $e_kind : ident )=>$e_expand : ident ),* )=>{# [ derive ( Debug , Clone , Copy , PartialEq , Eq , Hash )] pub enum BuiltinFnLikeExpander {$($kind ),* }# [ derive ( Debug , Clone , Copy , PartialEq , Eq , Hash )] pub enum EagerExpander {$($e_kind ),* } impl BuiltinFnLikeExpander { pub fn expand (& self , db : & dyn AstDatabase , id : LazyMacroId , tt : & tt :: Subtree , )-> ExpandResult < tt :: Subtree > { let expander = match * self {$(BuiltinFnLikeExpander ::$kind =>$expand , )* }; expander ( db , id , tt )}} impl EagerExpander { pub fn expand (& self , db : & dyn AstDatabase , arg_id : EagerMacroId , tt : & tt :: Subtree , )-> ExpandResult < Option < ( tt :: Subtree , FragmentKind )>> { let expander = match * self {$(EagerExpander ::$e_kind =>$e_expand , )* }; expander ( db , arg_id , tt )}} fn find_by_name ( ident : & name :: Name )-> Option < Either < BuiltinFnLikeExpander , EagerExpander >> { match ident {$(id if id == & name :: name ! [$name ]=> Some ( Either :: Left ( BuiltinFnLikeExpander ::$kind )), )* $(id if id == & name :: name ! [$e_name ]=> Some ( Either :: Right ( EagerExpander ::$e_kind )), )* _ => return None , }}}; } +macro_rules! __ra_macro_fixture343 {($($trait : ident =>$expand : ident ),* )=>{# [ derive ( Debug , Clone , Copy , PartialEq , Eq , Hash )] pub enum BuiltinDeriveExpander {$($trait ),* } impl BuiltinDeriveExpander { pub fn expand (& self , db : & dyn ExpandDatabase , id : LazyMacroId , tt : & tt :: Subtree , )-> Result < tt :: Subtree , mbe :: ExpandError > { let expander = match * self {$(BuiltinDeriveExpander ::$trait =>$expand , )* }; expander ( db , id , tt )} fn find_by_name ( name : & name :: Name )-> Option < Self > { match name {$(id if id == & name :: name ! [$trait ]=> Some ( BuiltinDeriveExpander ::$trait ), )* _ => None , }}}}; } +macro_rules! 
__ra_macro_fixture344 {( LAZY : $(($name : ident , $kind : ident )=>$expand : ident ),* , EAGER : $(($e_name : ident , $e_kind : ident )=>$e_expand : ident ),* )=>{# [ derive ( Debug , Clone , Copy , PartialEq , Eq , Hash )] pub enum BuiltinFnLikeExpander {$($kind ),* }# [ derive ( Debug , Clone , Copy , PartialEq , Eq , Hash )] pub enum EagerExpander {$($e_kind ),* } impl BuiltinFnLikeExpander { pub fn expand (& self , db : & dyn ExpandDatabase , id : LazyMacroId , tt : & tt :: Subtree , )-> ExpandResult < tt :: Subtree > { let expander = match * self {$(BuiltinFnLikeExpander ::$kind =>$expand , )* }; expander ( db , id , tt )}} impl EagerExpander { pub fn expand (& self , db : & dyn ExpandDatabase , arg_id : EagerMacroId , tt : & tt :: Subtree , )-> ExpandResult < Option < ( tt :: Subtree , FragmentKind )>> { let expander = match * self {$(EagerExpander ::$e_kind =>$e_expand , )* }; expander ( db , arg_id , tt )}} fn find_by_name ( ident : & name :: Name )-> Option < Either < BuiltinFnLikeExpander , EagerExpander >> { match ident {$(id if id == & name :: name ! [$name ]=> Some ( Either :: Left ( BuiltinFnLikeExpander ::$kind )), )* $(id if id == & name :: name ! [$e_name ]=> Some ( Either :: Right ( EagerExpander ::$e_kind )), )* _ => return None , }}}; } macro_rules! __ra_macro_fixture345 {($($ty : ty =>$this : ident $im : block );*)=>{$(impl ToTokenTree for $ty { fn to_token ($this )-> tt :: TokenTree { let leaf : tt :: Leaf = $im . into (); leaf . into ()}} impl ToTokenTree for &$ty { fn to_token ($this )-> tt :: TokenTree { let leaf : tt :: Leaf = $im . clone (). into (); leaf . into ()}})* }} macro_rules! __ra_macro_fixture346 {($name : ident )=>{ impl $crate :: salsa :: InternKey for $name { fn from_intern_id ( v : $crate :: salsa :: InternId )-> Self {$name ( v )} fn as_intern_id (& self )-> $crate :: salsa :: InternId { self . 0 }}}; } macro_rules! 
__ra_macro_fixture347 {($($var : ident ($t : ty )),+ )=>{$(impl From <$t > for AttrOwner { fn from ( t : $t )-> AttrOwner { AttrOwner ::$var ( t )}})+ }; } diff --git a/crates/hir-def/src/adt.rs b/crates/hir-def/src/adt.rs index 9bc1c54a3c641..b336f59ffee31 100644 --- a/crates/hir-def/src/adt.rs +++ b/crates/hir-def/src/adt.rs @@ -40,6 +40,7 @@ pub struct StructData { pub repr: Option, pub visibility: RawVisibility, pub rustc_has_incoherent_inherent_impls: bool, + pub fundamental: bool, } #[derive(Debug, Clone, PartialEq, Eq)] @@ -173,10 +174,10 @@ impl StructData { let item_tree = loc.id.item_tree(db); let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into()); let cfg_options = db.crate_graph()[loc.container.krate].cfg_options.clone(); - let rustc_has_incoherent_inherent_impls = item_tree - .attrs(db, loc.container.krate, ModItem::from(loc.id.value).into()) - .by_key("rustc_has_incoherent_inherent_impls") - .exists(); + let attrs = item_tree.attrs(db, loc.container.krate, ModItem::from(loc.id.value).into()); + let rustc_has_incoherent_inherent_impls = + attrs.by_key("rustc_has_incoherent_inherent_impls").exists(); + let fundamental = attrs.by_key("fundamental").exists(); let strukt = &item_tree[loc.id.value]; let (variant_data, diagnostics) = lower_fields( @@ -196,6 +197,7 @@ impl StructData { repr, visibility: item_tree[strukt.visibility].clone(), rustc_has_incoherent_inherent_impls, + fundamental, }), diagnostics.into(), ) @@ -215,10 +217,10 @@ impl StructData { let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into()); let cfg_options = db.crate_graph()[loc.container.krate].cfg_options.clone(); - let rustc_has_incoherent_inherent_impls = item_tree - .attrs(db, loc.container.krate, ModItem::from(loc.id.value).into()) - .by_key("rustc_has_incoherent_inherent_impls") - .exists(); + let attrs = item_tree.attrs(db, loc.container.krate, ModItem::from(loc.id.value).into()); + let rustc_has_incoherent_inherent_impls = + attrs.by_key("rustc_has_incoherent_inherent_impls").exists(); + let fundamental = attrs.by_key("fundamental").exists(); let union = &item_tree[loc.id.value]; let (variant_data, diagnostics) = lower_fields( @@ -238,6 +240,7 @@ impl StructData { repr, visibility: item_tree[union.visibility].clone(), rustc_has_incoherent_inherent_impls, + fundamental, }), diagnostics.into(), ) diff --git a/crates/hir-def/src/body.rs b/crates/hir-def/src/body.rs index 3be477d48774a..b70e658efd79c 100644 --- a/crates/hir-def/src/body.rs +++ b/crates/hir-def/src/body.rs @@ -24,7 +24,9 @@ use syntax::{ast, AstPtr, SyntaxNode, SyntaxNodePtr}; use crate::{ attr::Attrs, db::DefDatabase, - expr::{dummy_expr_id, Binding, BindingId, Expr, ExprId, Label, LabelId, Pat, PatId}, + expr::{ + dummy_expr_id, Binding, BindingId, Expr, ExprId, Label, LabelId, Pat, PatId, RecordFieldPat, + }, item_scope::BuiltinShadowMode, macro_id_to_def_id, nameres::DefMap, @@ -432,6 +434,44 @@ impl Body { pats.shrink_to_fit(); bindings.shrink_to_fit(); } + + pub fn walk_bindings_in_pat(&self, pat_id: PatId, mut f: impl FnMut(BindingId)) { + self.walk_pats(pat_id, &mut |pat| { + if let Pat::Bind { id, .. } = pat { + f(*id); + } + }); + } + + pub fn walk_pats(&self, pat_id: PatId, f: &mut impl FnMut(&Pat)) { + let pat = &self[pat_id]; + f(pat); + match pat { + Pat::Range { .. } + | Pat::Lit(..) + | Pat::Path(..) + | Pat::ConstBlock(..) + | Pat::Wild + | Pat::Missing => {} + &Pat::Bind { subpat, .. 
} => { + if let Some(subpat) = subpat { + self.walk_pats(subpat, f); + } + } + Pat::Or(args) | Pat::Tuple { args, .. } | Pat::TupleStruct { args, .. } => { + args.iter().copied().for_each(|p| self.walk_pats(p, f)); + } + Pat::Ref { pat, .. } => self.walk_pats(*pat, f), + Pat::Slice { prefix, slice, suffix } => { + let total_iter = prefix.iter().chain(slice.iter()).chain(suffix.iter()); + total_iter.copied().for_each(|p| self.walk_pats(p, f)); + } + Pat::Record { args, .. } => { + args.iter().for_each(|RecordFieldPat { pat, .. }| self.walk_pats(*pat, f)); + } + Pat::Box { inner } => self.walk_pats(*inner, f), + } + } } impl Default for Body { diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index 83ce9b6acbba8..fedaf39559858 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -499,6 +499,8 @@ impl ExprCollector<'_> { Movability::Movable }; ClosureKind::Generator(movability) + } else if e.async_token().is_some() { + ClosureKind::Async } else { ClosureKind::Closure }; diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs index f8b159797e44a..5a9b825a2530b 100644 --- a/crates/hir-def/src/body/pretty.rs +++ b/crates/hir-def/src/body/pretty.rs @@ -360,8 +360,14 @@ impl<'a> Printer<'a> { w!(self, "]"); } Expr::Closure { args, arg_types, ret_type, body, closure_kind } => { - if let ClosureKind::Generator(Movability::Static) = closure_kind { - w!(self, "static "); + match closure_kind { + ClosureKind::Generator(Movability::Static) => { + w!(self, "static "); + } + ClosureKind::Async => { + w!(self, "async "); + } + _ => (), } w!(self, "|"); for (i, (pat, ty)) in args.iter().zip(arg_types.iter()).enumerate() { diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs index ee6e269fe5586..1633a33bedde8 100644 --- a/crates/hir-def/src/data.rs +++ b/crates/hir-def/src/data.rs @@ -35,6 +35,7 @@ pub struct FunctionData { pub visibility: RawVisibility, pub abi: Option>, pub legacy_const_generics_indices: Box<[u32]>, + pub rustc_allow_incoherent_impl: bool, flags: FnFlags, } @@ -84,13 +85,14 @@ impl FunctionData { } } - let legacy_const_generics_indices = item_tree - .attrs(db, krate, ModItem::from(loc.id.value).into()) + let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into()); + let legacy_const_generics_indices = attrs .by_key("rustc_legacy_const_generics") .tt_values() .next() .map(parse_rustc_legacy_const_generics) .unwrap_or_default(); + let rustc_allow_incoherent_impl = attrs.by_key("rustc_allow_incoherent_impl").exists(); Arc::new(FunctionData { name: func.name.clone(), @@ -108,6 +110,7 @@ impl FunctionData { abi: func.abi.clone(), legacy_const_generics_indices, flags, + rustc_allow_incoherent_impl, }) } @@ -171,6 +174,7 @@ pub struct TypeAliasData { pub visibility: RawVisibility, pub is_extern: bool, pub rustc_has_incoherent_inherent_impls: bool, + pub rustc_allow_incoherent_impl: bool, /// Bounds restricting the type alias itself (eg. `type Ty: Bound;` in a trait or impl). 
pub bounds: Vec>, } @@ -189,10 +193,14 @@ impl TypeAliasData { item_tree[typ.visibility].clone() }; - let rustc_has_incoherent_inherent_impls = item_tree - .attrs(db, loc.container.module(db).krate(), ModItem::from(loc.id.value).into()) - .by_key("rustc_has_incoherent_inherent_impls") - .exists(); + let attrs = item_tree.attrs( + db, + loc.container.module(db).krate(), + ModItem::from(loc.id.value).into(), + ); + let rustc_has_incoherent_inherent_impls = + attrs.by_key("rustc_has_incoherent_inherent_impls").exists(); + let rustc_allow_incoherent_impl = attrs.by_key("rustc_allow_incoherent_impl").exists(); Arc::new(TypeAliasData { name: typ.name.clone(), @@ -200,6 +208,7 @@ impl TypeAliasData { visibility, is_extern: matches!(loc.container, ItemContainerId::ExternBlockId(_)), rustc_has_incoherent_inherent_impls, + rustc_allow_incoherent_impl, bounds: typ.bounds.to_vec(), }) } @@ -212,11 +221,12 @@ pub struct TraitData { pub is_auto: bool, pub is_unsafe: bool, pub rustc_has_incoherent_inherent_impls: bool, + pub skip_array_during_method_dispatch: bool, + pub fundamental: bool, pub visibility: RawVisibility, /// Whether the trait has `#[rust_skip_array_during_method_dispatch]`. `hir_ty` will ignore /// method calls to this trait's methods when the receiver is an array and the crate edition is /// 2015 or 2018. - pub skip_array_during_method_dispatch: bool, // box it as the vec is usually empty anyways pub attribute_calls: Option, MacroCallId)>>>, } @@ -245,6 +255,7 @@ impl TraitData { attrs.by_key("rustc_skip_array_during_method_dispatch").exists(); let rustc_has_incoherent_inherent_impls = attrs.by_key("rustc_has_incoherent_inherent_impls").exists(); + let fundamental = attrs.by_key("fundamental").exists(); let mut collector = AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::TraitId(tr)); collector.collect(&item_tree, tree_id.tree_id(), &tr_def.items); @@ -260,6 +271,7 @@ impl TraitData { visibility, skip_array_during_method_dispatch, rustc_has_incoherent_inherent_impls, + fundamental, }), diagnostics.into(), ) @@ -450,6 +462,7 @@ pub struct ConstData { pub name: Option, pub type_ref: Interned, pub visibility: RawVisibility, + pub rustc_allow_incoherent_impl: bool, } impl ConstData { @@ -463,10 +476,16 @@ impl ConstData { item_tree[konst.visibility].clone() }; + let rustc_allow_incoherent_impl = item_tree + .attrs(db, loc.container.module(db).krate(), ModItem::from(loc.id.value).into()) + .by_key("rustc_allow_incoherent_impl") + .exists(); + Arc::new(ConstData { name: konst.name.clone(), type_ref: konst.type_ref.clone(), visibility, + rustc_allow_incoherent_impl, }) } } diff --git a/crates/hir-def/src/db.rs b/crates/hir-def/src/db.rs index 270cfa06e5815..9371fc14dd8a0 100644 --- a/crates/hir-def/src/db.rs +++ b/crates/hir-def/src/db.rs @@ -3,7 +3,7 @@ use std::sync::Arc; use base_db::{salsa, CrateId, SourceDatabase, Upcast}; use either::Either; -use hir_expand::{db::AstDatabase, HirFileId}; +use hir_expand::{db::ExpandDatabase, HirFileId}; use intern::Interned; use la_arena::ArenaMap; use syntax::{ast, AstPtr}; @@ -64,7 +64,7 @@ pub trait InternDatabase: SourceDatabase { } #[salsa::query_group(DefDatabaseStorage)] -pub trait DefDatabase: InternDatabase + AstDatabase + Upcast { +pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast { #[salsa::input] fn enable_proc_attr_macros(&self) -> bool; diff --git a/crates/hir-def/src/expr.rs b/crates/hir-def/src/expr.rs index bbea608c55eb5..19fa6b25419e1 100644 --- a/crates/hir-def/src/expr.rs +++ 
b/crates/hir-def/src/expr.rs @@ -245,6 +245,7 @@ pub enum Expr { pub enum ClosureKind { Closure, Generator(Movability), + Async, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] diff --git a/crates/hir-def/src/macro_expansion_tests.rs b/crates/hir-def/src/macro_expansion_tests.rs index 5ab90d92d9bd1..314bf22b95ee7 100644 --- a/crates/hir-def/src/macro_expansion_tests.rs +++ b/crates/hir-def/src/macro_expansion_tests.rs @@ -20,7 +20,7 @@ use ::mbe::TokenMap; use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase}; use expect_test::Expect; use hir_expand::{ - db::{AstDatabase, TokenExpander}, + db::{ExpandDatabase, TokenExpander}, AstId, InFile, MacroDefId, MacroDefKind, MacroFile, }; use stdx::format_to; diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs index 7d7240e7e8cb3..4efe8c58a69e3 100644 --- a/crates/hir-def/src/nameres.rs +++ b/crates/hir-def/src/nameres.rs @@ -120,6 +120,8 @@ pub struct DefMap { registered_tools: Vec, /// Unstable features of Rust enabled with `#![feature(A, B)]`. unstable_features: FxHashSet, + /// #[rustc_coherence_is_core] + rustc_coherence_is_core: bool, edition: Edition, recursion_limit: Option, @@ -215,7 +217,7 @@ pub struct ModuleData { pub origin: ModuleOrigin, /// Declared visibility of this module. pub visibility: Visibility, - + /// Always [`None`] for block modules pub parent: Option, pub children: FxHashMap, pub scope: ItemScope, @@ -292,6 +294,7 @@ impl DefMap { registered_tools: Vec::new(), unstable_features: FxHashSet::default(), diagnostics: Vec::new(), + rustc_coherence_is_core: false, } } @@ -325,6 +328,10 @@ impl DefMap { self.unstable_features.contains(feature) } + pub fn is_rustc_coherence_is_core(&self) -> bool { + self.rustc_coherence_is_core + } + pub fn root(&self) -> LocalModuleId { self.root } @@ -337,7 +344,7 @@ impl DefMap { self.proc_macro_loading_error.as_deref() } - pub(crate) fn krate(&self) -> CrateId { + pub fn krate(&self) -> CrateId { self.krate } @@ -425,7 +432,7 @@ impl DefMap { Some(self.block?.parent) } - /// Returns the module containing `local_mod`, either the parent `mod`, or the module containing + /// Returns the module containing `local_mod`, either the parent `mod`, or the module (or block) containing /// the block, if `self` corresponds to a block expression. pub fn containing_module(&self, local_mod: LocalModuleId) -> Option { match self[local_mod].parent { @@ -498,6 +505,7 @@ impl DefMap { krate: _, prelude: _, root: _, + rustc_coherence_is_core: _, } = self; extern_prelude.shrink_to_fit(); diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index 70acc3442c306..ddcee77ec4ccf 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -87,10 +87,7 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: DefMap, tree_id: T // FIXME: a hacky way to create a Name from string. 
let name = tt::Ident { text: it.name.clone(), span: tt::TokenId::unspecified() }; - ( - name.as_name(), - ProcMacroExpander::new(def_map.krate, base_db::ProcMacroId(idx as u32)), - ) + (name.as_name(), ProcMacroExpander::new(base_db::ProcMacroId(idx as u32))) }) .collect() } @@ -299,6 +296,11 @@ impl DefCollector<'_> { continue; } + if attr_name.as_text().as_deref() == Some("rustc_coherence_is_core") { + self.def_map.rustc_coherence_is_core = true; + continue; + } + if *attr_name == hir_expand::name![feature] { let features = attr.parse_path_comma_token_tree().into_iter().flatten().filter_map( @@ -581,7 +583,7 @@ impl DefCollector<'_> { let kind = def.kind.to_basedb_kind(); let (expander, kind) = match self.proc_macros.iter().find(|(n, _)| n == &def.name) { Some(&(_, expander)) => (expander, kind), - None => (ProcMacroExpander::dummy(self.def_map.krate), kind), + None => (ProcMacroExpander::dummy(), kind), }; let proc_macro_id = diff --git a/crates/hir-def/src/test_db.rs b/crates/hir-def/src/test_db.rs index b7908bddaa1cf..ee143b19ae5b3 100644 --- a/crates/hir-def/src/test_db.rs +++ b/crates/hir-def/src/test_db.rs @@ -9,7 +9,7 @@ use base_db::{ salsa, AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, FilePosition, SourceDatabase, Upcast, }; -use hir_expand::{db::AstDatabase, InFile}; +use hir_expand::{db::ExpandDatabase, InFile}; use stdx::hash::NoHashHashSet; use syntax::{algo, ast, AstNode}; @@ -23,7 +23,7 @@ use crate::{ #[salsa::database( base_db::SourceDatabaseExtStorage, base_db::SourceDatabaseStorage, - hir_expand::db::AstDatabaseStorage, + hir_expand::db::ExpandDatabaseStorage, crate::db::InternDatabaseStorage, crate::db::DefDatabaseStorage )] @@ -40,8 +40,8 @@ impl Default for TestDB { } } -impl Upcast for TestDB { - fn upcast(&self) -> &(dyn AstDatabase + 'static) { +impl Upcast for TestDB { + fn upcast(&self) -> &(dyn ExpandDatabase + 'static) { &*self } } diff --git a/crates/hir-def/src/visibility.rs b/crates/hir-def/src/visibility.rs index c9fcaae56cf0c..ab76ed43d3a0e 100644 --- a/crates/hir-def/src/visibility.rs +++ b/crates/hir-def/src/visibility.rs @@ -131,21 +131,23 @@ impl Visibility { // visibility as the containing module (even though no items are directly nameable from // there, getting this right is important for method resolution). // In that case, we adjust the visibility of `to_module` to point to the containing module. + // Additional complication: `to_module` might be in `from_module`'s `DefMap`, which we're // currently computing, so we must not call the `def_map` query for it. 
- let arc; - let to_module_def_map = - if to_module.krate == def_map.krate() && to_module.block == def_map.block_id() { - cov_mark::hit!(is_visible_from_same_block_def_map); - def_map - } else { - arc = to_module.def_map(db); - &arc - }; - let is_block_root = - to_module.block.is_some() && to_module_def_map[to_module.local_id].parent.is_none(); - if is_block_root { - to_module = to_module_def_map.containing_module(to_module.local_id).unwrap(); + let mut arc; + loop { + let to_module_def_map = + if to_module.krate == def_map.krate() && to_module.block == def_map.block_id() { + cov_mark::hit!(is_visible_from_same_block_def_map); + def_map + } else { + arc = to_module.def_map(db); + &arc + }; + match to_module_def_map.parent() { + Some(parent) => to_module = parent, + None => break, + } } // from_module needs to be a descendant of to_module diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs index 5c04f8e8b8f37..8d1e88725ecbf 100644 --- a/crates/hir-expand/src/attrs.rs +++ b/crates/hir-expand/src/attrs.rs @@ -10,7 +10,7 @@ use smallvec::{smallvec, SmallVec}; use syntax::{ast, match_ast, AstNode, SmolStr, SyntaxNode}; use crate::{ - db::AstDatabase, + db::ExpandDatabase, hygiene::Hygiene, mod_path::{ModPath, PathKind}, name::AsName, @@ -38,7 +38,7 @@ impl ops::Deref for RawAttrs { impl RawAttrs { pub const EMPTY: Self = Self { entries: None }; - pub fn new(db: &dyn AstDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self { + pub fn new(db: &dyn ExpandDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self { let entries = collect_attrs(owner) .filter_map(|(id, attr)| match attr { Either::Left(attr) => { @@ -55,7 +55,7 @@ impl RawAttrs { Self { entries: if entries.is_empty() { None } else { Some(entries) } } } - pub fn from_attrs_owner(db: &dyn AstDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self { + pub fn from_attrs_owner(db: &dyn ExpandDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self { let hygiene = Hygiene::new(db, owner.file_id); Self::new(db, owner.value, &hygiene) } @@ -87,7 +87,7 @@ impl RawAttrs { /// Processes `cfg_attr`s, returning the resulting semantic `Attrs`. // FIXME: This should return a different type - pub fn filter(self, db: &dyn AstDatabase, krate: CrateId) -> RawAttrs { + pub fn filter(self, db: &dyn ExpandDatabase, krate: CrateId) -> RawAttrs { let has_cfg_attrs = self .iter() .any(|attr| attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr])); @@ -199,7 +199,7 @@ impl fmt::Display for AttrInput { impl Attr { fn from_src( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, ast: ast::Meta, hygiene: &Hygiene, id: AttrId, @@ -221,7 +221,7 @@ impl Attr { } fn from_tt( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, tt: &tt::Subtree, hygiene: &Hygiene, id: AttrId, diff --git a/crates/hir-expand/src/builtin_attr_macro.rs b/crates/hir-expand/src/builtin_attr_macro.rs index 906ca991d73be..277ecd9399422 100644 --- a/crates/hir-expand/src/builtin_attr_macro.rs +++ b/crates/hir-expand/src/builtin_attr_macro.rs @@ -1,6 +1,6 @@ //! Builtin attributes. -use crate::{db::AstDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind}; +use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind}; macro_rules! register_builtin { ( $(($name:ident, $variant:ident) => $expand:ident),* ) => { @@ -12,7 +12,7 @@ macro_rules! 
register_builtin { impl BuiltinAttrExpander { pub fn expand( &self, - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -60,7 +60,7 @@ pub fn find_builtin_attr(ident: &name::Name) -> Option { } fn dummy_attr_expand( - _db: &dyn AstDatabase, + _db: &dyn ExpandDatabase, _id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -90,7 +90,7 @@ fn dummy_attr_expand( /// So this hacky approach is a lot more friendly for us, though it does require a bit of support in /// [`hir::Semantics`] to make this work. fn derive_attr_expand( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { diff --git a/crates/hir-expand/src/builtin_derive_macro.rs b/crates/hir-expand/src/builtin_derive_macro.rs index 060a680542fd9..5c1a75132ee94 100644 --- a/crates/hir-expand/src/builtin_derive_macro.rs +++ b/crates/hir-expand/src/builtin_derive_macro.rs @@ -9,7 +9,7 @@ use syntax::{ match_ast, }; -use crate::{db::AstDatabase, name, quote, ExpandError, ExpandResult, MacroCallId}; +use crate::{db::ExpandDatabase, name, quote, ExpandError, ExpandResult, MacroCallId}; macro_rules! register_builtin { ( $($trait:ident => $expand:ident),* ) => { @@ -21,7 +21,7 @@ macro_rules! register_builtin { impl BuiltinDeriveExpander { pub fn expand( &self, - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -141,7 +141,7 @@ fn expand_simple_derive(tt: &tt::Subtree, trait_path: tt::Subtree) -> ExpandResu ExpandResult::ok(expanded) } -fn find_builtin_crate(db: &dyn AstDatabase, id: MacroCallId) -> tt::TokenTree { +fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree { // FIXME: make hygiene works for builtin derive macro // such that $crate can be used here. let cg = db.crate_graph(); @@ -158,7 +158,7 @@ fn find_builtin_crate(db: &dyn AstDatabase, id: MacroCallId) -> tt::TokenTree { } fn copy_expand( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -167,7 +167,7 @@ fn copy_expand( } fn clone_expand( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -176,7 +176,7 @@ fn clone_expand( } fn default_expand( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -185,7 +185,7 @@ fn default_expand( } fn debug_expand( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -194,7 +194,7 @@ fn debug_expand( } fn hash_expand( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -202,13 +202,17 @@ fn hash_expand( expand_simple_derive(tt, quote! { #krate::hash::Hash }) } -fn eq_expand(db: &dyn AstDatabase, id: MacroCallId, tt: &tt::Subtree) -> ExpandResult { +fn eq_expand( + db: &dyn ExpandDatabase, + id: MacroCallId, + tt: &tt::Subtree, +) -> ExpandResult { let krate = find_builtin_crate(db, id); expand_simple_derive(tt, quote! 
{ #krate::cmp::Eq }) } fn partial_eq_expand( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -217,7 +221,7 @@ fn partial_eq_expand( } fn ord_expand( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -226,7 +230,7 @@ fn ord_expand( } fn partial_ord_expand( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index 295083a37f232..44510f2b7ff68 100644 --- a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -10,7 +10,7 @@ use syntax::{ }; use crate::{ - db::AstDatabase, name, quote, tt, ExpandError, ExpandResult, MacroCallId, MacroCallLoc, + db::ExpandDatabase, name, quote, tt, ExpandError, ExpandResult, MacroCallId, MacroCallLoc, }; macro_rules! register_builtin { @@ -28,7 +28,7 @@ macro_rules! register_builtin { impl BuiltinFnLikeExpander { pub fn expand( &self, - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -42,7 +42,7 @@ macro_rules! register_builtin { impl EagerExpander { pub fn expand( &self, - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, arg_id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -121,7 +121,7 @@ const DOLLAR_CRATE: tt::Ident = tt::Ident { text: SmolStr::new_inline("$crate"), span: tt::TokenId::unspecified() }; fn module_path_expand( - _db: &dyn AstDatabase, + _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, ) -> ExpandResult { @@ -130,7 +130,7 @@ fn module_path_expand( } fn line_expand( - _db: &dyn AstDatabase, + _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, ) -> ExpandResult { @@ -144,7 +144,7 @@ fn line_expand( } fn log_syntax_expand( - _db: &dyn AstDatabase, + _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, ) -> ExpandResult { @@ -152,7 +152,7 @@ fn log_syntax_expand( } fn trace_macros_expand( - _db: &dyn AstDatabase, + _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, ) -> ExpandResult { @@ -160,7 +160,7 @@ fn trace_macros_expand( } fn stringify_expand( - _db: &dyn AstDatabase, + _db: &dyn ExpandDatabase, _id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -174,7 +174,7 @@ fn stringify_expand( } fn column_expand( - _db: &dyn AstDatabase, + _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, ) -> ExpandResult { @@ -188,7 +188,7 @@ fn column_expand( } fn assert_expand( - _db: &dyn AstDatabase, + _db: &dyn ExpandDatabase, _id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -218,7 +218,7 @@ fn assert_expand( } fn file_expand( - _db: &dyn AstDatabase, + _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, ) -> ExpandResult { @@ -234,7 +234,7 @@ fn file_expand( } fn format_args_expand( - _db: &dyn AstDatabase, + _db: &dyn ExpandDatabase, _id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -276,7 +276,7 @@ fn format_args_expand( } fn asm_expand( - _db: &dyn AstDatabase, + _db: &dyn ExpandDatabase, _id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -304,7 +304,7 @@ fn asm_expand( } fn global_asm_expand( - _db: &dyn AstDatabase, + _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, ) -> ExpandResult { @@ -313,7 +313,7 @@ fn global_asm_expand( } fn cfg_expand( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -325,7 +325,7 
@@ fn cfg_expand( } fn panic_expand( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -343,7 +343,7 @@ fn panic_expand( } fn unreachable_expand( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -379,7 +379,7 @@ fn unquote_byte_string(lit: &tt::Literal) -> Option> { } fn compile_error_expand( - _db: &dyn AstDatabase, + _db: &dyn ExpandDatabase, _id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -395,7 +395,7 @@ fn compile_error_expand( } fn concat_expand( - _db: &dyn AstDatabase, + _db: &dyn ExpandDatabase, _arg_id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -441,7 +441,7 @@ fn concat_expand( } fn concat_bytes_expand( - _db: &dyn AstDatabase, + _db: &dyn ExpandDatabase, _arg_id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -507,7 +507,7 @@ fn concat_bytes_expand_subtree( } fn concat_idents_expand( - _db: &dyn AstDatabase, + _db: &dyn ExpandDatabase, _arg_id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -529,7 +529,7 @@ fn concat_idents_expand( } fn relative_file( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, call_id: MacroCallId, path_str: &str, allow_recursion: bool, @@ -558,7 +558,7 @@ fn parse_string(tt: &tt::Subtree) -> Result { } fn include_expand( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, arg_id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -583,7 +583,7 @@ fn include_expand( } fn include_bytes_expand( - _db: &dyn AstDatabase, + _db: &dyn ExpandDatabase, _arg_id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -606,7 +606,7 @@ fn include_bytes_expand( } fn include_str_expand( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, arg_id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -637,13 +637,13 @@ fn include_str_expand( ExpandResult::ok(ExpandedEager::new(quote!(#text))) } -fn get_env_inner(db: &dyn AstDatabase, arg_id: MacroCallId, key: &str) -> Option { +fn get_env_inner(db: &dyn ExpandDatabase, arg_id: MacroCallId, key: &str) -> Option { let krate = db.lookup_intern_macro_call(arg_id).krate; db.crate_graph()[krate].env.get(key) } fn env_expand( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, arg_id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -679,7 +679,7 @@ fn env_expand( } fn option_env_expand( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, arg_id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 76016274f0e85..45572499e8426 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -44,7 +44,7 @@ pub enum TokenExpander { impl TokenExpander { fn expand( &self, - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { @@ -83,9 +83,8 @@ impl TokenExpander { } } -// FIXME: rename to ExpandDatabase -#[salsa::query_group(AstDatabaseStorage)] -pub trait AstDatabase: SourceDatabase { +#[salsa::query_group(ExpandDatabaseStorage)] +pub trait ExpandDatabase: SourceDatabase { fn ast_id_map(&self, file_id: HirFileId) -> Arc; /// Main public API -- parses a hir file, not caring whether it's a real @@ -138,7 +137,7 @@ pub trait AstDatabase: SourceDatabase { /// token. The `token_to_map` mapped down into the expansion, with the mapped /// token returned. 
pub fn expand_speculative( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, actual_macro_call: MacroCallId, speculative_args: &SyntaxNode, token_to_map: SyntaxToken, @@ -211,7 +210,7 @@ pub fn expand_speculative( let mut speculative_expansion = match loc.def.kind { MacroDefKind::ProcMacro(expander, ..) => { tt.delimiter = tt::Delimiter::unspecified(); - expander.expand(db, loc.krate, &tt, attr_arg.as_ref()) + expander.expand(db, loc.def.krate, loc.krate, &tt, attr_arg.as_ref()) } MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => { pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?) @@ -236,12 +235,12 @@ pub fn expand_speculative( Some((node.syntax_node(), token)) } -fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc { +fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc { let map = db.parse_or_expand(file_id).map(|it| AstIdMap::from_source(&it)).unwrap_or_default(); Arc::new(map) } -fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option { +fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> Option { match file_id.repr() { HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()), HirFileIdRepr::MacroFile(macro_file) => { @@ -253,13 +252,13 @@ fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option ExpandResult, Arc)>> { let _p = profile::span("parse_macro_expansion"); - let result = db.macro_expand(macro_file.macro_call_id); + let mbe::ValueResult { value, err } = db.macro_expand(macro_file.macro_call_id); - if let Some(err) = &result.err { + if let Some(err) = &err { // Note: // The final goal we would like to make all parse_macro success, // such that the following log will not call anyway. @@ -280,9 +279,9 @@ fn parse_macro_expansion( parents ); } - let tt = match result.value { + let tt = match value { Some(tt) => tt, - None => return ExpandResult { value: None, err: result.err }, + None => return ExpandResult { value: None, err }, }; let expand_to = macro_expand_to(db, macro_file.macro_call_id); @@ -292,11 +291,11 @@ fn parse_macro_expansion( let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to); - ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: result.err } + ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err } } fn macro_arg( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, id: MacroCallId, ) -> Option> { let arg = db.macro_arg_text(id)?; @@ -357,7 +356,7 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet Option { +fn macro_arg_text(db: &dyn ExpandDatabase, id: MacroCallId) -> Option { let loc = db.lookup_intern_macro_call(id); let arg = loc.kind.arg(db)?; if matches!(loc.kind, MacroCallKind::FnLike { .. 
}) { @@ -380,7 +379,10 @@ fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option { Some(arg.green().into()) } -fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Result, mbe::ParseError> { +fn macro_def( + db: &dyn ExpandDatabase, + id: MacroDefId, +) -> Result, mbe::ParseError> { match id.kind { MacroDefKind::Declarative(ast_id) => { let (mac, def_site_token_map) = match ast_id.to_node(db) { @@ -419,7 +421,10 @@ fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Result, } } -fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult>> { +fn macro_expand( + db: &dyn ExpandDatabase, + id: MacroCallId, +) -> ExpandResult>> { let _p = profile::span("macro_expand"); let loc: MacroCallLoc = db.lookup_intern_macro_call(id); if let Some(eager) = &loc.eager { @@ -469,11 +474,11 @@ fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult Option { +fn macro_expand_error(db: &dyn ExpandDatabase, macro_call: MacroCallId) -> Option { db.macro_expand(macro_call).err } -fn expand_proc_macro(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult { +fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult { let loc: MacroCallLoc = db.lookup_intern_macro_call(id); let macro_arg = match db.macro_arg(id) { Some(it) => it, @@ -499,14 +504,14 @@ fn expand_proc_macro(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult None, }; - expander.expand(db, loc.krate, ¯o_arg.0, attr_arg.as_ref()) + expander.expand(db, loc.def.krate, loc.krate, ¯o_arg.0, attr_arg.as_ref()) } -fn hygiene_frame(db: &dyn AstDatabase, file_id: HirFileId) -> Arc { +fn hygiene_frame(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc { Arc::new(HygieneFrame::new(db, file_id)) } -fn macro_expand_to(db: &dyn AstDatabase, id: MacroCallId) -> ExpandTo { +fn macro_expand_to(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandTo { let loc: MacroCallLoc = db.lookup_intern_macro_call(id); loc.kind.expand_to() } diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs index dfab7ec92c763..aca41b11f926e 100644 --- a/crates/hir-expand/src/eager.rs +++ b/crates/hir-expand/src/eager.rs @@ -25,7 +25,7 @@ use syntax::{ted, SyntaxNode}; use crate::{ ast::{self, AstNode}, - db::AstDatabase, + db::ExpandDatabase, hygiene::Hygiene, mod_path::ModPath, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind, @@ -96,7 +96,7 @@ impl ErrorSink for &'_ mut dyn FnMut(ExpandError) { } pub fn expand_eager_macro( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, krate: CrateId, macro_call: InFile, def: MacroDefId, @@ -172,7 +172,7 @@ fn to_subtree(node: &SyntaxNode) -> crate::tt::Subtree { } fn lazy_expand( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, def: &MacroDefId, macro_call: InFile, krate: CrateId, @@ -193,7 +193,7 @@ fn lazy_expand( } fn eager_macro_recur( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, hygiene: &Hygiene, curr: InFile, krate: CrateId, diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs index c811d1c66a82d..b273f21768c68 100644 --- a/crates/hir-expand/src/fixup.rs +++ b/crates/hir-expand/src/fixup.rs @@ -636,9 +636,8 @@ fn foo() { if {} } "#, - // the {} gets parsed as the condition, I think? 
expect![[r#" -fn foo () {if {} {}} +fn foo () {if __ra_fixup {} {}} "#]], ) } diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs index 2300ee9d08998..2eb56fc9e8b26 100644 --- a/crates/hir-expand/src/hygiene.rs +++ b/crates/hir-expand/src/hygiene.rs @@ -14,7 +14,7 @@ use syntax::{ }; use crate::{ - db::{self, AstDatabase}, + db::{self, ExpandDatabase}, fixup, name::{AsName, Name}, HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile, @@ -26,7 +26,7 @@ pub struct Hygiene { } impl Hygiene { - pub fn new(db: &dyn AstDatabase, file_id: HirFileId) -> Hygiene { + pub fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Hygiene { Hygiene { frames: Some(HygieneFrames::new(db, file_id)) } } @@ -37,7 +37,7 @@ impl Hygiene { // FIXME: this should just return name pub fn name_ref_to_name( &self, - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, name_ref: ast::NameRef, ) -> Either { if let Some(frames) = &self.frames { @@ -51,7 +51,7 @@ impl Hygiene { Either::Left(name_ref.as_name()) } - pub fn local_inner_macros(&self, db: &dyn AstDatabase, path: ast::Path) -> Option { + pub fn local_inner_macros(&self, db: &dyn ExpandDatabase, path: ast::Path) -> Option { let mut token = path.syntax().first_token()?.text_range(); let frames = self.frames.as_ref()?; let mut current = &frames.0; @@ -87,13 +87,13 @@ pub struct HygieneFrame { } impl HygieneFrames { - fn new(db: &dyn AstDatabase, file_id: HirFileId) -> Self { + fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Self { // Note that this intentionally avoids the `hygiene_frame` query to avoid blowing up memory // usage. The query is only helpful for nested `HygieneFrame`s as it avoids redundant work. HygieneFrames(Arc::new(HygieneFrame::new(db, file_id))) } - fn root_crate(&self, db: &dyn AstDatabase, node: &SyntaxNode) -> Option { + fn root_crate(&self, db: &dyn ExpandDatabase, node: &SyntaxNode) -> Option { let mut token = node.first_token()?.text_range(); let mut result = self.0.krate; let mut current = self.0.clone(); @@ -136,7 +136,7 @@ struct HygieneInfo { impl HygieneInfo { fn map_ident_up( &self, - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, token: TextRange, ) -> Option<(InFile, Origin)> { let token_id = self.exp_map.token_by_range(token)?; @@ -175,7 +175,7 @@ impl HygieneInfo { } fn make_hygiene_info( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, macro_file: MacroFile, loc: &MacroCallLoc, ) -> Option { @@ -215,7 +215,7 @@ fn make_hygiene_info( } impl HygieneFrame { - pub(crate) fn new(db: &dyn AstDatabase, file_id: HirFileId) -> HygieneFrame { + pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> HygieneFrame { let (info, krate, local_inner) = match file_id.macro_file() { None => (None, None, false), Some(macro_file) => { diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 39fc08ecdcfe3..5e99eacc1b619 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -198,7 +198,7 @@ impl HirFileId { /// For macro-expansion files, returns the file original source file the /// expansion originated from. 
- pub fn original_file(self, db: &dyn db::AstDatabase) -> FileId { + pub fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId { let mut file_id = self; loop { match file_id.repr() { @@ -214,7 +214,7 @@ impl HirFileId { } } - pub fn expansion_level(self, db: &dyn db::AstDatabase) -> u32 { + pub fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 { let mut level = 0; let mut curr = self; while let Some(macro_file) = curr.macro_file() { @@ -227,14 +227,14 @@ impl HirFileId { } /// If this is a macro call, returns the syntax node of the call. - pub fn call_node(self, db: &dyn db::AstDatabase) -> Option> { + pub fn call_node(self, db: &dyn db::ExpandDatabase) -> Option> { let macro_file = self.macro_file()?; let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); Some(loc.kind.to_node(db)) } /// If this is a macro call, returns the syntax node of the very first macro call this file resides in. - pub fn original_call_node(self, db: &dyn db::AstDatabase) -> Option<(FileId, SyntaxNode)> { + pub fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<(FileId, SyntaxNode)> { let mut call = db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).kind.to_node(db); loop { @@ -248,7 +248,7 @@ impl HirFileId { } /// Return expansion information if it is a macro-expansion file - pub fn expansion_info(self, db: &dyn db::AstDatabase) -> Option { + pub fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option { let macro_file = self.macro_file()?; let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); @@ -294,7 +294,7 @@ impl HirFileId { } /// Indicate it is macro file generated for builtin derive - pub fn is_builtin_derive(&self, db: &dyn db::AstDatabase) -> Option> { + pub fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> Option> { let macro_file = self.macro_file()?; let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); let attr = match loc.def.kind { @@ -304,7 +304,7 @@ impl HirFileId { Some(attr.with_value(ast::Attr::cast(attr.value.clone())?)) } - pub fn is_custom_derive(&self, db: &dyn db::AstDatabase) -> bool { + pub fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool { match self.macro_file() { Some(macro_file) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); @@ -315,7 +315,7 @@ impl HirFileId { } /// Return whether this file is an include macro - pub fn is_include_macro(&self, db: &dyn db::AstDatabase) -> bool { + pub fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool { match self.macro_file() { Some(macro_file) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); @@ -326,7 +326,7 @@ impl HirFileId { } /// Return whether this file is an attr macro - pub fn is_attr_macro(&self, db: &dyn db::AstDatabase) -> bool { + pub fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool { match self.macro_file() { Some(macro_file) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); @@ -338,7 +338,7 @@ impl HirFileId { /// Return whether this file is the pseudo expansion of the derive attribute. /// See [`crate::builtin_attr_macro::derive_attr_expand`]. 
- pub fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::AstDatabase) -> bool { + pub fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool { match self.macro_file() { Some(macro_file) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); @@ -384,7 +384,7 @@ impl HirFileId { impl MacroDefId { pub fn as_lazy_macro( self, - db: &dyn db::AstDatabase, + db: &dyn db::ExpandDatabase, krate: CrateId, kind: MacroCallKind, ) -> MacroCallId { @@ -427,7 +427,7 @@ impl MacroCallKind { } } - pub fn to_node(&self, db: &dyn db::AstDatabase) -> InFile { + pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> InFile { match self { MacroCallKind::FnLike { ast_id, .. } => { ast_id.with_value(ast_id.to_node(db).syntax().clone()) @@ -465,7 +465,7 @@ impl MacroCallKind { /// Returns the original file range that best describes the location of this macro call. /// /// Unlike `MacroCallKind::original_call_range`, this also spans the item of attributes and derives. - pub fn original_call_range_with_body(self, db: &dyn db::AstDatabase) -> FileRange { + pub fn original_call_range_with_body(self, db: &dyn db::ExpandDatabase) -> FileRange { let mut kind = self; let file_id = loop { match kind.file_id().repr() { @@ -490,7 +490,7 @@ impl MacroCallKind { /// Here we try to roughly match what rustc does to improve diagnostics: fn-like macros /// get the whole `ast::MacroCall`, attribute macros get the attribute's range, and derives /// get only the specific derive that is being referred to. - pub fn original_call_range(self, db: &dyn db::AstDatabase) -> FileRange { + pub fn original_call_range(self, db: &dyn db::ExpandDatabase) -> FileRange { let mut kind = self; let file_id = loop { match kind.file_id().repr() { @@ -529,7 +529,7 @@ impl MacroCallKind { FileRange { range, file_id } } - fn arg(&self, db: &dyn db::AstDatabase) -> Option { + fn arg(&self, db: &dyn db::ExpandDatabase) -> Option { match self { MacroCallKind::FnLike { ast_id, .. } => { Some(ast_id.to_node(db).token_tree()?.syntax().clone()) @@ -597,7 +597,7 @@ impl ExpansionInfo { /// Both of these only have one simple call site input so no special handling is required here. pub fn map_token_down( &self, - db: &dyn db::AstDatabase, + db: &dyn db::ExpandDatabase, item: Option, token: InFile<&SyntaxToken>, ) -> Option> + '_> { @@ -666,7 +666,7 @@ impl ExpansionInfo { /// Map a token up out of the expansion it resides in into the arguments of the macro call of the expansion. 
pub fn map_token_up( &self, - db: &dyn db::AstDatabase, + db: &dyn db::ExpandDatabase, token: InFile<&SyntaxToken>, ) -> Option<(InFile, Origin)> { // Fetch the id through its text range, @@ -717,7 +717,7 @@ impl ExpansionInfo { pub type AstId = InFile>; impl AstId { - pub fn to_node(&self, db: &dyn db::AstDatabase) -> N { + pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N { let root = db.parse_or_expand(self.file_id).unwrap(); db.ast_id_map(self.file_id).get(self.value).to_node(&root) } @@ -753,7 +753,7 @@ impl InFile { self.with_value(&self.value) } - pub fn file_syntax(&self, db: &dyn db::AstDatabase) -> SyntaxNode { + pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode { db.parse_or_expand(self.file_id).expect("source created from invalid file") } } @@ -783,7 +783,7 @@ impl InFile> { impl<'a> InFile<&'a SyntaxNode> { pub fn ancestors_with_macros( self, - db: &dyn db::AstDatabase, + db: &dyn db::ExpandDatabase, ) -> impl Iterator> + Clone + '_ { iter::successors(Some(self.cloned()), move |node| match node.value.parent() { Some(parent) => Some(node.with_value(parent)), @@ -794,7 +794,7 @@ impl<'a> InFile<&'a SyntaxNode> { /// Skips the attributed item that caused the macro invocation we are climbing up pub fn ancestors_with_macros_skip_attr_item( self, - db: &dyn db::AstDatabase, + db: &dyn db::ExpandDatabase, ) -> impl Iterator> + '_ { let succ = move |node: &InFile| match node.value.parent() { Some(parent) => Some(node.with_value(parent)), @@ -816,7 +816,7 @@ impl<'a> InFile<&'a SyntaxNode> { /// /// For attributes and derives, this will point back to the attribute only. /// For the entire item use [`InFile::original_file_range_full`]. - pub fn original_file_range(self, db: &dyn db::AstDatabase) -> FileRange { + pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange { match self.file_id.repr() { HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, HirFileIdRepr::MacroFile(mac_file) => { @@ -831,7 +831,7 @@ impl<'a> InFile<&'a SyntaxNode> { } /// Falls back to the macro call range if the node cannot be mapped up fully. - pub fn original_file_range_full(self, db: &dyn db::AstDatabase) -> FileRange { + pub fn original_file_range_full(self, db: &dyn db::ExpandDatabase) -> FileRange { match self.file_id.repr() { HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, HirFileIdRepr::MacroFile(mac_file) => { @@ -846,7 +846,7 @@ impl<'a> InFile<&'a SyntaxNode> { } /// Attempts to map the syntax node back up its macro calls. 
- pub fn original_file_range_opt(self, db: &dyn db::AstDatabase) -> Option { + pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option { match ascend_node_border_tokens(db, self) { Some(InFile { file_id, value: (first, last) }) => { let original_file = file_id.original_file(db); @@ -865,7 +865,7 @@ impl<'a> InFile<&'a SyntaxNode> { } } - pub fn original_syntax_node(self, db: &dyn db::AstDatabase) -> Option> { + pub fn original_syntax_node(self, db: &dyn db::ExpandDatabase) -> Option> { // This kind of upmapping can only be achieved in attribute expanded files, // as we don't have node inputs otherwise and therefore can't find an `N` node in the input if !self.file_id.is_macro() { @@ -892,13 +892,13 @@ impl<'a> InFile<&'a SyntaxNode> { } impl InFile { - pub fn upmap(self, db: &dyn db::AstDatabase) -> Option> { + pub fn upmap(self, db: &dyn db::ExpandDatabase) -> Option> { let expansion = self.file_id.expansion_info(db)?; expansion.map_token_up(db, self.as_ref()).map(|(it, _)| it) } /// Falls back to the macro call range if the node cannot be mapped up fully. - pub fn original_file_range(self, db: &dyn db::AstDatabase) -> FileRange { + pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange { match self.file_id.repr() { HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, HirFileIdRepr::MacroFile(mac_file) => { @@ -913,7 +913,7 @@ impl InFile { } /// Attempts to map the syntax node back up its macro calls. - pub fn original_file_range_opt(self, db: &dyn db::AstDatabase) -> Option { + pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option { match self.file_id.repr() { HirFileIdRepr::FileId(file_id) => { Some(FileRange { file_id, range: self.value.text_range() }) @@ -932,7 +932,7 @@ impl InFile { pub fn ancestors_with_macros( self, - db: &dyn db::AstDatabase, + db: &dyn db::ExpandDatabase, ) -> impl Iterator> + '_ { self.value.parent().into_iter().flat_map({ let file_id = self.file_id; @@ -942,7 +942,7 @@ impl InFile { } fn ascend_node_border_tokens( - db: &dyn db::AstDatabase, + db: &dyn db::ExpandDatabase, InFile { file_id, value: node }: InFile<&SyntaxNode>, ) -> Option> { let expansion = file_id.expansion_info(db)?; @@ -958,7 +958,7 @@ fn ascend_node_border_tokens( } fn ascend_call_token( - db: &dyn db::AstDatabase, + db: &dyn db::ExpandDatabase, expansion: &ExpansionInfo, token: InFile, ) -> Option> { @@ -977,7 +977,7 @@ impl InFile { self.value.syntax().descendants().filter_map(T::cast).map(move |n| self.with_value(n)) } - pub fn original_ast_node(self, db: &dyn db::AstDatabase) -> Option> { + pub fn original_ast_node(self, db: &dyn db::ExpandDatabase) -> Option> { // This kind of upmapping can only be achieved in attribute expanded files, // as we don't have node inputs otherwise and therefore can't find an `N` node in the input if !self.file_id.is_macro() { diff --git a/crates/hir-expand/src/mod_path.rs b/crates/hir-expand/src/mod_path.rs index d7586d129b768..e9393cc89aedf 100644 --- a/crates/hir-expand/src/mod_path.rs +++ b/crates/hir-expand/src/mod_path.rs @@ -6,7 +6,7 @@ use std::{ }; use crate::{ - db::AstDatabase, + db::ExpandDatabase, hygiene::Hygiene, name::{known, Name}, }; @@ -37,7 +37,11 @@ pub enum PathKind { } impl ModPath { - pub fn from_src(db: &dyn AstDatabase, path: ast::Path, hygiene: &Hygiene) -> Option { + pub fn from_src( + db: &dyn ExpandDatabase, + path: ast::Path, + hygiene: &Hygiene, + ) -> Option { convert_path(db, None, path, hygiene) } @@ -162,7 +166,7 @@ impl From 
for ModPath { } fn convert_path( - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, prefix: Option, path: ast::Path, hygiene: &Hygiene, diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs index 3f4d2540c099a..d758e9302cd87 100644 --- a/crates/hir-expand/src/proc_macro.rs +++ b/crates/hir-expand/src/proc_macro.rs @@ -3,22 +3,20 @@ use base_db::{CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind}; use stdx::never; -use crate::{db::AstDatabase, tt, ExpandError, ExpandResult}; +use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult}; #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] pub struct ProcMacroExpander { - krate: CrateId, proc_macro_id: Option, } impl ProcMacroExpander { - pub fn new(krate: CrateId, proc_macro_id: ProcMacroId) -> Self { - Self { krate, proc_macro_id: Some(proc_macro_id) } + pub fn new(proc_macro_id: ProcMacroId) -> Self { + Self { proc_macro_id: Some(proc_macro_id) } } - pub fn dummy(krate: CrateId) -> Self { - // FIXME: Should store the name for better errors - Self { krate, proc_macro_id: None } + pub fn dummy() -> Self { + Self { proc_macro_id: None } } pub fn is_dummy(&self) -> bool { @@ -27,7 +25,8 @@ impl ProcMacroExpander { pub fn expand( self, - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, + def_crate: CrateId, calling_crate: CrateId, tt: &tt::Subtree, attr_arg: Option<&tt::Subtree>, @@ -35,7 +34,7 @@ impl ProcMacroExpander { match self.proc_macro_id { Some(id) => { let krate_graph = db.crate_graph(); - let proc_macros = match &krate_graph[self.krate].proc_macro { + let proc_macros = match &krate_graph[def_crate].proc_macro { Ok(proc_macros) => proc_macros, Err(_) => { never!("Non-dummy expander even though there are no proc macros"); @@ -84,7 +83,7 @@ impl ProcMacroExpander { } None => ExpandResult::with_err( tt::Subtree::empty(), - ExpandError::UnresolvedProcMacro(self.krate), + ExpandError::UnresolvedProcMacro(def_crate), ), } } diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml index 4572e33486f0e..9b3296df2508a 100644 --- a/crates/hir-ty/Cargo.toml +++ b/crates/hir-ty/Cargo.toml @@ -22,10 +22,10 @@ either = "1.7.0" tracing = "0.1.35" rustc-hash = "1.1.0" scoped-tls = "1.0.0" -chalk-solve = { version = "0.88.0", default-features = false } -chalk-ir = "0.88.0" -chalk-recursive = { version = "0.88.0", default-features = false } -chalk-derive = "0.88.0" +chalk-solve = { version = "0.89.0", default-features = false } +chalk-ir = "0.89.0" +chalk-recursive = { version = "0.89.0", default-features = false } +chalk-derive = "0.89.0" la-arena = { version = "0.3.0", path = "../../lib/la-arena" } once_cell = "1.17.0" typed-arena = "2.0.1" diff --git a/crates/hir-ty/src/chalk_ext.rs b/crates/hir-ty/src/chalk_ext.rs index e6aefbf27167a..2141894922f7b 100644 --- a/crates/hir-ty/src/chalk_ext.rs +++ b/crates/hir-ty/src/chalk_ext.rs @@ -12,7 +12,7 @@ use hir_def::{ use crate::{ db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx, to_chalk_trait_id, utils::generics, AdtId, AliasEq, AliasTy, Binders, - CallableDefId, CallableSig, FnPointer, ImplTraitId, Interner, Lifetime, ProjectionTy, + CallableDefId, CallableSig, DynTy, FnPointer, ImplTraitId, Interner, Lifetime, ProjectionTy, QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyKind, TypeFlags, WhereClause, }; @@ -378,6 +378,19 @@ impl ProjectionTyExt for ProjectionTy { } } +pub trait DynTyExt { + fn principal(&self) -> Option<&TraitRef>; +} + +impl DynTyExt for DynTy { + fn principal(&self) 
-> Option<&TraitRef> { + self.bounds.skip_binders().interned().get(0).and_then(|b| match b.skip_binders() { + crate::WhereClause::Implemented(trait_ref) => Some(trait_ref), + _ => None, + }) + } +} + pub trait TraitRefExt { fn hir_trait_id(&self) -> TraitId; } diff --git a/crates/hir-ty/src/diagnostics.rs b/crates/hir-ty/src/diagnostics.rs index 37eb06be1d3d1..4b147b99707c1 100644 --- a/crates/hir-ty/src/diagnostics.rs +++ b/crates/hir-ty/src/diagnostics.rs @@ -11,3 +11,9 @@ pub use crate::diagnostics::{ }, unsafe_check::{missing_unsafe, unsafe_expressions, UnsafeExpr}, }; + +#[derive(Debug, PartialEq, Eq)] +pub struct IncoherentImpl { + pub file_id: hir_expand::HirFileId, + pub impl_: syntax::AstPtr, +} diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 535189ff0288b..ee186673ee130 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -275,7 +275,23 @@ impl<'a> InferenceContext<'a> { Some(type_ref) => self.make_ty(type_ref), None => self.table.new_type_var(), }; - sig_tys.push(ret_ty.clone()); + if let ClosureKind::Async = closure_kind { + // Use the first type parameter as the output type of future. + // existential type AsyncBlockImplTrait: Future + let impl_trait_id = + crate::ImplTraitId::AsyncBlockTypeImplTrait(self.owner, *body); + let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into(); + sig_tys.push( + TyKind::OpaqueType( + opaque_ty_id, + Substitution::from1(Interner, ret_ty.clone()), + ) + .intern(Interner), + ); + } else { + sig_tys.push(ret_ty.clone()); + } + let sig_ty = TyKind::Function(FnPointer { num_binders: 0, sig: FnSig { abi: (), safety: chalk_ir::Safety::Safe, variadic: false }, @@ -286,33 +302,38 @@ impl<'a> InferenceContext<'a> { }) .intern(Interner); - let (ty, resume_yield_tys) = if matches!(closure_kind, ClosureKind::Generator(_)) { - // FIXME: report error when there are more than 1 parameter. - let resume_ty = match sig_tys.first() { - // When `sig_tys.len() == 1` the first type is the return type, not the - // first parameter type. - Some(ty) if sig_tys.len() > 1 => ty.clone(), - _ => self.result.standard_types.unit.clone(), - }; - let yield_ty = self.table.new_type_var(); - - let subst = TyBuilder::subst_for_generator(self.db, self.owner) - .push(resume_ty.clone()) - .push(yield_ty.clone()) - .push(ret_ty.clone()) - .build(); + let (ty, resume_yield_tys) = match closure_kind { + ClosureKind::Generator(_) => { + // FIXME: report error when there are more than 1 parameter. + let resume_ty = match sig_tys.first() { + // When `sig_tys.len() == 1` the first type is the return type, not the + // first parameter type. 
+ Some(ty) if sig_tys.len() > 1 => ty.clone(), + _ => self.result.standard_types.unit.clone(), + }; + let yield_ty = self.table.new_type_var(); + + let subst = TyBuilder::subst_for_generator(self.db, self.owner) + .push(resume_ty.clone()) + .push(yield_ty.clone()) + .push(ret_ty.clone()) + .build(); - let generator_id = self.db.intern_generator((self.owner, tgt_expr)).into(); - let generator_ty = TyKind::Generator(generator_id, subst).intern(Interner); + let generator_id = self.db.intern_generator((self.owner, tgt_expr)).into(); + let generator_ty = TyKind::Generator(generator_id, subst).intern(Interner); - (generator_ty, Some((resume_ty, yield_ty))) - } else { - let closure_id = self.db.intern_closure((self.owner, tgt_expr)).into(); - let closure_ty = - TyKind::Closure(closure_id, Substitution::from1(Interner, sig_ty.clone())) - .intern(Interner); + (generator_ty, Some((resume_ty, yield_ty))) + } + ClosureKind::Closure | ClosureKind::Async => { + let closure_id = self.db.intern_closure((self.owner, tgt_expr)).into(); + let closure_ty = TyKind::Closure( + closure_id, + Substitution::from1(Interner, sig_ty.clone()), + ) + .intern(Interner); - (closure_ty, None) + (closure_ty, None) + } }; // Eagerly try to relate the closure type with the expected @@ -321,7 +342,7 @@ impl<'a> InferenceContext<'a> { self.deduce_closure_type_from_expectations(tgt_expr, &ty, &sig_ty, expected); // Now go through the argument patterns - for (arg_pat, arg_ty) in args.iter().zip(sig_tys) { + for (arg_pat, arg_ty) in args.iter().zip(&sig_tys) { self.infer_top_pat(*arg_pat, &arg_ty); } diff --git a/crates/hir-ty/src/infer/pat.rs b/crates/hir-ty/src/infer/pat.rs index 0f49e83788183..5f839fc307aab 100644 --- a/crates/hir-ty/src/infer/pat.rs +++ b/crates/hir-ty/src/infer/pat.rs @@ -5,10 +5,7 @@ use std::iter::repeat_with; use chalk_ir::Mutability; use hir_def::{ body::Body, - expr::{ - Binding, BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, Literal, Pat, PatId, - RecordFieldPat, - }, + expr::{Binding, BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, Literal, Pat, PatId}, path::Path, }; use hir_expand::name::Name; @@ -439,38 +436,8 @@ fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool { pub(super) fn contains_explicit_ref_binding(body: &Body, pat_id: PatId) -> bool { let mut res = false; - walk_pats(body, pat_id, &mut |pat| { + body.walk_pats(pat_id, &mut |pat| { res |= matches!(pat, Pat::Bind { id, .. } if body.bindings[*id].mode == BindingAnnotation::Ref); }); res } - -fn walk_pats(body: &Body, pat_id: PatId, f: &mut impl FnMut(&Pat)) { - let pat = &body[pat_id]; - f(pat); - match pat { - Pat::Range { .. } - | Pat::Lit(..) - | Pat::Path(..) - | Pat::ConstBlock(..) - | Pat::Wild - | Pat::Missing => {} - &Pat::Bind { subpat, .. } => { - if let Some(subpat) = subpat { - walk_pats(body, subpat, f); - } - } - Pat::Or(args) | Pat::Tuple { args, .. } | Pat::TupleStruct { args, .. } => { - args.iter().copied().for_each(|p| walk_pats(body, p, f)); - } - Pat::Ref { pat, .. } => walk_pats(body, *pat, f), - Pat::Slice { prefix, slice, suffix } => { - let total_iter = prefix.iter().chain(slice.iter()).chain(suffix.iter()); - total_iter.copied().for_each(|p| walk_pats(body, p, f)); - } - Pat::Record { args, .. } => { - args.iter().for_each(|RecordFieldPat { pat, .. 
}| walk_pats(body, *pat, f)); - } - Pat::Box { inner } => walk_pats(body, *inner, f), - } -} diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs index 92a17fc3a9972..f3a27632bf545 100644 --- a/crates/hir-ty/src/method_resolution.rs +++ b/crates/hir-ty/src/method_resolution.rs @@ -19,13 +19,13 @@ use stdx::never; use crate::{ autoderef::{self, AutoderefKind}, db::HirDatabase, - from_foreign_def_id, + from_chalk_trait_id, from_foreign_def_id, infer::{unify::InferenceTable, Adjust, Adjustment, AutoBorrow, OverloadedDeref, PointerCast}, primitive::{FloatTy, IntTy, UintTy}, static_lifetime, to_chalk_trait_id, utils::all_super_traits, - AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, ForeignDefId, InEnvironment, Interner, - Scalar, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt, + AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, DynTyExt, ForeignDefId, InEnvironment, + Interner, Scalar, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt, }; /// This is used as a key for indexing impls. @@ -266,11 +266,12 @@ impl TraitImpls { #[derive(Debug, Eq, PartialEq)] pub struct InherentImpls { map: FxHashMap>, + invalid_impls: Vec, } impl InherentImpls { pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc { - let mut impls = Self { map: FxHashMap::default() }; + let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() }; let crate_def_map = db.crate_def_map(krate); impls.collect_def_map(db, &crate_def_map); @@ -283,7 +284,7 @@ impl InherentImpls { db: &dyn HirDatabase, block: BlockId, ) -> Option> { - let mut impls = Self { map: FxHashMap::default() }; + let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() }; if let Some(block_def_map) = db.block_def_map(block) { impls.collect_def_map(db, &block_def_map); impls.shrink_to_fit(); @@ -306,11 +307,17 @@ impl InherentImpls { } let self_ty = db.impl_self_ty(impl_id); - let fp = TyFingerprint::for_inherent_impl(self_ty.skip_binders()); - if let Some(fp) = fp { - self.map.entry(fp).or_default().push(impl_id); + let self_ty = self_ty.skip_binders(); + + match is_inherent_impl_coherent(db, def_map, &data, self_ty) { + true => { + // `fp` should only be `None` in error cases (either erroneous code or incomplete name resolution) + if let Some(fp) = TyFingerprint::for_inherent_impl(self_ty) { + self.map.entry(fp).or_default().push(impl_id); + } + } + false => self.invalid_impls.push(impl_id), } - // `fp` should only be `None` in error cases (either erroneous code or incomplete name resolution) } // To better support custom derives, collect impls in all unnamed const items. 
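The free `walk_pats` helper removed above is not dropped; the patch moves it onto `Body`, where it is reused here (`body.walk_pats` in `contains_explicit_ref_binding`) and later in the MIR lowering hunk via `walk_bindings_in_pat`. A small sketch of the same visit-every-subpattern shape, assuming a toy `Pat` enum instead of hir-def's arena-indexed one:

    // Toy stand-ins for hir-def's arena-based pattern types.
    enum Pat {
        Wild,
        Bind { subpat: Option<Box<Pat>> },
        Tuple(Vec<Pat>),
        Ref(Box<Pat>),
    }

    /// Call `f` on `pat` and every pattern nested inside it, the same shape as
    /// the removed free function and the new `Body::walk_pats` method.
    fn walk_pats(pat: &Pat, f: &mut impl FnMut(&Pat)) {
        f(pat);
        match pat {
            Pat::Wild => {}
            Pat::Bind { subpat: Some(sub) } => walk_pats(sub, f),
            Pat::Bind { subpat: None } => {}
            Pat::Tuple(args) => {
                for p in args {
                    walk_pats(p, f);
                }
            }
            Pat::Ref(inner) => walk_pats(inner, f),
        }
    }

    fn main() {
        // Roughly the pattern `(_, &binding)` as a tree.
        let pat = Pat::Tuple(vec![Pat::Wild, Pat::Ref(Box::new(Pat::Bind { subpat: None }))]);
        let mut count = 0;
        walk_pats(&pat, &mut |_| count += 1);
        assert_eq!(count, 4); // Tuple, Wild, Ref, Bind
    }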
@@ -334,6 +341,10 @@ impl InherentImpls { pub fn all_impls(&self) -> impl Iterator + '_ { self.map.values().flat_map(|v| v.iter().copied()) } + + pub fn invalid_impls(&self) -> &[ImplId] { + &self.invalid_impls + } } pub(crate) fn incoherent_inherent_impl_crates( @@ -775,6 +786,69 @@ fn find_matching_impl( } } +fn is_inherent_impl_coherent( + db: &dyn HirDatabase, + def_map: &DefMap, + impl_data: &ImplData, + self_ty: &Ty, +) -> bool { + let self_ty = self_ty.kind(Interner); + let impl_allowed = match self_ty { + TyKind::Tuple(_, _) + | TyKind::FnDef(_, _) + | TyKind::Array(_, _) + | TyKind::Never + | TyKind::Raw(_, _) + | TyKind::Ref(_, _, _) + | TyKind::Slice(_) + | TyKind::Str + | TyKind::Scalar(_) => def_map.is_rustc_coherence_is_core(), + + &TyKind::Adt(AdtId(adt), _) => adt.module(db.upcast()).krate() == def_map.krate(), + TyKind::Dyn(it) => it.principal().map_or(false, |trait_ref| { + from_chalk_trait_id(trait_ref.trait_id).module(db.upcast()).krate() == def_map.krate() + }), + + _ => true, + }; + impl_allowed || { + let rustc_has_incoherent_inherent_impls = match self_ty { + TyKind::Tuple(_, _) + | TyKind::FnDef(_, _) + | TyKind::Array(_, _) + | TyKind::Never + | TyKind::Raw(_, _) + | TyKind::Ref(_, _, _) + | TyKind::Slice(_) + | TyKind::Str + | TyKind::Scalar(_) => true, + + &TyKind::Adt(AdtId(adt), _) => match adt { + hir_def::AdtId::StructId(it) => { + db.struct_data(it).rustc_has_incoherent_inherent_impls + } + hir_def::AdtId::UnionId(it) => { + db.union_data(it).rustc_has_incoherent_inherent_impls + } + hir_def::AdtId::EnumId(it) => db.enum_data(it).rustc_has_incoherent_inherent_impls, + }, + TyKind::Dyn(it) => it.principal().map_or(false, |trait_ref| { + db.trait_data(from_chalk_trait_id(trait_ref.trait_id)) + .rustc_has_incoherent_inherent_impls + }), + + _ => false, + }; + rustc_has_incoherent_inherent_impls + && !impl_data.items.is_empty() + && impl_data.items.iter().copied().all(|assoc| match assoc { + AssocItemId::FunctionId(it) => db.function_data(it).rustc_allow_incoherent_impl, + AssocItemId::ConstId(it) => db.const_data(it).rustc_allow_incoherent_impl, + AssocItemId::TypeAliasId(it) => db.type_alias_data(it).rustc_allow_incoherent_impl, + }) + } +} + pub fn iterate_path_candidates( ty: &Canonical, db: &dyn HirDatabase, diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index 435a914088bcf..c4dd7c0ace46c 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -1113,7 +1113,7 @@ impl MirLowerCtx<'_> { if matches!(mode, BindingAnnotation::Ref | BindingAnnotation::RefMut) { binding_mode = mode; } - self.push_storage_live(*id, current)?; + self.push_storage_live(*id, current); self.push_assignment( current, target_place.into(), @@ -1327,8 +1327,9 @@ impl MirLowerCtx<'_> { is_ty_uninhabited_from(&self.infer[expr_id], self.owner.module(self.db.upcast()), self.db) } - /// This function push `StorageLive` statements for each binding in the pattern. - fn push_storage_live(&mut self, b: BindingId, current: BasicBlockId) -> Result<()> { + /// This function push `StorageLive` statement for the binding, and applies changes to add `StorageDead` in + /// the appropriated places. + fn push_storage_live(&mut self, b: BindingId, current: BasicBlockId) { // Current implementation is wrong. It adds no `StorageDead` at the end of scope, and before each break // and continue. It just add a `StorageDead` before the `StorageLive`, which is not wrong, but unneeeded in // the proper implementation. 
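In prose, the rule `is_inherent_impl_coherent` encodes above: an inherent impl is accepted when its self type is local to the defining crate (primitives only in a crate marked `rustc_coherence_is_core`); otherwise it is accepted only if the self type opts in with `rustc_has_incoherent_inherent_impls` and every item in the impl carries `rustc_allow_incoherent_impl`. Rejected impls are collected in `invalid_impls` and later surface as the `incoherent-impl` diagnostic. A hedged sketch of that decision with toy types, not the real `TyKind`/`DefMap`:

    #[derive(Clone, Copy, PartialEq)]
    struct CrateId(u32);

    enum SelfTy {
        /// bool, str, slices, references, ... (the builtin `TyKind`s above).
        Primitive,
        Adt { defining_crate: CrateId, has_incoherent_opt_in: bool },
    }

    struct ImplItem {
        allow_incoherent: bool,
    }

    fn is_coherent(
        impl_crate: CrateId,
        coherence_is_core: bool,
        self_ty: &SelfTy,
        items: &[ImplItem],
    ) -> bool {
        let directly_allowed = match self_ty {
            SelfTy::Primitive => coherence_is_core,
            SelfTy::Adt { defining_crate, .. } => *defining_crate == impl_crate,
        };
        directly_allowed || {
            let ty_opts_in = match self_ty {
                SelfTy::Primitive => true,
                SelfTy::Adt { has_incoherent_opt_in, .. } => *has_incoherent_opt_in,
            };
            // The escape hatch also requires a non-empty impl whose items all
            // carry `rustc_allow_incoherent_impl`.
            ty_opts_in && !items.is_empty() && items.iter().all(|item| item.allow_incoherent)
        }
    }

    fn main() {
        let local = CrateId(0);
        let foreign = CrateId(1);
        // Foreign ADT without opt-in: rejected, which is what feeds `invalid_impls`.
        assert!(!is_coherent(
            local,
            false,
            &SelfTy::Adt { defining_crate: foreign, has_incoherent_opt_in: false },
            &[ImplItem { allow_incoherent: true }],
        ));
        // Foreign ADT with the opt-in and fully annotated items: accepted.
        assert!(is_coherent(
            local,
            false,
            &SelfTy::Adt { defining_crate: foreign, has_incoherent_opt_in: true },
            &[ImplItem { allow_incoherent: true }],
        ));
        // Primitive impl inside a `rustc_coherence_is_core` crate: accepted.
        assert!(is_coherent(local, true, &SelfTy::Primitive, &[]));
    }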
Due this limitation, implementing a borrow checker on top of this mir will falsely @@ -1356,7 +1357,6 @@ impl MirLowerCtx<'_> { let l = self.result.binding_locals[b]; self.push_statement(current, StatementKind::StorageDead(l).with_span(span)); self.push_statement(current, StatementKind::StorageLive(l).with_span(span)); - Ok(()) } fn resolve_lang_item(&self, item: LangItem) -> Result { @@ -1381,10 +1381,10 @@ impl MirLowerCtx<'_> { if let Some(expr_id) = initializer { let else_block; let Some((init_place, c)) = - self.lower_expr_as_place(current, *expr_id, true)? - else { - return Ok(None); - }; + self.lower_expr_as_place(current, *expr_id, true)? + else { + return Ok(None); + }; current = c; (current, else_block) = self.pattern_match( current, @@ -1407,6 +1407,10 @@ impl MirLowerCtx<'_> { } } } + } else { + self.body.walk_bindings_in_pat(*pat, |b| { + self.push_storage_live(b, current); + }); } } hir_def::expr::Statement::Expr { expr, has_semi: _ } => { diff --git a/crates/hir-ty/src/test_db.rs b/crates/hir-ty/src/test_db.rs index 118e5311e9a64..8c48331b94b53 100644 --- a/crates/hir-ty/src/test_db.rs +++ b/crates/hir-ty/src/test_db.rs @@ -9,7 +9,7 @@ use base_db::{ salsa, AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast, }; use hir_def::{db::DefDatabase, ModuleId}; -use hir_expand::db::AstDatabase; +use hir_expand::db::ExpandDatabase; use stdx::hash::{NoHashHashMap, NoHashHashSet}; use syntax::TextRange; use test_utils::extract_annotations; @@ -17,7 +17,7 @@ use test_utils::extract_annotations; #[salsa::database( base_db::SourceDatabaseExtStorage, base_db::SourceDatabaseStorage, - hir_expand::db::AstDatabaseStorage, + hir_expand::db::ExpandDatabaseStorage, hir_def::db::InternDatabaseStorage, hir_def::db::DefDatabaseStorage, crate::db::HirDatabaseStorage @@ -41,8 +41,8 @@ impl fmt::Debug for TestDB { } } -impl Upcast for TestDB { - fn upcast(&self) -> &(dyn AstDatabase + 'static) { +impl Upcast for TestDB { + fn upcast(&self) -> &(dyn ExpandDatabase + 'static) { &*self } } diff --git a/crates/hir-ty/src/tests.rs b/crates/hir-ty/src/tests.rs index bcd63d9472a81..83d31f002a1dc 100644 --- a/crates/hir-ty/src/tests.rs +++ b/crates/hir-ty/src/tests.rs @@ -23,7 +23,7 @@ use hir_def::{ src::HasSource, AssocItemId, DefWithBodyId, HasModule, LocalModuleId, Lookup, ModuleDefId, }; -use hir_expand::{db::AstDatabase, InFile}; +use hir_expand::{db::ExpandDatabase, InFile}; use once_cell::race::OnceBool; use stdx::format_to; use syntax::{ diff --git a/crates/hir-ty/src/tests/method_resolution.rs b/crates/hir-ty/src/tests/method_resolution.rs index e568e7013fac0..378d478336102 100644 --- a/crates/hir-ty/src/tests/method_resolution.rs +++ b/crates/hir-ty/src/tests/method_resolution.rs @@ -9,6 +9,7 @@ fn infer_slice_method() { check_types( r#" impl [T] { + #[rustc_allow_incoherent_impl] fn foo(&self) -> T { loop {} } @@ -35,6 +36,7 @@ fn test() { //- /lib.rs crate:other_crate mod foo { impl f32 { + #[rustc_allow_incoherent_impl] pub fn foo(self) -> f32 { 0. 
} } } @@ -47,6 +49,7 @@ fn infer_array_inherent_impl() { check_types( r#" impl [T; N] { + #[rustc_allow_incoherent_impl] fn foo(&self) -> T { loop {} } @@ -1437,6 +1440,7 @@ fn resolve_const_generic_array_methods() { r#" #[lang = "array"] impl [T; N] { + #[rustc_allow_incoherent_impl] pub fn map(self, f: F) -> [U; N] where F: FnMut(T) -> U, @@ -1445,6 +1449,7 @@ impl [T; N] { #[lang = "slice"] impl [T] { + #[rustc_allow_incoherent_impl] pub fn map(self, f: F) -> &[U] where F: FnMut(T) -> U, @@ -1468,6 +1473,7 @@ struct Const; #[lang = "array"] impl [T; N] { + #[rustc_allow_incoherent_impl] pub fn my_map(self, f: F, c: Const) -> [U; X] where F: FnMut(T) -> U, @@ -1476,6 +1482,7 @@ impl [T; N] { #[lang = "slice"] impl [T] { + #[rustc_allow_incoherent_impl] pub fn my_map(self, f: F, c: Const) -> &[U] where F: FnMut(T) -> U, @@ -1874,14 +1881,14 @@ fn incoherent_impls() { pub struct Box(T); use core::error::Error; -#[rustc_allow_incoherent_impl] impl dyn Error { + #[rustc_allow_incoherent_impl] pub fn downcast(self: Box) -> Result, Box> { loop {} } } -#[rustc_allow_incoherent_impl] impl dyn Error + Send { + #[rustc_allow_incoherent_impl] /// Attempts to downcast the box to a concrete type. pub fn downcast(self: Box) -> Result, Box> { let err: Box = self; diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs index e6b4f13c8d113..689f0da44f680 100644 --- a/crates/hir-ty/src/tests/regression.rs +++ b/crates/hir-ty/src/tests/regression.rs @@ -1756,3 +1756,35 @@ const C: usize = 2 + 2; "#, ); } + +#[test] +fn regression_14164() { + check_types( + r#" +trait Rec { + type K; + type Rebind: Rec; +} + +trait Expr { + type Part: Rec; + fn foo(_: ::Rebind) {} +} + +struct Head(K); +impl Rec for Head { + type K = K; + type Rebind = Head; +} + +fn test() +where + E: Expr>, +{ + let head; + //^^^^ Head + E::foo(head); +} +"#, + ); +} diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs index 0e9c349afef36..13cc3fea52d16 100644 --- a/crates/hir-ty/src/tests/simple.rs +++ b/crates/hir-ty/src/tests/simple.rs @@ -1116,21 +1116,22 @@ fn infer_inherent_method() { fn infer_inherent_method_str() { check_infer( r#" - #[lang = "str"] - impl str { - fn foo(&self) -> i32 {} - } +#![rustc_coherence_is_core] +#[lang = "str"] +impl str { + fn foo(&self) -> i32 {} +} - fn test() { - "foo".foo(); - } - "#, +fn test() { + "foo".foo(); +} +"#, expect![[r#" - 39..43 'self': &str - 52..54 '{}': i32 - 68..88 '{ ...o(); }': () - 74..79 '"foo"': &str - 74..85 '"foo".foo()': i32 + 67..71 'self': &str + 80..82 '{}': i32 + 96..116 '{ ...o(); }': () + 102..107 '"foo"': &str + 102..113 '"foo".foo()': i32 "#]], ); } @@ -2640,6 +2641,7 @@ impl [T] {} #[lang = "slice_alloc"] impl [T] { + #[rustc_allow_incoherent_impl] pub fn into_vec(self: Box) -> Vec { unimplemented!() } @@ -2655,22 +2657,22 @@ struct Astruct; impl B for Astruct {} "#, expect![[r#" - 569..573 'self': Box<[T], A> - 602..634 '{ ... 
}': Vec - 648..761 '{ ...t]); }': () - 658..661 'vec': Vec - 664..679 '<[_]>::into_vec': fn into_vec(Box<[i32], Global>) -> Vec - 664..691 '<[_]>:...1i32])': Vec - 680..690 'box [1i32]': Box<[i32; 1], Global> - 684..690 '[1i32]': [i32; 1] - 685..689 '1i32': i32 - 701..702 'v': Vec, Global> - 722..739 '<[_]> ...to_vec': fn into_vec, Global>(Box<[Box], Global>) -> Vec, Global> - 722..758 '<[_]> ...ruct])': Vec, Global> - 740..757 'box [b...truct]': Box<[Box; 1], Global> - 744..757 '[box Astruct]': [Box; 1] - 745..756 'box Astruct': Box - 749..756 'Astruct': Astruct + 604..608 'self': Box<[T], A> + 637..669 '{ ... }': Vec + 683..796 '{ ...t]); }': () + 693..696 'vec': Vec + 699..714 '<[_]>::into_vec': fn into_vec(Box<[i32], Global>) -> Vec + 699..726 '<[_]>:...1i32])': Vec + 715..725 'box [1i32]': Box<[i32; 1], Global> + 719..725 '[1i32]': [i32; 1] + 720..724 '1i32': i32 + 736..737 'v': Vec, Global> + 757..774 '<[_]> ...to_vec': fn into_vec, Global>(Box<[Box], Global>) -> Vec, Global> + 757..793 '<[_]> ...ruct])': Vec, Global> + 775..792 'box [b...truct]': Box<[Box; 1], Global> + 779..792 '[box Astruct]': [Box; 1] + 780..791 'box Astruct': Box + 784..791 'Astruct': Astruct "#]], ) } diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs index 015085bde4563..da76d7fd83f76 100644 --- a/crates/hir-ty/src/tests/traits.rs +++ b/crates/hir-ty/src/tests/traits.rs @@ -82,6 +82,46 @@ async fn test() { ); } +#[test] +fn infer_async_closure() { + check_types( + r#" +//- minicore: future, option +async fn test() { + let f = async move |x: i32| x + 42; + f; +// ^ |i32| -> impl Future + let a = f(4); + a; +// ^ impl Future + let x = a.await; + x; +// ^ i32 + let f = async move || 42; + f; +// ^ || -> impl Future + let a = f(); + a; +// ^ impl Future + let x = a.await; + x; +// ^ i32 + let b = ((async move || {})()).await; + b; +// ^ () + let c = async move || { + let y = None; + y + // ^ Option + }; + let _: Option = c().await; + c; +// ^ || -> impl Future> +} +"#, + ); +} + #[test] fn auto_sized_async_block() { check_no_mismatches( @@ -493,29 +533,30 @@ fn tuple_struct_with_fn() { r#" struct S(fn(u32) -> u64); fn test() -> u64 { - let a = S(|i| 2*i); + let a = S(|i| 2*i as u64); let b = a.0(4); a.0(2) }"#, expect![[r#" - 43..101 '{ ...0(2) }': u64 + 43..108 '{ ...0(2) }': u64 53..54 'a': S 57..58 'S': S(fn(u32) -> u64) -> S - 57..67 'S(|i| 2*i)': S - 59..66 '|i| 2*i': |u32| -> u64 + 57..74 'S(|i| ...s u64)': S + 59..73 '|i| 2*i as u64': |u32| -> u64 60..61 'i': u32 - 63..64 '2': u32 - 63..66 '2*i': u32 + 63..64 '2': u64 + 63..73 '2*i as u64': u64 65..66 'i': u32 - 77..78 'b': u64 - 81..82 'a': S - 81..84 'a.0': fn(u32) -> u64 - 81..87 'a.0(4)': u64 - 85..86 '4': u32 - 93..94 'a': S - 93..96 'a.0': fn(u32) -> u64 - 93..99 'a.0(2)': u64 - 97..98 '2': u32 + 65..73 'i as u64': u64 + 84..85 'b': u64 + 88..89 'a': S + 88..91 'a.0': fn(u32) -> u64 + 88..94 'a.0(4)': u64 + 92..93 '4': u32 + 100..101 'a': S + 100..103 'a.0': fn(u32) -> u64 + 100..106 'a.0(2)': u64 + 104..105 '2': u32 "#]], ); } diff --git a/crates/hir/src/db.rs b/crates/hir/src/db.rs index cd46573913965..0935b5ea51945 100644 --- a/crates/hir/src/db.rs +++ b/crates/hir/src/db.rs @@ -5,7 +5,7 @@ //! But we need this for at least LRU caching at the query level. 
pub use hir_def::db::*; pub use hir_expand::db::{ - AstDatabase, AstDatabaseStorage, AstIdMapQuery, ExpandProcMacroQuery, HygieneFrameQuery, + AstIdMapQuery, ExpandDatabase, ExpandDatabaseStorage, ExpandProcMacroQuery, HygieneFrameQuery, InternMacroCallQuery, MacroArgTextQuery, MacroDefQuery, MacroExpandErrorQuery, MacroExpandQuery, ParseMacroExpansionQuery, }; diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index 8f019a81b2db4..253d62dafc60b 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -3,6 +3,8 @@ //! //! This probably isn't the best way to do this -- ideally, diagnostics should //! be expressed in terms of hir types themselves. +pub use hir_ty::diagnostics::{IncoherentImpl, IncorrectCase}; + use base_db::CrateId; use cfg::{CfgExpr, CfgOptions}; use either::Either; @@ -35,6 +37,7 @@ diagnostics![ InactiveCode, IncorrectCase, InvalidDeriveTarget, + IncoherentImpl, MacroError, MalformedDerive, MismatchedArgCount, @@ -220,5 +223,3 @@ pub struct NeedMut { pub struct UnusedMut { pub local: Local, } - -pub use hir_ty::diagnostics::IncorrectCase; diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 25c07a2fbd3f9..35424feec8b29 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -85,10 +85,10 @@ use crate::db::{DefDatabase, HirDatabase}; pub use crate::{ attrs::{HasAttrs, Namespace}, diagnostics::{ - AnyDiagnostic, BreakOutsideOfLoop, ExpectedFunction, InactiveCode, IncorrectCase, - InvalidDeriveTarget, MacroError, MalformedDerive, MismatchedArgCount, MissingFields, - MissingMatchArms, MissingUnsafe, NeedMut, NoSuchField, PrivateAssocItem, PrivateField, - ReplaceFilterMapNextWithFindMap, TypeMismatch, UnimplementedBuiltinMacro, + AnyDiagnostic, BreakOutsideOfLoop, ExpectedFunction, InactiveCode, IncoherentImpl, + IncorrectCase, InvalidDeriveTarget, MacroError, MalformedDerive, MismatchedArgCount, + MissingFields, MissingMatchArms, MissingUnsafe, NeedMut, NoSuchField, PrivateAssocItem, + PrivateField, ReplaceFilterMapNextWithFindMap, TypeMismatch, UnimplementedBuiltinMacro, UnresolvedExternCrate, UnresolvedField, UnresolvedImport, UnresolvedMacroCall, UnresolvedMethodCall, UnresolvedModule, UnresolvedProcMacro, UnusedMut, }, @@ -604,11 +604,23 @@ impl Module { } } + let inherent_impls = db.inherent_impls_in_crate(self.id.krate()); + for impl_def in self.impl_defs(db) { for diag in db.impl_data_with_diagnostics(impl_def.id).1.iter() { emit_def_diagnostic(db, acc, diag); } + if inherent_impls.invalid_impls().contains(&impl_def.id) { + let loc = impl_def.id.lookup(db.upcast()); + let tree = loc.id.item_tree(db.upcast()); + let node = &tree[loc.id.value]; + let file_id = loc.id.file_id(); + let ast_id_map = db.ast_id_map(file_id); + + acc.push(IncoherentImpl { impl_: ast_id_map.get(node.ast_id()), file_id }.into()) + } + for item in impl_def.items(db) { let def: DefWithBody = match item { AssocItem::Function(it) => it.into(), @@ -3210,6 +3222,14 @@ impl Type { matches!(self.ty.kind(Interner), TyKind::Scalar(Scalar::Uint(UintTy::Usize))) } + pub fn is_float(&self) -> bool { + matches!(self.ty.kind(Interner), TyKind::Scalar(Scalar::Float(_))) + } + + pub fn is_char(&self) -> bool { + matches!(self.ty.kind(Interner), TyKind::Scalar(Scalar::Char)) + } + pub fn is_int_or_uint(&self) -> bool { match self.ty.kind(Interner) { TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_)) => true, @@ -3224,6 +3244,13 @@ impl Type { } } + pub fn as_slice(&self) -> Option { + match &self.ty.kind(Interner) { + TyKind::Slice(ty) => 
Some(self.derived(ty.clone())), + _ => None, + } + } + pub fn strip_references(&self) -> Type { self.derived(self.ty.strip_references().clone()) } diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index 8bd905d0113a1..407ba6f65844e 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -15,7 +15,7 @@ use hir_def::{ AsMacroCall, DefWithBodyId, FieldId, FunctionId, MacroId, TraitId, VariantId, }; use hir_expand::{ - db::AstDatabase, + db::ExpandDatabase, name::{known, AsName}, ExpansionInfo, MacroCallId, }; @@ -411,7 +411,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { self.imp.resolve_record_field(field) } - pub fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option { + pub fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<(Field, Type)> { self.imp.resolve_record_pat_field(field) } @@ -1201,7 +1201,7 @@ impl<'db> SemanticsImpl<'db> { self.analyze(field.syntax())?.resolve_record_field(self.db, field) } - fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option { + fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<(Field, Type)> { self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field) } @@ -1536,7 +1536,7 @@ impl<'db> SemanticsImpl<'db> { fn macro_call_to_macro_id( ctx: &mut SourceToDefCtx<'_, '_>, - db: &dyn AstDatabase, + db: &dyn ExpandDatabase, macro_call_id: MacroCallId, ) -> Option { let loc = db.lookup_intern_macro_call(macro_call_id); diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 133fa810d6613..c24d196e1b624 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -441,14 +441,17 @@ impl SourceAnalyzer { &self, db: &dyn HirDatabase, field: &ast::RecordPatField, - ) -> Option { + ) -> Option<(Field, Type)> { let field_name = field.field_name()?.as_name(); let record_pat = ast::RecordPat::cast(field.syntax().parent().and_then(|p| p.parent())?)?; let pat_id = self.pat_id(&record_pat.into())?; let variant = self.infer.as_ref()?.variant_resolution_for_pat(pat_id)?; let variant_data = variant.variant_data(db.upcast()); let field = FieldId { parent: variant, local_id: variant_data.field(&field_name)? 
}; - Some(field.into()) + let (_, subst) = self.infer.as_ref()?.type_of_pat.get(pat_id)?.as_adt()?; + let field_ty = + db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst); + Some((field.into(), Type::new_with_resolver(db, &self.resolver, field_ty))) } pub(crate) fn resolve_macro_call( diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index eef037f98754a..0768389281ca3 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -1027,7 +1027,7 @@ fn next_space_for_fn_after_call_site(expr: ast::CallableExpr) -> Option, ) -> Option<(FileId, GeneratedFunctionTarget)> { let file = module_source.file_id.original_file(db); diff --git a/crates/ide-assists/src/handlers/inline_call.rs b/crates/ide-assists/src/handlers/inline_call.rs index 5ac18727c1960..28d815e81b49d 100644 --- a/crates/ide-assists/src/handlers/inline_call.rs +++ b/crates/ide-assists/src/handlers/inline_call.rs @@ -363,10 +363,10 @@ fn inline( .collect(); if function.self_param(sema.db).is_some() { - let this = || make::name_ref("this").syntax().clone_for_update(); + let this = || make::name_ref("this").syntax().clone_for_update().first_token().unwrap(); if let Some(self_local) = params[0].2.as_local(sema.db) { usages_for_locals(self_local) - .flat_map(|FileReference { name, range, .. }| match name { + .filter_map(|FileReference { name, range, .. }| match name { ast::NameLike::NameRef(_) => Some(body.syntax().covering_element(range)), _ => None, }) @@ -680,6 +680,42 @@ impl Foo { } } +fn main() { + let x = { + let ref this = Foo(3); + Foo(this.0 + 2) + }; +} +"#, + ); + } + + #[test] + fn generic_method_by_ref() { + check_assist( + inline_call, + r#" +struct Foo(u32); + +impl Foo { + fn add(&self, a: u32) -> Self { + Foo(self.0 + a) + } +} + +fn main() { + let x = Foo(3).add$0::(2); +} +"#, + r#" +struct Foo(u32); + +impl Foo { + fn add(&self, a: u32) -> Self { + Foo(self.0 + a) + } +} + fn main() { let x = { let ref this = Foo(3); diff --git a/crates/ide-assists/src/handlers/remove_dbg.rs b/crates/ide-assists/src/handlers/remove_dbg.rs index 1361cdf00cc6d..a403d5bc672d2 100644 --- a/crates/ide-assists/src/handlers/remove_dbg.rs +++ b/crates/ide-assists/src/handlers/remove_dbg.rs @@ -46,7 +46,7 @@ pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<( acc.add( AssistId("remove_dbg", AssistKind::Refactor), "Remove dbg!()", - ctx.selection_trimmed(), + replacements.iter().map(|&(range, _)| range).reduce(|acc, range| acc.cover(range)).unwrap(), |builder| { for (range, expr) in replacements { if let Some(expr) = expr { diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs index 4d489b62b5c88..8b07e29a5879f 100644 --- a/crates/ide-assists/src/lib.rs +++ b/crates/ide-assists/src/lib.rs @@ -265,7 +265,6 @@ mod handlers { inline_local_variable::inline_local_variable, inline_type_alias::inline_type_alias, inline_type_alias::inline_type_alias_uses, - inline_macro::inline_macro, introduce_named_generic::introduce_named_generic, introduce_named_lifetime::introduce_named_lifetime, invert_if::invert_if, @@ -286,7 +285,6 @@ mod handlers { raw_string::add_hash, raw_string::make_usual_string, raw_string::remove_hash, - remove_dbg::remove_dbg, remove_mut::remove_mut, remove_unused_param::remove_unused_param, remove_parentheses::remove_parentheses, @@ -335,6 +333,9 @@ mod handlers { generate_setter::generate_setter, 
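The `source_analyzer` hunk above is the reason `resolve_record_pat_field` now returns a `Type` as well: the declared field type may mention the ADT's generic parameters, so it is instantiated with the substitution recorded for the pattern before being handed back. A toy sketch of that instantiation step (the real code goes through `db.field_types(..)` and chalk's `substitute`; the `Ty` here is illustrative only):

    #[derive(Debug, Clone, PartialEq)]
    enum Ty {
        /// Generic parameter by index, e.g. `T` = 0.
        Param(usize),
        Adt(&'static str, Vec<Ty>),
        U32,
    }

    /// Replace `Param(i)` with `subst[i]`, the toy equivalent of
    /// `field_types(variant)[field].substitute(Interner, subst)`.
    fn substitute(ty: &Ty, subst: &[Ty]) -> Ty {
        match ty {
            Ty::Param(i) => subst[*i].clone(),
            Ty::Adt(name, args) => Ty::Adt(*name, args.iter().map(|t| substitute(t, subst)).collect()),
            Ty::U32 => Ty::U32,
        }
    }

    fn main() {
        // struct Wrapper<T> { value: T } matched as `Wrapper::<u32> { value, .. }`:
        // the declared field type `T` is instantiated to `u32`.
        let subst = vec![Ty::U32];
        assert_eq!(substitute(&Ty::Param(0), &subst), Ty::U32);
        // The substitution also recurses through type constructors.
        let nested = Ty::Adt("Option", vec![Ty::Param(0)]);
        assert_eq!(substitute(&nested, &subst), Ty::Adt("Option", vec![Ty::U32]));
    }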
generate_delegate_methods::generate_delegate_methods, generate_deref::generate_deref, + // + remove_dbg::remove_dbg, + inline_macro::inline_macro, // Are you sure you want to add new assist here, and not to the // sorted list above? ] diff --git a/crates/ide-completion/src/completions.rs b/crates/ide-completion/src/completions.rs index eb87d6c58262d..c3136f6df4b39 100644 --- a/crates/ide-completion/src/completions.rs +++ b/crates/ide-completion/src/completions.rs @@ -23,7 +23,7 @@ pub(crate) mod env_vars; use std::iter; -use hir::{known, ScopeDef}; +use hir::{known, ScopeDef, Variant}; use ide_db::{imports::import_assets::LocatedImport, SymbolKind}; use syntax::ast; @@ -537,17 +537,20 @@ fn enum_variants_with_paths( impl_: &Option, cb: impl Fn(&mut Completions, &CompletionContext<'_>, hir::Variant, hir::ModPath), ) { + let mut process_variant = |variant: Variant| { + let self_path = hir::ModPath::from_segments( + hir::PathKind::Plain, + iter::once(known::SELF_TYPE).chain(iter::once(variant.name(ctx.db))), + ); + + cb(acc, ctx, variant, self_path); + }; + let variants = enum_.variants(ctx.db); if let Some(impl_) = impl_.as_ref().and_then(|impl_| ctx.sema.to_def(impl_)) { if impl_.self_ty(ctx.db).as_adt() == Some(hir::Adt::Enum(enum_)) { - for &variant in &variants { - let self_path = hir::ModPath::from_segments( - hir::PathKind::Plain, - iter::once(known::SELF_TYPE).chain(iter::once(variant.name(ctx.db))), - ); - cb(acc, ctx, variant, self_path); - } + variants.iter().for_each(|variant| process_variant(*variant)); } } diff --git a/crates/ide-completion/src/completions/dot.rs b/crates/ide-completion/src/completions/dot.rs index 09ac57153ae8b..77246379e7bd9 100644 --- a/crates/ide-completion/src/completions/dot.rs +++ b/crates/ide-completion/src/completions/dot.rs @@ -415,7 +415,6 @@ fn foo(a: lib::A) { a.$0 } fn test_local_impls() { check( r#" -//- /lib.rs crate:lib pub struct A {} mod m { impl super::A { @@ -427,9 +426,8 @@ mod m { } } } -//- /main.rs crate:main deps:lib -fn foo(a: lib::A) { - impl lib::A { +fn foo(a: A) { + impl A { fn local_method(&self) {} } a.$0 diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs index 7dc29c3d5acad..8cbf89e9c3019 100644 --- a/crates/ide-completion/src/context.rs +++ b/crates/ide-completion/src/context.rs @@ -220,6 +220,8 @@ pub(super) struct PatternContext { /// The record pattern this name or ref is a field of pub(super) record_pat: Option, pub(super) impl_: Option, + /// List of missing variants in a match expr + pub(super) missing_variants: Vec, } #[derive(Debug, Clone, PartialEq, Eq)] diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs index db0045aef6e0b..a94c404586b11 100644 --- a/crates/ide-completion/src/context/analysis.rs +++ b/crates/ide-completion/src/context/analysis.rs @@ -1,7 +1,7 @@ //! Module responsible for analyzing the code surrounding the cursor for completion. use std::iter; -use hir::{Semantics, Type, TypeInfo}; +use hir::{Semantics, Type, TypeInfo, Variant}; use ide_db::{active_parameter::ActiveParameter, RootDatabase}; use syntax::{ algo::{find_node_at_offset, non_trivia_sibling}, @@ -353,7 +353,7 @@ fn expected_type_and_name( _ => ty, }; - loop { + let (ty, name) = loop { break match_ast! 
{ match node { ast::LetStmt(it) => { @@ -385,9 +385,7 @@ fn expected_type_and_name( token.clone(), ).map(|ap| { let name = ap.ident().map(NameOrNameRef::Name); - - let ty = strip_refs(ap.ty); - (Some(ty), name) + (Some(ap.ty), name) }) .unwrap_or((None, None)) }, @@ -489,7 +487,8 @@ fn expected_type_and_name( }, } }; - } + }; + (ty.map(strip_refs), name) } fn classify_lifetime( @@ -1133,6 +1132,9 @@ fn pattern_context_for( pat: ast::Pat, ) -> PatternContext { let mut param_ctx = None; + + let mut missing_variants = vec![]; + let (refutability, has_type_ascription) = pat .syntax() @@ -1162,7 +1164,52 @@ fn pattern_context_for( })(); return (PatternRefutability::Irrefutable, has_type_ascription) }, - ast::MatchArm(_) => PatternRefutability::Refutable, + ast::MatchArm(match_arm) => { + let missing_variants_opt = match_arm + .syntax() + .parent() + .and_then(ast::MatchArmList::cast) + .and_then(|match_arm_list| { + match_arm_list + .syntax() + .parent() + .and_then(ast::MatchExpr::cast) + .and_then(|match_expr| { + let expr_opt = find_opt_node_in_file(&original_file, match_expr.expr()); + + expr_opt.and_then(|expr| { + sema.type_of_expr(&expr)? + .adjusted() + .autoderef(sema.db) + .find_map(|ty| match ty.as_adt() { + Some(hir::Adt::Enum(e)) => Some(e), + _ => None, + }).and_then(|enum_| { + Some(enum_.variants(sema.db)) + }) + }) + }).and_then(|variants| { + Some(variants.iter().filter_map(|variant| { + let variant_name = variant.name(sema.db).to_string(); + + let variant_already_present = match_arm_list.arms().any(|arm| { + arm.pat().and_then(|pat| { + let pat_already_present = pat.syntax().to_string().contains(&variant_name); + pat_already_present.then(|| pat_already_present) + }).is_some() + }); + + (!variant_already_present).then_some(variant.clone()) + }).collect::>()) + }) + }); + + if let Some(missing_variants_) = missing_variants_opt { + missing_variants = missing_variants_; + }; + + PatternRefutability::Refutable + }, ast::LetExpr(_) => PatternRefutability::Refutable, ast::ForExpr(_) => PatternRefutability::Irrefutable, _ => PatternRefutability::Irrefutable, @@ -1184,6 +1231,7 @@ fn pattern_context_for( ref_token, record_pat: None, impl_: fetch_immediate_impl(sema, original_file, pat.syntax()), + missing_variants, } } diff --git a/crates/ide-completion/src/context/tests.rs b/crates/ide-completion/src/context/tests.rs index a654a5db57445..82a1c10c5314f 100644 --- a/crates/ide-completion/src/context/tests.rs +++ b/crates/ide-completion/src/context/tests.rs @@ -411,3 +411,15 @@ fn main() { expect!["ty: i32, name: ?"], ); } + +#[test] +fn expected_type_ref_return_pos() { + check_expected_type_and_name( + r#" +fn f(thing: u32) -> &u32 { + &thin$0 +} +"#, + expect!["ty: u32, name: ?"], + ); +} diff --git a/crates/ide-completion/src/render/pattern.rs b/crates/ide-completion/src/render/pattern.rs index 21b4bc2174bee..9225c91bebf51 100644 --- a/crates/ide-completion/src/render/pattern.rs +++ b/crates/ide-completion/src/render/pattern.rs @@ -37,7 +37,9 @@ pub(crate) fn render_struct_pat( let lookup = format_literal_lookup(name.as_str(), kind); let pat = render_pat(&ctx, pattern_ctx, &escaped_name, kind, &visible_fields, fields_omitted)?; - Some(build_completion(ctx, label, lookup, pat, strukt)) + let db = ctx.db(); + + Some(build_completion(ctx, label, lookup, pat, strukt, strukt.ty(db), false)) } pub(crate) fn render_variant_pat( @@ -52,6 +54,7 @@ pub(crate) fn render_variant_pat( let fields = variant.fields(ctx.db()); let (visible_fields, fields_omitted) = visible_fields(ctx.completion, 
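The new `missing_variants` field is filled by walking up to the enclosing `MatchExpr`, resolving the scrutinee to an enum, and keeping only the variants whose name does not yet appear textually in an existing arm; the `render/pattern.rs` change that follows then boosts those candidates through `relevance.type_match`. A stripped-down sketch of the filtering step over plain strings, mirroring the textual check used above:

    /// Keep only the enum variants that no existing match arm mentions yet.
    /// (The real code compares against `ast::Pat` text; plain strings stand in.)
    fn missing_variants<'a>(all_variants: &[&'a str], existing_arms: &[&str]) -> Vec<&'a str> {
        all_variants
            .iter()
            .copied()
            .filter(|variant| !existing_arms.iter().any(|arm| arm.contains(*variant)))
            .collect()
    }

    fn main() {
        // enum Baz { Foo, Bar } with one arm already written, as in the
        // `record_pattern_field_enum` completion test further down.
        let variants = ["Foo", "Bar"];
        let arms = ["Baz::Foo => ()"];
        assert_eq!(missing_variants(&variants, &arms), vec!["Bar"]);
    }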
&fields, variant)?; + let enum_ty = variant.parent_enum(ctx.db()).ty(ctx.db()); let (name, escaped_name) = match path { Some(path) => (path.unescaped().to_string().into(), path.to_string().into()), @@ -81,7 +84,15 @@ pub(crate) fn render_variant_pat( } }; - Some(build_completion(ctx, label, lookup, pat, variant)) + Some(build_completion( + ctx, + label, + lookup, + pat, + variant, + enum_ty, + pattern_ctx.missing_variants.contains(&variant), + )) } fn build_completion( @@ -90,13 +101,22 @@ fn build_completion( lookup: SmolStr, pat: String, def: impl HasAttrs + Copy, + adt_ty: hir::Type, + // Missing in context of match statement completions + is_variant_missing: bool, ) -> CompletionItem { + let mut relevance = ctx.completion_relevance(); + + if is_variant_missing { + relevance.type_match = super::compute_type_match(ctx.completion, &adt_ty); + } + let mut item = CompletionItem::new(CompletionItemKind::Binding, ctx.source_range(), label); item.set_documentation(ctx.docs(def)) .set_deprecated(ctx.is_deprecated(def)) .detail(&pat) .lookup_by(lookup) - .set_relevance(ctx.completion_relevance()); + .set_relevance(relevance); match ctx.snippet_cap() { Some(snippet_cap) => item.insert_snippet(snippet_cap, pat), None => item.insert_text(pat), diff --git a/crates/ide-completion/src/tests/pattern.rs b/crates/ide-completion/src/tests/pattern.rs index ad9254e7f2ecf..c0e485c36fdd1 100644 --- a/crates/ide-completion/src/tests/pattern.rs +++ b/crates/ide-completion/src/tests/pattern.rs @@ -614,6 +614,7 @@ fn f(u: U) { check_empty( r#" +#![rustc_coherence_is_core] #[lang = "u32"] impl u32 { pub const MIN: Self = 0; diff --git a/crates/ide-completion/src/tests/record.rs b/crates/ide-completion/src/tests/record.rs index 328faaa060f04..65cefdb0856d2 100644 --- a/crates/ide-completion/src/tests/record.rs +++ b/crates/ide-completion/src/tests/record.rs @@ -46,6 +46,66 @@ fn foo(s: Struct) { ); } +#[test] +fn record_pattern_field_enum() { + check( + r#" +//- minicore:result +enum Baz { Foo, Bar } + +fn foo(baz: Baz) { + match baz { + Baz::Foo => (), + $0 + } +} +"#, + expect![[r#" + en Baz + en Result + md core + ev Err + ev Ok + bn Baz::Bar Baz::Bar$0 + bn Baz::Foo Baz::Foo$0 + bn Err(…) Err($1)$0 + bn Ok(…) Ok($1)$0 + kw mut + kw ref + "#]], + ); + + check( + r#" +//- minicore:result +enum Baz { Foo, Bar } + +fn foo(baz: Baz) { + use Baz::*; + match baz { + Foo => (), + $0 + } +} + "#, + expect![[r#" + en Baz + en Result + md core + ev Bar + ev Err + ev Foo + ev Ok + bn Bar Bar$0 + bn Err(…) Err($1)$0 + bn Foo Foo$0 + bn Ok(…) Ok($1)$0 + kw mut + kw ref + "#]], + ); +} + #[test] fn pattern_enum_variant() { check( diff --git a/crates/ide-completion/src/tests/special.rs b/crates/ide-completion/src/tests/special.rs index cb71c7b2bdef3..f8a6f6cd3ed06 100644 --- a/crates/ide-completion/src/tests/special.rs +++ b/crates/ide-completion/src/tests/special.rs @@ -608,6 +608,7 @@ fn f() { } //- /core.rs crate:core +#![rustc_coherence_is_core] #[lang = "u8"] impl u8 { pub const MAX: Self = 255; diff --git a/crates/ide-db/src/apply_change.rs b/crates/ide-db/src/apply_change.rs index 244e99fe2e27d..ea1d9cc4919d3 100644 --- a/crates/ide-db/src/apply_change.rs +++ b/crates/ide-db/src/apply_change.rs @@ -71,7 +71,7 @@ impl RootDatabase { base_db::SourceRootQuery base_db::SourceRootCratesQuery - // AstDatabase + // ExpandDatabase hir::db::AstIdMapQuery hir::db::ParseMacroExpansionQuery hir::db::InternMacroCallQuery diff --git a/crates/ide-db/src/defs.rs b/crates/ide-db/src/defs.rs index 1322f5228e8b9..4071c490b7fc3 100644 --- 
a/crates/ide-db/src/defs.rs +++ b/crates/ide-db/src/defs.rs @@ -327,7 +327,7 @@ impl NameClass { let pat_parent = ident_pat.syntax().parent(); if let Some(record_pat_field) = pat_parent.and_then(ast::RecordPatField::cast) { if record_pat_field.name_ref().is_none() { - if let Some(field) = sema.resolve_record_pat_field(&record_pat_field) { + if let Some((field, _)) = sema.resolve_record_pat_field(&record_pat_field) { return Some(NameClass::PatFieldShorthand { local_def: local, field_ref: field, @@ -483,6 +483,13 @@ impl NameRefClass { }, ast::RecordPatField(record_pat_field) => { sema.resolve_record_pat_field(&record_pat_field) + .map(|(field, ..)|field) + .map(Definition::Field) + .map(NameRefClass::Definition) + }, + ast::RecordExprField(record_expr_field) => { + sema.resolve_record_field(&record_expr_field) + .map(|(field, ..)|field) .map(Definition::Field) .map(NameRefClass::Definition) }, diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs index ae120470047e4..b1df11bf91172 100644 --- a/crates/ide-db/src/lib.rs +++ b/crates/ide-db/src/lib.rs @@ -50,7 +50,7 @@ use base_db::{ AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast, }; use hir::{ - db::{AstDatabase, DefDatabase, HirDatabase}, + db::{DefDatabase, ExpandDatabase, HirDatabase}, symbols::FileSymbolKind, }; use stdx::hash::NoHashHashSet; @@ -68,7 +68,7 @@ pub type FxIndexMap = #[salsa::database( base_db::SourceDatabaseExtStorage, base_db::SourceDatabaseStorage, - hir::db::AstDatabaseStorage, + hir::db::ExpandDatabaseStorage, hir::db::DefDatabaseStorage, hir::db::HirDatabaseStorage, hir::db::InternDatabaseStorage, @@ -95,8 +95,8 @@ impl fmt::Debug for RootDatabase { } } -impl Upcast for RootDatabase { - fn upcast(&self) -> &(dyn AstDatabase + 'static) { +impl Upcast for RootDatabase { + fn upcast(&self) -> &(dyn ExpandDatabase + 'static) { &*self } } diff --git a/crates/ide-diagnostics/src/handlers/incoherent_impl.rs b/crates/ide-diagnostics/src/handlers/incoherent_impl.rs new file mode 100644 index 0000000000000..72af9ebfcbb62 --- /dev/null +++ b/crates/ide-diagnostics/src/handlers/incoherent_impl.rs @@ -0,0 +1,77 @@ +use hir::InFile; + +use crate::{Diagnostic, DiagnosticsContext, Severity}; + +// Diagnostic: incoherent-impl +// +// This diagnostic is triggered if the targe type of an impl is from a foreign crate. 
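For orientation, this is what the new diagnostic means in ordinary user code: an inherent `impl` must live in the crate that defines the self type, and the `rustc_*` attributes exercised in the tests below are internal escape hatches used by the standard library rather than something user code writes. A small compilable illustration of the accepted shape (the rejected shapes appear only in comments, since they do not compile):

    // Defined in this crate, so an inherent impl is coherent.
    struct Local(u32);

    impl Local {
        fn value(&self) -> u32 {
            self.0
        }
    }

    // These would be flagged as `incoherent-impl` (and are rejected by rustc too):
    //
    //     impl bool {}      // primitive type, only allowed where coherence-is-core applies
    //     impl String {}    // type defined in another crate (alloc/std)
    //
    fn main() {
        assert_eq!(Local(3).value(), 3);
    }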
+pub(crate) fn incoherent_impl(ctx: &DiagnosticsContext<'_>, d: &hir::IncoherentImpl) -> Diagnostic { + Diagnostic::new( + "incoherent-impl", + format!("cannot define inherent `impl` for foreign type"), + ctx.sema.diagnostics_display_range(InFile::new(d.file_id, d.impl_.clone().into())).range, + ) + .severity(Severity::Error) +} + +#[cfg(test)] +mod change_case { + use crate::tests::check_diagnostics; + + #[test] + fn primitive() { + check_diagnostics( + r#" + impl bool {} +//^^^^^^^^^^^^ error: cannot define inherent `impl` for foreign type +"#, + ); + } + + #[test] + fn primitive_rustc_allow_incoherent_impl() { + check_diagnostics( + r#" +impl bool { + #[rustc_allow_incoherent_impl] + fn falsch(self) -> Self { false } +} +"#, + ); + } + + #[test] + fn rustc_allow_incoherent_impl() { + check_diagnostics( + r#" +//- /lib.rs crate:foo +#[rustc_has_incoherent_inherent_impls] +pub struct S; +//- /main.rs crate:main deps:foo +impl foo::S { + #[rustc_allow_incoherent_impl] + fn func(self) {} +} +"#, + ); + check_diagnostics( + r#" +//- /lib.rs crate:foo +pub struct S; +//- /main.rs crate:main deps:foo + impl foo::S { #[rustc_allow_incoherent_impl] fn func(self) {} } +//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: cannot define inherent `impl` for foreign type +"#, + ); + check_diagnostics( + r#" +//- /lib.rs crate:foo +#[rustc_has_incoherent_inherent_impls] +pub struct S; +//- /main.rs crate:main deps:foo + impl foo::S { fn func(self) {} } +//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: cannot define inherent `impl` for foreign type +"#, + ); + } +} diff --git a/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/crates/ide-diagnostics/src/handlers/incorrect_case.rs index 6a78c08d44c19..db88bf7b9313d 100644 --- a/crates/ide-diagnostics/src/handlers/incorrect_case.rs +++ b/crates/ide-diagnostics/src/handlers/incorrect_case.rs @@ -1,4 +1,4 @@ -use hir::{db::AstDatabase, InFile}; +use hir::{db::ExpandDatabase, InFile}; use ide_db::{assists::Assist, defs::NameClass}; use syntax::AstNode; diff --git a/crates/ide-diagnostics/src/handlers/missing_fields.rs b/crates/ide-diagnostics/src/handlers/missing_fields.rs index 14039087b3fd6..5c4327ff93413 100644 --- a/crates/ide-diagnostics/src/handlers/missing_fields.rs +++ b/crates/ide-diagnostics/src/handlers/missing_fields.rs @@ -1,6 +1,6 @@ use either::Either; use hir::{ - db::{AstDatabase, HirDatabase}, + db::{ExpandDatabase, HirDatabase}, known, AssocItem, HirDisplay, InFile, Type, }; use ide_db::{ diff --git a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs index ea1ea5a216dfc..eb32db250656c 100644 --- a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs +++ b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs @@ -1,4 +1,10 @@ -use crate::{Diagnostic, DiagnosticsContext}; +use hir::db::ExpandDatabase; +use ide_db::{assists::Assist, source_change::SourceChange}; +use syntax::{ast, SyntaxNode}; +use syntax::{match_ast, AstNode}; +use text_edit::TextEdit; + +use crate::{fix, Diagnostic, DiagnosticsContext}; // Diagnostic: missing-unsafe // @@ -9,11 +15,83 @@ pub(crate) fn missing_unsafe(ctx: &DiagnosticsContext<'_>, d: &hir::MissingUnsaf "this operation is unsafe and requires an unsafe function or block", ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range, ) + .with_fixes(fixes(ctx, d)) +} + +fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingUnsafe) -> Option> { + // The fixit will not work correctly for macro expansions, so we don't 
offer it in that case. + if d.expr.file_id.is_macro() { + return None; + } + + let root = ctx.sema.db.parse_or_expand(d.expr.file_id)?; + let expr = d.expr.value.to_node(&root); + + let node_to_add_unsafe_block = pick_best_node_to_add_unsafe_block(&expr)?; + + let replacement = format!("unsafe {{ {} }}", node_to_add_unsafe_block.text()); + let edit = TextEdit::replace(node_to_add_unsafe_block.text_range(), replacement); + let source_change = + SourceChange::from_text_edit(d.expr.file_id.original_file(ctx.sema.db), edit); + Some(vec![fix("add_unsafe", "Add unsafe block", source_change, expr.syntax().text_range())]) +} + +// Pick the first ancestor expression of the unsafe `expr` that is not a +// receiver of a method call, a field access, the left-hand side of an +// assignment, or a reference. As all of those cases would incur a forced move +// if wrapped which might not be wanted. That is: +// - `unsafe_expr.foo` -> `unsafe { unsafe_expr.foo }` +// - `unsafe_expr.foo.bar` -> `unsafe { unsafe_expr.foo.bar }` +// - `unsafe_expr.foo()` -> `unsafe { unsafe_expr.foo() }` +// - `unsafe_expr.foo.bar()` -> `unsafe { unsafe_expr.foo.bar() }` +// - `unsafe_expr += 1` -> `unsafe { unsafe_expr += 1 }` +// - `&unsafe_expr` -> `unsafe { &unsafe_expr }` +// - `&&unsafe_expr` -> `unsafe { &&unsafe_expr }` +fn pick_best_node_to_add_unsafe_block(unsafe_expr: &ast::Expr) -> Option { + // The `unsafe_expr` might be: + // - `ast::CallExpr`: call an unsafe function + // - `ast::MethodCallExpr`: call an unsafe method + // - `ast::PrefixExpr`: dereference a raw pointer + // - `ast::PathExpr`: access a static mut variable + for (node, parent) in + unsafe_expr.syntax().ancestors().zip(unsafe_expr.syntax().ancestors().skip(1)) + { + match_ast! { + match parent { + // If the `parent` is a `MethodCallExpr`, that means the `node` + // is the receiver of the method call, because only the receiver + // can be a direct child of a method call. The method name + // itself is not an expression but a `NameRef`, and an argument + // is a direct child of an `ArgList`. + ast::MethodCallExpr(_) => continue, + ast::FieldExpr(_) => continue, + ast::RefExpr(_) => continue, + ast::BinExpr(it) => { + // Check if the `node` is the left-hand side of an + // assignment, if so, we don't want to wrap it in an unsafe + // block, e.g. `unsafe_expr += 1` + let is_left_hand_side_of_assignment = { + if let Some(ast::BinaryOp::Assignment { .. 
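The ancestor-climbing rule documented above (keep widening the expression to wrap while it sits in a receiver position, a field access, a reference, or the left-hand side of an assignment) boils down to a small decision function. This toy version works over an explicit list of parent kinds instead of real syntax nodes:

    #[derive(Clone, Copy)]
    enum Parent {
        MethodCallReceiver,
        FieldAccess,
        Ref,
        AssignmentLhs,
        Other,
    }

    /// How many ancestors to climb before wrapping in `unsafe { ... }`.
    fn levels_to_wrap(parents_from_innermost: &[Parent]) -> usize {
        parents_from_innermost
            .iter()
            .copied()
            .take_while(|p| {
                matches!(
                    p,
                    Parent::MethodCallReceiver | Parent::FieldAccess | Parent::Ref | Parent::AssignmentLhs
                )
            })
            .count()
    }

    fn main() {
        // `unsafe_expr.foo.bar()` -> wrap the whole call: climb past the field
        // access and the method-call receiver position.
        assert_eq!(
            levels_to_wrap(&[Parent::FieldAccess, Parent::MethodCallReceiver, Parent::Other]),
            2
        );
        // `v.push(unsafe_expr)` -> the expression is only an argument, wrap it directly.
        assert_eq!(levels_to_wrap(&[Parent::Other]), 0);
        // `unsafe_expr = 1` -> wrap the whole assignment.
        assert_eq!(levels_to_wrap(&[Parent::AssignmentLhs, Parent::Other]), 1);
        // `&unsafe_expr` -> wrap the reference expression too.
        assert_eq!(levels_to_wrap(&[Parent::Ref, Parent::Other]), 1);
    }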
}) = it.op_kind() { + it.lhs().map(|lhs| lhs.syntax().text_range().contains_range(node.text_range())).unwrap_or(false) + } else { + false + } + }; + if !is_left_hand_side_of_assignment { + return Some(node); + } + }, + _ => { return Some(node); } + + } + } + } + None } #[cfg(test)] mod tests { - use crate::tests::check_diagnostics; + use crate::tests::{check_diagnostics, check_fix, check_no_fix}; #[test] fn missing_unsafe_diagnostic_with_raw_ptr() { @@ -23,7 +101,7 @@ fn main() { let x = &5 as *const usize; unsafe { let y = *x; } let z = *x; -} //^^ error: this operation is unsafe and requires an unsafe function or block +} //^^💡 error: this operation is unsafe and requires an unsafe function or block "#, ) } @@ -48,9 +126,9 @@ unsafe fn unsafe_fn() { fn main() { unsafe_fn(); - //^^^^^^^^^^^ error: this operation is unsafe and requires an unsafe function or block + //^^^^^^^^^^^💡 error: this operation is unsafe and requires an unsafe function or block HasUnsafe.unsafe_fn(); - //^^^^^^^^^^^^^^^^^^^^^ error: this operation is unsafe and requires an unsafe function or block + //^^^^^^^^^^^^^^^^^^^^^💡 error: this operation is unsafe and requires an unsafe function or block unsafe { unsafe_fn(); HasUnsafe.unsafe_fn(); @@ -72,7 +150,7 @@ static mut STATIC_MUT: Ty = Ty { a: 0 }; fn main() { let x = STATIC_MUT.a; - //^^^^^^^^^^ error: this operation is unsafe and requires an unsafe function or block + //^^^^^^^^^^💡 error: this operation is unsafe and requires an unsafe function or block unsafe { let x = STATIC_MUT.a; } @@ -94,9 +172,298 @@ extern "rust-intrinsic" { fn main() { let _ = bitreverse(12); let _ = floorf32(12.0); - //^^^^^^^^^^^^^^ error: this operation is unsafe and requires an unsafe function or block + //^^^^^^^^^^^^^^💡 error: this operation is unsafe and requires an unsafe function or block } "#, ); } + + #[test] + fn add_unsafe_block_when_dereferencing_a_raw_pointer() { + check_fix( + r#" +fn main() { + let x = &5 as *const usize; + let z = *x$0; +} +"#, + r#" +fn main() { + let x = &5 as *const usize; + let z = unsafe { *x }; +} +"#, + ); + } + + #[test] + fn add_unsafe_block_when_calling_unsafe_function() { + check_fix( + r#" +unsafe fn func() { + let x = &5 as *const usize; + let z = *x; +} +fn main() { + func$0(); +} +"#, + r#" +unsafe fn func() { + let x = &5 as *const usize; + let z = *x; +} +fn main() { + unsafe { func() }; +} +"#, + ) + } + + #[test] + fn add_unsafe_block_when_calling_unsafe_method() { + check_fix( + r#" +struct S(usize); +impl S { + unsafe fn func(&self) { + let x = &self.0 as *const usize; + let z = *x; + } +} +fn main() { + let s = S(5); + s.func$0(); +} +"#, + r#" +struct S(usize); +impl S { + unsafe fn func(&self) { + let x = &self.0 as *const usize; + let z = *x; + } +} +fn main() { + let s = S(5); + unsafe { s.func() }; +} +"#, + ) + } + + #[test] + fn add_unsafe_block_when_accessing_mutable_static() { + check_fix( + r#" +struct Ty { + a: u8, +} + +static mut STATIC_MUT: Ty = Ty { a: 0 }; + +fn main() { + let x = STATIC_MUT$0.a; +} +"#, + r#" +struct Ty { + a: u8, +} + +static mut STATIC_MUT: Ty = Ty { a: 0 }; + +fn main() { + let x = unsafe { STATIC_MUT.a }; +} +"#, + ) + } + + #[test] + fn add_unsafe_block_when_calling_unsafe_intrinsic() { + check_fix( + r#" +extern "rust-intrinsic" { + pub fn floorf32(x: f32) -> f32; +} + +fn main() { + let _ = floorf32$0(12.0); +} +"#, + r#" +extern "rust-intrinsic" { + pub fn floorf32(x: f32) -> f32; +} + +fn main() { + let _ = unsafe { floorf32(12.0) }; +} +"#, + ) + } + + #[test] + fn 
unsafe_expr_as_a_receiver_of_a_method_call() { + check_fix( + r#" +unsafe fn foo() -> String { + "string".to_string() +} + +fn main() { + foo$0().len(); +} +"#, + r#" +unsafe fn foo() -> String { + "string".to_string() +} + +fn main() { + unsafe { foo().len() }; +} +"#, + ) + } + + #[test] + fn unsafe_expr_as_an_argument_of_a_method_call() { + check_fix( + r#" +static mut STATIC_MUT: u8 = 0; + +fn main() { + let mut v = vec![]; + v.push(STATIC_MUT$0); +} +"#, + r#" +static mut STATIC_MUT: u8 = 0; + +fn main() { + let mut v = vec![]; + v.push(unsafe { STATIC_MUT }); +} +"#, + ) + } + + #[test] + fn unsafe_expr_as_left_hand_side_of_assignment() { + check_fix( + r#" +static mut STATIC_MUT: u8 = 0; + +fn main() { + STATIC_MUT$0 = 1; +} +"#, + r#" +static mut STATIC_MUT: u8 = 0; + +fn main() { + unsafe { STATIC_MUT = 1 }; +} +"#, + ) + } + + #[test] + fn unsafe_expr_as_right_hand_side_of_assignment() { + check_fix( + r#" +static mut STATIC_MUT: u8 = 0; + +fn main() { + let x; + x = STATIC_MUT$0; +} +"#, + r#" +static mut STATIC_MUT: u8 = 0; + +fn main() { + let x; + x = unsafe { STATIC_MUT }; +} +"#, + ) + } + + #[test] + fn unsafe_expr_in_binary_plus() { + check_fix( + r#" +static mut STATIC_MUT: u8 = 0; + +fn main() { + let x = STATIC_MUT$0 + 1; +} +"#, + r#" +static mut STATIC_MUT: u8 = 0; + +fn main() { + let x = unsafe { STATIC_MUT } + 1; +} +"#, + ) + } + + #[test] + fn ref_to_unsafe_expr() { + check_fix( + r#" +static mut STATIC_MUT: u8 = 0; + +fn main() { + let x = &STATIC_MUT$0; +} +"#, + r#" +static mut STATIC_MUT: u8 = 0; + +fn main() { + let x = unsafe { &STATIC_MUT }; +} +"#, + ) + } + + #[test] + fn ref_ref_to_unsafe_expr() { + check_fix( + r#" +static mut STATIC_MUT: u8 = 0; + +fn main() { + let x = &&STATIC_MUT$0; +} +"#, + r#" +static mut STATIC_MUT: u8 = 0; + +fn main() { + let x = unsafe { &&STATIC_MUT }; +} +"#, + ) + } + + #[test] + fn unsafe_expr_in_macro_call() { + check_no_fix( + r#" +unsafe fn foo() -> u8 { + 0 +} + +fn main() { + let x = format!("foo: {}", foo$0()); +} + "#, + ) + } } diff --git a/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/crates/ide-diagnostics/src/handlers/mutability_errors.rs index 84189a5d560f9..96470265d11d1 100644 --- a/crates/ide-diagnostics/src/handlers/mutability_errors.rs +++ b/crates/ide-diagnostics/src/handlers/mutability_errors.rs @@ -505,6 +505,30 @@ fn main() { ); } + #[test] + fn initialization_is_not_mutation_in_loop() { + check_diagnostics( + r#" +fn main() { + let a; + loop { + let c @ ( + mut b, + //^^^^^ 💡 weak: variable does not need to be mutable + mut d + //^^^^^ 💡 weak: variable does not need to be mutable + ); + a = 1; + //^^^^^ 💡 error: cannot mutate immutable variable `a` + b = 1; + c = (2, 3); + d = 3; + } +} +"#, + ); + } + #[test] fn function_arguments_are_initialized() { check_diagnostics( diff --git a/crates/ide-diagnostics/src/handlers/no_such_field.rs b/crates/ide-diagnostics/src/handlers/no_such_field.rs index 8da04e628d670..24c521ed1a8a4 100644 --- a/crates/ide-diagnostics/src/handlers/no_such_field.rs +++ b/crates/ide-diagnostics/src/handlers/no_such_field.rs @@ -1,4 +1,4 @@ -use hir::{db::AstDatabase, HasSource, HirDisplay, Semantics}; +use hir::{db::ExpandDatabase, HasSource, HirDisplay, Semantics}; use ide_db::{base_db::FileId, source_change::SourceChange, RootDatabase}; use syntax::{ ast::{self, edit::IndentLevel, make}, diff --git a/crates/ide-diagnostics/src/handlers/private_field.rs b/crates/ide-diagnostics/src/handlers/private_field.rs index e630ae36866d3..be83ad6aaadaa 100644 --- 
a/crates/ide-diagnostics/src/handlers/private_field.rs +++ b/crates/ide-diagnostics/src/handlers/private_field.rs @@ -62,6 +62,26 @@ mod module { fn main(s: module::Struct) { s.field; } +"#, + ); + } + + #[test] + fn block_module_madness() { + check_diagnostics( + r#" +fn main() { + let strukt = { + use crate as ForceParentBlockDefMap; + { + pub struct Struct { + field: (), + } + Struct { field: () } + } + }; + strukt.field; +} "#, ); } diff --git a/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs b/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs index a0c276cc3328b..9b1c65983e615 100644 --- a/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs +++ b/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs @@ -1,4 +1,4 @@ -use hir::{db::AstDatabase, InFile}; +use hir::{db::ExpandDatabase, InFile}; use ide_db::source_change::SourceChange; use syntax::{ ast::{self, HasArgList}, diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs index b57a13e53e6b6..4abc25a28fbc0 100644 --- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs +++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs @@ -1,5 +1,5 @@ use either::Either; -use hir::{db::AstDatabase, HirDisplay, InFile, Type}; +use hir::{db::ExpandDatabase, HirDisplay, InFile, Type}; use ide_db::{famous_defs::FamousDefs, source_change::SourceChange}; use syntax::{ ast::{self, BlockExpr, ExprStmt}, diff --git a/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/crates/ide-diagnostics/src/handlers/unresolved_field.rs index 7de03416e5625..cefa74e523e8f 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_field.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_field.rs @@ -1,4 +1,4 @@ -use hir::{db::AstDatabase, HirDisplay, InFile}; +use hir::{db::ExpandDatabase, HirDisplay, InFile}; use ide_db::{ assists::{Assist, AssistId, AssistKind}, base_db::FileRange, diff --git a/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/crates/ide-diagnostics/src/handlers/unresolved_method.rs index 4b0e64cb896d5..f3ec6efa75215 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_method.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_method.rs @@ -1,4 +1,4 @@ -use hir::{db::AstDatabase, HirDisplay}; +use hir::{db::ExpandDatabase, HirDisplay}; use ide_db::{ assists::{Assist, AssistId, AssistKind}, base_db::FileRange, diff --git a/crates/ide-diagnostics/src/handlers/unresolved_module.rs b/crates/ide-diagnostics/src/handlers/unresolved_module.rs index 91395f1d841ad..94614f11c3349 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_module.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_module.rs @@ -1,4 +1,4 @@ -use hir::db::AstDatabase; +use hir::db::ExpandDatabase; use ide_db::{assists::Assist, base_db::AnchoredPathBuf, source_change::FileSystemEdit}; use itertools::Itertools; use syntax::AstNode; diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index f6c9b79c30c3c..71f136b8c9030 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -29,6 +29,7 @@ mod handlers { pub(crate) mod break_outside_of_loop; pub(crate) mod expected_function; pub(crate) mod inactive_code; + pub(crate) mod incoherent_impl; pub(crate) mod incorrect_case; pub(crate) mod invalid_derive_target; pub(crate) mod macro_error; @@ -254,6 +255,7 @@ pub fn diagnostics( AnyDiagnostic::BreakOutsideOfLoop(d) => 
handlers::break_outside_of_loop::break_outside_of_loop(&ctx, &d), AnyDiagnostic::ExpectedFunction(d) => handlers::expected_function::expected_function(&ctx, &d), AnyDiagnostic::IncorrectCase(d) => handlers::incorrect_case::incorrect_case(&ctx, &d), + AnyDiagnostic::IncoherentImpl(d) => handlers::incoherent_impl::incoherent_impl(&ctx, &d), AnyDiagnostic::MacroError(d) => handlers::macro_error::macro_error(&ctx, &d), AnyDiagnostic::MalformedDerive(d) => handlers::malformed_derive::malformed_derive(&ctx, &d), AnyDiagnostic::MismatchedArgCount(d) => handlers::mismatched_arg_count::mismatched_arg_count(&ctx, &d), diff --git a/crates/ide/src/goto_implementation.rs b/crates/ide/src/goto_implementation.rs index 190ab80ba0ff3..a1a119629a94e 100644 --- a/crates/ide/src/goto_implementation.rs +++ b/crates/ide/src/goto_implementation.rs @@ -297,6 +297,7 @@ impl Foo {} //- /lib.rs crate:main deps:core fn foo(_: bool$0) {{}} //- /libcore.rs crate:core +#![rustc_coherence_is_core] #[lang = "bool"] impl bool {} //^^^^ diff --git a/crates/ide/src/goto_type_definition.rs b/crates/ide/src/goto_type_definition.rs index 55cdb3200eac9..6d2d0bd635165 100644 --- a/crates/ide/src/goto_type_definition.rs +++ b/crates/ide/src/goto_type_definition.rs @@ -55,7 +55,7 @@ pub(crate) fn goto_type_definition( ty } else { let record_field = ast::RecordPatField::for_field_name_ref(&it)?; - sema.resolve_record_pat_field(&record_field)?.ty(db) + sema.resolve_record_pat_field(&record_field)?.1 } }, _ => return None, diff --git a/crates/ide/src/inlay_hints/adjustment.rs b/crates/ide/src/inlay_hints/adjustment.rs index 729780fa0c919..46505b3044109 100644 --- a/crates/ide/src/inlay_hints/adjustment.rs +++ b/crates/ide/src/inlay_hints/adjustment.rs @@ -31,19 +31,31 @@ pub(super) fn hints( return None; } - // These inherit from the inner expression which would result in duplicate hints - if let ast::Expr::ParenExpr(_) - | ast::Expr::IfExpr(_) - | ast::Expr::BlockExpr(_) - | ast::Expr::MatchExpr(_) = expr - { + // ParenExpr resolve to their contained expressions HIR so they will dupe these hints + if let ast::Expr::ParenExpr(_) = expr { return None; } + if let ast::Expr::BlockExpr(b) = expr { + if !b.is_standalone() { + return None; + } + } let descended = sema.descend_node_into_attributes(expr.clone()).pop(); let desc_expr = descended.as_ref().unwrap_or(expr); let adjustments = sema.expr_adjustments(desc_expr).filter(|it| !it.is_empty())?; + if let ast::Expr::BlockExpr(_) | ast::Expr::IfExpr(_) | ast::Expr::MatchExpr(_) = desc_expr { + if let [Adjustment { kind: Adjust::Deref(_), source, .. 
}, Adjustment { kind: Adjust::Borrow(_), source: _, target }] = + &*adjustments + { + // Don't show unnecessary reborrows for these, they will just repeat the inner ones again + if source == target { + return None; + } + } + } + let (postfix, needs_outer_parens, needs_inner_parens) = mode_and_needs_parens_for_adjustment_hints(expr, config.adjustment_hints_mode); @@ -67,6 +79,7 @@ pub(super) fn hints( for Adjustment { source, target, kind } in iter { if source == target { + cov_mark::hit!(same_type_adjustment); continue; } @@ -251,7 +264,7 @@ mod tests { check_with_config( InlayHintsConfig { adjustment_hints: AdjustmentHints::Always, ..DISABLED_CONFIG }, r#" -//- minicore: coerce_unsized, fn +//- minicore: coerce_unsized, fn, eq fn main() { let _: u32 = loop {}; //^^^^^^^ @@ -332,7 +345,7 @@ fn main() { loop {} //^^^^^^^ }; - let _: &mut [u32] = match () { () => &mut [] } + let _: &mut [u32] = match () { () => &mut [] }; //^^^^^^^ //^^^^^^^&mut $ //^^^^^^^* @@ -341,6 +354,12 @@ fn main() { //^^^^^^^^^^ //^^^^^^^^^^&mut $ //^^^^^^^^^^* + () == (); + // ^^& + // ^^& + (()) == {()}; + // ^^& + // ^^^^& } #[derive(Copy, Clone)] @@ -363,7 +382,7 @@ impl Struct { ..DISABLED_CONFIG }, r#" -//- minicore: coerce_unsized, fn +//- minicore: coerce_unsized, fn, eq fn main() { Struct.consume(); @@ -419,7 +438,7 @@ fn main() { loop {} //^^^^^^^. }; - let _: &mut [u32] = match () { () => &mut [] } + let _: &mut [u32] = match () { () => &mut [] }; //^^^^^^^( //^^^^^^^) //^^^^^^^.* @@ -432,6 +451,12 @@ fn main() { //^^^^^^^^^^.* //^^^^^^^^^^.&mut //^^^^^^^^^^. + () == (); + // ^^.& + // ^^.& + (()) == {()}; + // ^^.& + // ^^^^.& } #[derive(Copy, Clone)] @@ -499,6 +524,7 @@ fn main() { #[test] fn never_to_never_is_never_shown() { + cov_mark::check!(same_type_adjustment); check_with_config( InlayHintsConfig { adjustment_hints: AdjustmentHints::Always, ..DISABLED_CONFIG }, r#" diff --git a/crates/ide/src/inlay_hints/chaining.rs b/crates/ide/src/inlay_hints/chaining.rs index 0a7513e465a57..1e1771259b1ba 100644 --- a/crates/ide/src/inlay_hints/chaining.rs +++ b/crates/ide/src/inlay_hints/chaining.rs @@ -435,7 +435,7 @@ fn main() { file_id: FileId( 1, ), - range: 3386..3394, + range: 3415..3423, }, ), tooltip: "", @@ -448,7 +448,7 @@ fn main() { file_id: FileId( 1, ), - range: 3418..3422, + range: 3447..3451, }, ), tooltip: "", @@ -468,7 +468,7 @@ fn main() { file_id: FileId( 1, ), - range: 3386..3394, + range: 3415..3423, }, ), tooltip: "", @@ -481,7 +481,7 @@ fn main() { file_id: FileId( 1, ), - range: 3418..3422, + range: 3447..3451, }, ), tooltip: "", @@ -501,7 +501,7 @@ fn main() { file_id: FileId( 1, ), - range: 3386..3394, + range: 3415..3423, }, ), tooltip: "", @@ -514,7 +514,7 @@ fn main() { file_id: FileId( 1, ), - range: 3418..3422, + range: 3447..3451, }, ), tooltip: "", diff --git a/crates/ide/src/signature_help.rs b/crates/ide/src/signature_help.rs index 2c08c457b338c..4b2c139f6f455 100644 --- a/crates/ide/src/signature_help.rs +++ b/crates/ide/src/signature_help.rs @@ -16,7 +16,7 @@ use stdx::format_to; use syntax::{ algo, ast::{self, HasArgList}, - match_ast, AstNode, Direction, SyntaxToken, TextRange, TextSize, + match_ast, AstNode, Direction, SyntaxElementChildren, SyntaxToken, TextRange, TextSize, }; use crate::RootDatabase; @@ -102,6 +102,20 @@ pub(crate) fn signature_help(db: &RootDatabase, position: FilePosition) -> Optio } return signature_help_for_record_lit(&sema, record, token); }, + ast::RecordPat(record) => { + let cursor_outside = record.record_pat_field_list().and_then(|list| 
list.r_curly_token()).as_ref() == Some(&token); + if cursor_outside { + continue; + } + return signature_help_for_record_pat(&sema, record, token); + }, + ast::TupleStructPat(tuple_pat) => { + let cursor_outside = tuple_pat.r_paren_token().as_ref() == Some(&token); + if cursor_outside { + continue; + } + return signature_help_for_tuple_struct_pat(&sema, tuple_pat, token); + }, _ => (), } } @@ -346,10 +360,111 @@ fn signature_help_for_record_lit( record: ast::RecordExpr, token: SyntaxToken, ) -> Option { - let active_parameter = record - .record_expr_field_list()? + signature_help_for_record_( + sema, + record.record_expr_field_list()?.syntax().children_with_tokens(), + &record.path()?, + record + .record_expr_field_list()? + .fields() + .filter_map(|field| sema.resolve_record_field(&field)) + .map(|(field, _, ty)| (field, ty)), + token, + ) +} + +fn signature_help_for_record_pat( + sema: &Semantics<'_, RootDatabase>, + record: ast::RecordPat, + token: SyntaxToken, +) -> Option { + signature_help_for_record_( + sema, + record.record_pat_field_list()?.syntax().children_with_tokens(), + &record.path()?, + record + .record_pat_field_list()? + .fields() + .filter_map(|field| sema.resolve_record_pat_field(&field)), + token, + ) +} + +fn signature_help_for_tuple_struct_pat( + sema: &Semantics<'_, RootDatabase>, + pat: ast::TupleStructPat, + token: SyntaxToken, +) -> Option { + let rest_pat = pat.fields().find(|it| matches!(it, ast::Pat::RestPat(_))); + let is_left_of_rest_pat = + rest_pat.map_or(true, |it| token.text_range().start() < it.syntax().text_range().end()); + + let mut res = SignatureHelp { + doc: None, + signature: String::new(), + parameters: vec![], + active_parameter: None, + }; + + let db = sema.db; + let path_res = sema.resolve_path(&pat.path()?)?; + let fields: Vec<_> = if let PathResolution::Def(ModuleDef::Variant(variant)) = path_res { + let en = variant.parent_enum(db); + + res.doc = en.docs(db).map(|it| it.into()); + format_to!(res.signature, "enum {}::{} (", en.name(db), variant.name(db)); + variant.fields(db) + } else { + let adt = match path_res { + PathResolution::SelfType(imp) => imp.self_ty(db).as_adt()?, + PathResolution::Def(ModuleDef::Adt(adt)) => adt, + _ => return None, + }; + + match adt { + hir::Adt::Struct(it) => { + res.doc = it.docs(db).map(|it| it.into()); + format_to!(res.signature, "struct {} (", it.name(db)); + it.fields(db) + } + _ => return None, + } + }; + let commas = pat .syntax() .children_with_tokens() + .filter_map(syntax::NodeOrToken::into_token) + .filter(|t| t.kind() == syntax::T![,]); + res.active_parameter = Some(if is_left_of_rest_pat { + commas.take_while(|t| t.text_range().start() <= token.text_range().start()).count() + } else { + let n_commas = commas + .collect::>() + .into_iter() + .rev() + .take_while(|t| t.text_range().start() > token.text_range().start()) + .count(); + fields.len().saturating_sub(1).saturating_sub(n_commas) + }); + + let mut buf = String::new(); + for ty in fields.into_iter().map(|it| it.ty(db)) { + format_to!(buf, "{}", ty.display_truncated(db, Some(20))); + res.push_call_param(&buf); + buf.clear(); + } + res.signature.push_str(")"); + Some(res) +} + +fn signature_help_for_record_( + sema: &Semantics<'_, RootDatabase>, + field_list_children: SyntaxElementChildren, + path: &ast::Path, + fields2: impl Iterator, + token: SyntaxToken, +) -> Option { + let active_parameter = field_list_children .filter_map(syntax::NodeOrToken::into_token) .filter(|t| t.kind() == syntax::T![,]) .take_while(|t| t.text_range().start() <= 
token.text_range().start()) @@ -365,7 +480,7 @@ fn signature_help_for_record_lit( let fields; let db = sema.db; - let path_res = sema.resolve_path(&record.path()?)?; + let path_res = sema.resolve_path(path)?; if let PathResolution::Def(ModuleDef::Variant(variant)) = path_res { fields = variant.fields(db); let en = variant.parent_enum(db); @@ -397,8 +512,7 @@ fn signature_help_for_record_lit( let mut fields = fields.into_iter().map(|field| (field.name(db), Some(field))).collect::>(); let mut buf = String::new(); - for field in record.record_expr_field_list()?.fields() { - let Some((field, _, ty)) = sema.resolve_record_field(&field) else { continue }; + for (field, ty) in fields2 { let name = field.name(db); format_to!(buf, "{name}: {}", ty.display_truncated(db, Some(20))); res.push_record_field(&buf); @@ -439,6 +553,7 @@ mod tests { (database, FilePosition { file_id, offset }) } + #[track_caller] fn check(ra_fixture: &str, expect: Expect) { let fixture = format!( r#" @@ -890,6 +1005,119 @@ fn main() { ); } + #[test] + fn tuple_struct_pat() { + check( + r#" +/// A cool tuple struct +struct S(u32, i32); +fn main() { + let S(0, $0); +} +"#, + expect![[r#" + A cool tuple struct + ------ + struct S (u32, i32) + --- ^^^ + "#]], + ); + } + + #[test] + fn tuple_struct_pat_rest() { + check( + r#" +/// A cool tuple struct +struct S(u32, i32, f32, u16); +fn main() { + let S(0, .., $0); +} +"#, + expect![[r#" + A cool tuple struct + ------ + struct S (u32, i32, f32, u16) + --- --- --- ^^^ + "#]], + ); + check( + r#" +/// A cool tuple struct +struct S(u32, i32, f32, u16, u8); +fn main() { + let S(0, .., $0, 0); +} +"#, + expect![[r#" + A cool tuple struct + ------ + struct S (u32, i32, f32, u16, u8) + --- --- --- ^^^ -- + "#]], + ); + check( + r#" +/// A cool tuple struct +struct S(u32, i32, f32, u16); +fn main() { + let S($0, .., 1); +} +"#, + expect![[r#" + A cool tuple struct + ------ + struct S (u32, i32, f32, u16) + ^^^ --- --- --- + "#]], + ); + check( + r#" +/// A cool tuple struct +struct S(u32, i32, f32, u16, u8); +fn main() { + let S(1, .., 1, $0, 2); +} +"#, + expect![[r#" + A cool tuple struct + ------ + struct S (u32, i32, f32, u16, u8) + --- --- --- ^^^ -- + "#]], + ); + check( + r#" +/// A cool tuple struct +struct S(u32, i32, f32, u16); +fn main() { + let S(1, $0.., 1); +} +"#, + expect![[r#" + A cool tuple struct + ------ + struct S (u32, i32, f32, u16) + --- ^^^ --- --- + "#]], + ); + check( + r#" +/// A cool tuple struct +struct S(u32, i32, f32, u16); +fn main() { + let S(1, ..$0, 1); +} +"#, + expect![[r#" + A cool tuple struct + ------ + struct S (u32, i32, f32, u16) + --- ^^^ --- --- + "#]], + ); + } + #[test] fn generic_struct() { check( @@ -1550,6 +1778,29 @@ impl S { ); } + #[test] + fn record_pat() { + check( + r#" +struct Strukt { + t: T, + u: U, + unit: (), +} +fn f() { + let Strukt { + u: 0, + $0 + } +} +"#, + expect![[r#" + struct Strukt { u: i32, t: T, unit: () } + ------ ^^^^ -------- + "#]], + ); + } + #[test] fn test_enum_in_nested_method_in_lambda() { check( diff --git a/crates/parser/src/grammar/patterns.rs b/crates/parser/src/grammar/patterns.rs index abcefffa23f01..5f4977886f6eb 100644 --- a/crates/parser/src/grammar/patterns.rs +++ b/crates/parser/src/grammar/patterns.rs @@ -431,14 +431,15 @@ fn slice_pat(p: &mut Parser<'_>) -> CompletedMarker { fn pat_list(p: &mut Parser<'_>, ket: SyntaxKind) { while !p.at(EOF) && !p.at(ket) { - if !p.at_ts(PAT_TOP_FIRST) { - p.error("expected a pattern"); - break; - } - pattern_top(p); - if !p.at(ket) { - p.expect(T![,]); + if 
!p.at(T![,]) {
+            if p.at_ts(PAT_TOP_FIRST) {
+                p.error(format!("expected {:?}, got {:?}", T![,], p.current()));
+            } else {
+                break;
+            }
+        } else {
+            p.bump(T![,]);
+        }
     }
 }
diff --git a/crates/project-model/src/build_scripts.rs b/crates/project-model/src/build_scripts.rs
index 6df1273edd65d..4e5d640f175e4 100644
--- a/crates/project-model/src/build_scripts.rs
+++ b/crates/project-model/src/build_scripts.rs
@@ -429,8 +429,9 @@ impl WorkspaceBuildScripts {
         for p in rustc.packages() {
             let package = &rustc[p];
             if package.targets.iter().any(|&it| rustc[it].is_proc_macro) {
-                if let Some((_, path)) =
-                    proc_macro_dylibs.iter().find(|(name, _)| *name == package.name)
+                if let Some((_, path)) = proc_macro_dylibs
+                    .iter()
+                    .find(|(name, _)| *name.trim_start_matches("lib") == package.name)
                 {
                     bs.outputs[p].proc_macro_dylib_path = Some(path.clone());
                 }
diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs
index 732adc50b5001..01162b1a8ba0c 100644
--- a/crates/project-model/src/cargo_workspace.rs
+++ b/crates/project-model/src/cargo_workspace.rs
@@ -50,7 +50,7 @@ impl ops::Index for CargoWorkspace {
 
 /// Describes how to set the rustc source directory.
 #[derive(Clone, Debug, PartialEq, Eq)]
-pub enum RustcSource {
+pub enum RustLibSource {
     /// Explicit path for the rustc source directory.
     Path(AbsPathBuf),
     /// Try to automatically detect where the rustc source directory is.
@@ -95,10 +95,10 @@ pub struct CargoConfig {
     /// rustc target
     pub target: Option<String>,
     /// Sysroot loading behavior
-    pub sysroot: Option<RustcSource>,
+    pub sysroot: Option<RustLibSource>,
     pub sysroot_src: Option<AbsPathBuf>,
     /// rustc private crate source
-    pub rustc_source: Option<RustcSource>,
+    pub rustc_source: Option<RustLibSource>,
    /// crates to disable `#[cfg(test)]` on
     pub unset_test_crates: UnsetTestCrates,
     /// Invoke `cargo check` through the RUSTC_WRAPPER.
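
A minimal sketch, not part of the patch, of how a consumer of `project-model` might pick a sysroot source after the `RustcSource` to `RustLibSource` rename above. `CargoConfig`, `RustLibSource::Path`, and `RustLibSource::Discover` are taken from the hunk; the `choose_sysroot` helper and its `explicit_path` parameter are illustrative assumptions only.

use paths::AbsPathBuf;
use project_model::{CargoConfig, RustLibSource};

// Hypothetical helper: prefer an explicitly configured sysroot path, otherwise
// fall back to letting rust-analyzer discover the sysroot on its own.
fn choose_sysroot(explicit_path: Option<AbsPathBuf>) -> CargoConfig {
    let mut config = CargoConfig::default();
    config.sysroot = Some(match explicit_path {
        Some(path) => RustLibSource::Path(path),
        None => RustLibSource::Discover,
    });
    config
}
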
diff --git a/crates/project-model/src/lib.rs b/crates/project-model/src/lib.rs index 9b6a71db81145..70cb71ae3bde8 100644 --- a/crates/project-model/src/lib.rs +++ b/crates/project-model/src/lib.rs @@ -44,7 +44,7 @@ pub use crate::{ build_scripts::WorkspaceBuildScripts, cargo_workspace::{ CargoConfig, CargoFeatures, CargoWorkspace, Package, PackageData, PackageDependency, - RustcSource, Target, TargetData, TargetKind, UnsetTestCrates, + RustLibSource, Target, TargetData, TargetKind, UnsetTestCrates, }, manifest_path::ManifestPath, project_json::{ProjectJson, ProjectJsonData}, diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs index 749eee531eed1..3754accbb03d8 100644 --- a/crates/project-model/src/tests.rs +++ b/crates/project-model/src/tests.rs @@ -24,8 +24,8 @@ fn load_cargo_with_overrides(file: &str, cfg_overrides: CfgOverrides) -> CrateGr let project_workspace = ProjectWorkspace::Cargo { cargo: cargo_workspace, build_scripts: WorkspaceBuildScripts::default(), - sysroot: None, - rustc: None, + sysroot: Err(None), + rustc: Err(None), rustc_cfg: Vec::new(), cfg_overrides, toolchain: None, @@ -37,7 +37,7 @@ fn load_cargo_with_overrides(file: &str, cfg_overrides: CfgOverrides) -> CrateGr fn load_rust_project(file: &str) -> CrateGraph { let data = get_test_json_file(file); let project = rooted_project_json(data); - let sysroot = Some(get_fake_sysroot()); + let sysroot = Ok(get_fake_sysroot()); let project_workspace = ProjectWorkspace::Json { project, sysroot, rustc_cfg: Vec::new() }; to_crate_graph(project_workspace) } diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index faa6816fdc203..d1e53e12eebb5 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -17,7 +17,7 @@ use stdx::{always, hash::NoHashHashMap}; use crate::{ build_scripts::BuildScriptOutput, - cargo_workspace::{DepKind, PackageData, RustcSource}, + cargo_workspace::{DepKind, PackageData, RustLibSource}, cfg_flag::CfgFlag, rustc_cfg, sysroot::SysrootCrate, @@ -69,8 +69,8 @@ pub enum ProjectWorkspace { Cargo { cargo: CargoWorkspace, build_scripts: WorkspaceBuildScripts, - sysroot: Option, - rustc: Option<(CargoWorkspace, WorkspaceBuildScripts)>, + sysroot: Result>, + rustc: Result<(CargoWorkspace, WorkspaceBuildScripts), Option>, /// Holds cfg flags for the current target. We get those by running /// `rustc --print cfg`. /// @@ -82,7 +82,7 @@ pub enum ProjectWorkspace { target_layout: Result, }, /// Project workspace was manually specified using a `rust-project.json` file. - Json { project: ProjectJson, sysroot: Option, rustc_cfg: Vec }, + Json { project: ProjectJson, sysroot: Result>, rustc_cfg: Vec }, // FIXME: The primary limitation of this approach is that the set of detached files needs to be fixed at the beginning. // That's not the end user experience we should strive for. // Ideally, you should be able to just open a random detached file in existing cargo projects, and get the basic features working. @@ -93,7 +93,11 @@ pub enum ProjectWorkspace { // // /// Project with a set of disjoint files, not belonging to any particular workspace. /// Backed by basic sysroot crates for basic completion and highlighting. 
- DetachedFiles { files: Vec, sysroot: Option, rustc_cfg: Vec }, + DetachedFiles { + files: Vec, + sysroot: Result>, + rustc_cfg: Vec, + }, } impl fmt::Debug for ProjectWorkspace { @@ -113,7 +117,7 @@ impl fmt::Debug for ProjectWorkspace { .debug_struct("Cargo") .field("root", &cargo.workspace_root().file_name()) .field("n_packages", &cargo.packages().len()) - .field("sysroot", &sysroot.is_some()) + .field("sysroot", &sysroot.is_ok()) .field( "n_rustc_compiler_crates", &rustc.as_ref().map_or(0, |(rc, _)| rc.packages().len()), @@ -126,7 +130,7 @@ impl fmt::Debug for ProjectWorkspace { ProjectWorkspace::Json { project, sysroot, rustc_cfg } => { let mut debug_struct = f.debug_struct("Json"); debug_struct.field("n_crates", &project.n_crates()); - if let Some(sysroot) = sysroot { + if let Ok(sysroot) = sysroot { debug_struct.field("n_sysroot_crates", &sysroot.crates().len()); } debug_struct.field("n_rustc_cfg", &rustc_cfg.len()); @@ -135,7 +139,7 @@ impl fmt::Debug for ProjectWorkspace { ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => f .debug_struct("DetachedFiles") .field("n_files", &files.len()) - .field("sysroot", &sysroot.is_some()) + .field("sysroot", &sysroot.is_ok()) .field("n_rustc_cfg", &rustc_cfg.len()) .finish(), } @@ -191,93 +195,81 @@ impl ProjectWorkspace { let cargo = CargoWorkspace::new(meta); let sysroot = match (&config.sysroot, &config.sysroot_src) { - (Some(RustcSource::Path(path)), None) => { - match Sysroot::with_sysroot_dir(path.clone()) { - Ok(it) => Some(it), - Err(e) => { - tracing::error!(%e, "Failed to find sysroot at {}.", path.display()); - None - } - } + (Some(RustLibSource::Path(path)), None) => { + Sysroot::with_sysroot_dir(path.clone()).map_err(|e| { + Some(format!("Failed to find sysroot at {}:{e}", path.display())) + }) } - (Some(RustcSource::Discover), None) => { - match Sysroot::discover(cargo_toml.parent(), &config.extra_env) { - Ok(it) => Some(it), - Err(e) => { - tracing::error!( - %e, - "Failed to find sysroot for Cargo.toml file {}. Is rust-src installed?", - cargo_toml.display() - ); - None - } - } + (Some(RustLibSource::Discover), None) => { + Sysroot::discover(cargo_toml.parent(), &config.extra_env).map_err(|e| { + Some(format!("Failed to find sysroot for Cargo.toml file {}. Is rust-src installed? {e}", cargo_toml.display())) + }) } - (Some(RustcSource::Path(sysroot)), Some(sysroot_src)) => { - Some(Sysroot::load(sysroot.clone(), sysroot_src.clone())) + (Some(RustLibSource::Path(sysroot)), Some(sysroot_src)) => { + Ok(Sysroot::load(sysroot.clone(), sysroot_src.clone())) } - (Some(RustcSource::Discover), Some(sysroot_src)) => { - match Sysroot::discover_with_src_override( + (Some(RustLibSource::Discover), Some(sysroot_src)) => { + Sysroot::discover_with_src_override( cargo_toml.parent(), &config.extra_env, sysroot_src.clone(), - ) { - Ok(it) => Some(it), - Err(e) => { - tracing::error!( - %e, - "Failed to find sysroot for Cargo.toml file {}. Is rust-src installed?", - cargo_toml.display() - ); - None - } - } + ).map_err(|e| { + Some(format!("Failed to find sysroot for Cargo.toml file {}. Is rust-src installed? 
{e}", cargo_toml.display())) + }) } - (None, _) => None, + (None, _) => Err(None), }; - if let Some(sysroot) = &sysroot { + if let Ok(sysroot) = &sysroot { tracing::info!(workspace = %cargo_toml.display(), src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot"); } let rustc_dir = match &config.rustc_source { - Some(RustcSource::Path(path)) => ManifestPath::try_from(path.clone()).ok(), - Some(RustcSource::Discover) => { - sysroot.as_ref().and_then(Sysroot::discover_rustc) + Some(RustLibSource::Path(path)) => ManifestPath::try_from(path.clone()) + .map_err(|p| { + Some(format!("rustc source path is not absolute: {}", p.display())) + }), + Some(RustLibSource::Discover) => { + sysroot.as_ref().ok().and_then(Sysroot::discover_rustc).ok_or_else(|| { + Some(format!("Failed to discover rustc source for sysroot.")) + }) } - None => None, + None => Err(None), }; - let rustc = match rustc_dir { - Some(rustc_dir) => { - tracing::info!(workspace = %cargo_toml.display(), rustc_dir = %rustc_dir.display(), "Using rustc source"); - match CargoWorkspace::fetch_metadata( - &rustc_dir, - cargo_toml.parent(), - config, - progress, - ) { - Ok(meta) => { - let workspace = CargoWorkspace::new(meta); - let buildscripts = WorkspaceBuildScripts::rustc_crates( - &workspace, - cargo_toml.parent(), - &config.extra_env, - ); - Some((workspace, buildscripts)) - } - Err(e) => { - tracing::error!( - %e, - "Failed to read Cargo metadata from rustc source at {}", - rustc_dir.display() - ); - None - } + let rustc = rustc_dir.and_then(|rustc_dir| { + tracing::info!(workspace = %cargo_toml.display(), rustc_dir = %rustc_dir.display(), "Using rustc source"); + match CargoWorkspace::fetch_metadata( + &rustc_dir, + cargo_toml.parent(), + &CargoConfig { + features: crate::CargoFeatures::default(), + ..config.clone() + }, + progress, + ) { + Ok(meta) => { + let workspace = CargoWorkspace::new(meta); + let buildscripts = WorkspaceBuildScripts::rustc_crates( + &workspace, + cargo_toml.parent(), + &config.extra_env, + ); + Ok((workspace, buildscripts)) + } + Err(e) => { + tracing::error!( + %e, + "Failed to read Cargo metadata from rustc source at {}", + rustc_dir.display() + ); + Err(Some(format!( + "Failed to read Cargo metadata from rustc source at {}: {e}", + rustc_dir.display()) + )) } } - None => None, - }; + }); let rustc_cfg = rustc_cfg::get(Some(&cargo_toml), config.target.as_deref(), &config.extra_env); @@ -313,12 +305,12 @@ impl ProjectWorkspace { extra_env: &FxHashMap, ) -> ProjectWorkspace { let sysroot = match (project_json.sysroot.clone(), project_json.sysroot_src.clone()) { - (Some(sysroot), Some(sysroot_src)) => Some(Sysroot::load(sysroot, sysroot_src)), + (Some(sysroot), Some(sysroot_src)) => Ok(Sysroot::load(sysroot, sysroot_src)), (Some(sysroot), None) => { // assume sysroot is structured like rustup's and guess `sysroot_src` let sysroot_src = sysroot.join("lib").join("rustlib").join("src").join("rust").join("library"); - Some(Sysroot::load(sysroot, sysroot_src)) + Ok(Sysroot::load(sysroot, sysroot_src)) } (None, Some(sysroot_src)) => { // assume sysroot is structured like rustup's and guess `sysroot` @@ -326,11 +318,11 @@ impl ProjectWorkspace { for _ in 0..5 { sysroot.pop(); } - Some(Sysroot::load(sysroot, sysroot_src)) + Ok(Sysroot::load(sysroot, sysroot_src)) } - (None, None) => None, + (None, None) => Err(None), }; - if let Some(sysroot) = &sysroot { + if let Ok(sysroot) = &sysroot { tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), 
"Using sysroot"); } @@ -343,33 +335,23 @@ impl ProjectWorkspace { config: &CargoConfig, ) -> Result { let sysroot = match &config.sysroot { - Some(RustcSource::Path(path)) => match Sysroot::with_sysroot_dir(path.clone()) { - Ok(it) => Some(it), - Err(e) => { - tracing::error!(%e, "Failed to find sysroot at {}.", path.display()); - None - } - }, - Some(RustcSource::Discover) => { + Some(RustLibSource::Path(path)) => Sysroot::with_sysroot_dir(path.clone()) + .map_err(|e| Some(format!("Failed to find sysroot at {}:{e}", path.display()))), + Some(RustLibSource::Discover) => { let dir = &detached_files .first() .and_then(|it| it.parent()) .ok_or_else(|| format_err!("No detached files to load"))?; - match Sysroot::discover(dir, &config.extra_env) { - Ok(it) => Some(it), - Err(e) => { - tracing::error!( - %e, - "Failed to find sysroot for {}. Is rust-src installed?", - dir.display() - ); - None - } - } + Sysroot::discover(dir, &config.extra_env).map_err(|e| { + Some(format!( + "Failed to find sysroot for {}. Is rust-src installed? {e}", + dir.display() + )) + }) } - None => None, + None => Err(None), }; - if let Some(sysroot) = &sysroot { + if let Ok(sysroot) = &sysroot { tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot"); } let rustc_cfg = rustc_cfg::get(None, None, &Default::default()); @@ -450,10 +432,18 @@ impl ProjectWorkspace { } } + pub fn workspace_definition_path(&self) -> Option<&AbsPath> { + match self { + ProjectWorkspace::Cargo { cargo, .. } => Some(cargo.workspace_root()), + ProjectWorkspace::Json { project, .. } => Some(project.path()), + ProjectWorkspace::DetachedFiles { .. } => None, + } + } + pub fn find_sysroot_proc_macro_srv(&self) -> Option { match self { - ProjectWorkspace::Cargo { sysroot: Some(sysroot), .. } - | ProjectWorkspace::Json { sysroot: Some(sysroot), .. } => { + ProjectWorkspace::Cargo { sysroot: Ok(sysroot), .. } + | ProjectWorkspace::Json { sysroot: Ok(sysroot), .. 
} => { let standalone_server_name = format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX); ["libexec", "lib"] @@ -469,7 +459,7 @@ impl ProjectWorkspace { /// The return type contains the path and whether or not /// the root is a member of the current workspace pub fn to_roots(&self) -> Vec { - let mk_sysroot = |sysroot: Option<&Sysroot>, project_root: Option<&AbsPath>| { + let mk_sysroot = |sysroot: Result<&Sysroot, _>, project_root: Option<&AbsPath>| { sysroot.map(|sysroot| PackageRoot { // mark the sysroot as mutable if it is located inside of the project is_local: project_root @@ -592,7 +582,7 @@ impl ProjectWorkspace { load_proc_macro, load, project, - sysroot.as_ref(), + sysroot.as_ref().ok(), extra_env, Err("rust-project.json projects have no target layout set".into()), ), @@ -608,9 +598,9 @@ impl ProjectWorkspace { } => cargo_to_crate_graph( load_proc_macro, load, - rustc, + rustc.as_ref().ok(), cargo, - sysroot.as_ref(), + sysroot.as_ref().ok(), rustc_cfg.clone(), cfg_overrides, build_scripts, @@ -624,7 +614,7 @@ impl ProjectWorkspace { rustc_cfg.clone(), load, files, - sysroot, + sysroot.as_ref().ok(), Err("detached file projects have no target layout set".into()), ) } @@ -786,7 +776,7 @@ fn project_json_to_crate_graph( fn cargo_to_crate_graph( load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult, load: &mut dyn FnMut(&AbsPath) -> Option, - rustc: &Option<(CargoWorkspace, WorkspaceBuildScripts)>, + rustc: Option<&(CargoWorkspace, WorkspaceBuildScripts)>, cargo: &CargoWorkspace, sysroot: Option<&Sysroot>, rustc_cfg: Vec, @@ -932,7 +922,7 @@ fn cargo_to_crate_graph( if has_private { // If the user provided a path to rustc sources, we add all the rustc_private crates // and create dependencies on them for the crates which opt-in to that - if let Some((rustc_workspace, build_scripts)) = rustc { + if let Some((rustc_workspace, rustc_build_scripts)) = rustc { handle_rustc_crates( &mut crate_graph, &mut pkg_to_lib_crate, @@ -945,7 +935,13 @@ fn cargo_to_crate_graph( &pkg_crates, &cfg_options, override_cfg, - build_scripts, + if rustc_workspace.workspace_root() == cargo.workspace_root() { + // the rustc workspace does not use the installed toolchain's proc-macro server + // so we need to make sure we don't use the pre compiled proc-macros there either + build_scripts + } else { + rustc_build_scripts + }, target_layout, ); } @@ -957,7 +953,7 @@ fn detached_files_to_crate_graph( rustc_cfg: Vec, load: &mut dyn FnMut(&AbsPath) -> Option, detached_files: &[AbsPathBuf], - sysroot: &Option, + sysroot: Option<&Sysroot>, target_layout: TargetLayoutLoadResult, ) -> CrateGraph { let _p = profile::span("detached_files_to_crate_graph"); diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index e8c10927d62c4..6ce1de5d32bca 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -7,7 +7,7 @@ use std::{ }; use hir::{ - db::{AstDatabase, DefDatabase, HirDatabase}, + db::{DefDatabase, ExpandDatabase, HirDatabase}, AssocItem, Crate, Function, HasSource, HirDisplay, ModuleDef, }; use hir_def::{ @@ -24,7 +24,7 @@ use ide_db::base_db::{ use itertools::Itertools; use oorandom::Rand32; use profile::{Bytes, StopWatch}; -use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustcSource}; +use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource}; use rayon::prelude::*; use rustc_hash::FxHashSet; use stdx::format_to; @@ -57,7 
+57,7 @@ impl flags::AnalysisStats { let mut cargo_config = CargoConfig::default(); cargo_config.sysroot = match self.no_sysroot { true => None, - false => Some(RustcSource::Discover), + false => Some(RustLibSource::Discover), }; let no_progress = &|_| (); diff --git a/crates/rust-analyzer/src/cli/diagnostics.rs b/crates/rust-analyzer/src/cli/diagnostics.rs index 0721d486ef1f9..4006d023def52 100644 --- a/crates/rust-analyzer/src/cli/diagnostics.rs +++ b/crates/rust-analyzer/src/cli/diagnostics.rs @@ -1,7 +1,7 @@ //! Analyze all modules in a project for diagnostics. Exits with a non-zero //! status code if any errors are found. -use project_model::{CargoConfig, RustcSource}; +use project_model::{CargoConfig, RustLibSource}; use rustc_hash::FxHashSet; use hir::{db::HirDatabase, Crate, Module}; @@ -16,7 +16,7 @@ use crate::cli::{ impl flags::Diagnostics { pub fn run(self) -> anyhow::Result<()> { let mut cargo_config = CargoConfig::default(); - cargo_config.sysroot = Some(RustcSource::Discover); + cargo_config.sysroot = Some(RustLibSource::Discover); let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: !self.disable_build_scripts, with_proc_macro_server: ProcMacroServerChoice::Sysroot, diff --git a/crates/rust-analyzer/src/cli/lsif.rs b/crates/rust-analyzer/src/cli/lsif.rs index 9b5451496c6c5..7f5d084496714 100644 --- a/crates/rust-analyzer/src/cli/lsif.rs +++ b/crates/rust-analyzer/src/cli/lsif.rs @@ -13,7 +13,7 @@ use ide_db::LineIndexDatabase; use ide_db::base_db::salsa::{self, ParallelDatabase}; use ide_db::line_index::WideEncoding; use lsp_types::{self, lsif}; -use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustcSource}; +use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource}; use vfs::{AbsPathBuf, Vfs}; use crate::cli::load_cargo::ProcMacroServerChoice; @@ -290,7 +290,7 @@ impl flags::Lsif { eprintln!("Generating LSIF started..."); let now = Instant::now(); let mut cargo_config = CargoConfig::default(); - cargo_config.sysroot = Some(RustcSource::Discover); + cargo_config.sysroot = Some(RustLibSource::Discover); let no_progress = &|_| (); let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: true, diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs index df5c26cf77a94..3e5e40750e9ca 100644 --- a/crates/rust-analyzer/src/cli/scip.rs +++ b/crates/rust-analyzer/src/cli/scip.rs @@ -15,7 +15,7 @@ use ide::{ TokenStaticData, }; use ide_db::LineIndexDatabase; -use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustcSource}; +use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource}; use scip::types as scip_types; use std::env; @@ -30,7 +30,7 @@ impl flags::Scip { eprintln!("Generating SCIP start..."); let now = Instant::now(); let mut cargo_config = CargoConfig::default(); - cargo_config.sysroot = Some(RustcSource::Discover); + cargo_config.sysroot = Some(RustLibSource::Discover); let no_progress = &|s| (eprintln!("rust-analyzer: Loading {s}")); let load_cargo_config = LoadCargoConfig { diff --git a/crates/rust-analyzer/src/cli/ssr.rs b/crates/rust-analyzer/src/cli/ssr.rs index 35a874f89207b..82a769347df04 100644 --- a/crates/rust-analyzer/src/cli/ssr.rs +++ b/crates/rust-analyzer/src/cli/ssr.rs @@ -1,7 +1,7 @@ //! Applies structured search replace rules from the command line. 
use ide_ssr::MatchFinder; -use project_model::{CargoConfig, RustcSource}; +use project_model::{CargoConfig, RustLibSource}; use crate::cli::{ flags, @@ -13,7 +13,7 @@ impl flags::Ssr { pub fn run(self) -> Result<()> { use ide_db::base_db::SourceDatabaseExt; let mut cargo_config = CargoConfig::default(); - cargo_config.sysroot = Some(RustcSource::Discover); + cargo_config.sysroot = Some(RustLibSource::Discover); let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: true, with_proc_macro_server: ProcMacroServerChoice::Sysroot, diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 75233dbb2abec..c35cce103fab6 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -22,7 +22,7 @@ use ide_db::{ use itertools::Itertools; use lsp_types::{ClientCapabilities, MarkupKind}; use project_model::{ - CargoConfig, CargoFeatures, ProjectJson, ProjectJsonData, ProjectManifest, RustcSource, + CargoConfig, CargoFeatures, ProjectJson, ProjectJsonData, ProjectManifest, RustLibSource, UnsetTestCrates, }; use rustc_hash::{FxHashMap, FxHashSet}; @@ -272,7 +272,6 @@ config_data! { /// The warnings will be indicated by a blue squiggly underline in code /// and a blue icon in the `Problems Panel`. diagnostics_warningsAsInfo: Vec = "[]", - /// These directories will be ignored by rust-analyzer. They are /// relative to the workspace root, and globs are not supported. You may /// also need to add the folders to Code's `files.watcherExclude`. @@ -895,6 +894,15 @@ impl Config { } } + pub fn add_linked_projects(&mut self, linked_projects: Vec) { + let mut linked_projects = linked_projects + .into_iter() + .map(ManifestOrProjectJson::ProjectJson) + .collect::>(); + + self.data.linkedProjects.append(&mut linked_projects); + } + pub fn did_save_text_document_dynamic_registration(&self) -> bool { let caps = try_or_def!(self.caps.text_document.as_ref()?.synchronization.clone()?); caps.did_save == Some(true) && caps.dynamic_registration == Some(true) @@ -1129,16 +1137,16 @@ impl Config { pub fn cargo(&self) -> CargoConfig { let rustc_source = self.data.rustc_source.as_ref().map(|rustc_src| { if rustc_src == "discover" { - RustcSource::Discover + RustLibSource::Discover } else { - RustcSource::Path(self.root_path.join(rustc_src)) + RustLibSource::Path(self.root_path.join(rustc_src)) } }); let sysroot = self.data.cargo_sysroot.as_ref().map(|sysroot| { if sysroot == "discover" { - RustcSource::Discover + RustLibSource::Discover } else { - RustcSource::Path(self.root_path.join(sysroot)) + RustLibSource::Path(self.root_path.join(sysroot)) } }); let sysroot_src = diff --git a/crates/rust-analyzer/src/dispatch.rs b/crates/rust-analyzer/src/dispatch.rs index 715804449a045..313bb2ec8dffa 100644 --- a/crates/rust-analyzer/src/dispatch.rs +++ b/crates/rust-analyzer/src/dispatch.rs @@ -87,6 +87,42 @@ impl<'a> RequestDispatcher<'a> { self } + /// Dispatches the request onto thread pool + pub(crate) fn on_no_retry( + &mut self, + f: fn(GlobalStateSnapshot, R::Params) -> Result, + ) -> &mut Self + where + R: lsp_types::request::Request + 'static, + R::Params: DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug, + R::Result: Serialize, + { + let (req, params, panic_context) = match self.parse::() { + Some(it) => it, + None => return self, + }; + + self.global_state.task_pool.handle.spawn({ + let world = self.global_state.snapshot(); + move || { + let result = panic::catch_unwind(move || { + let _pctx = stdx::panic_context::enter(panic_context); + 
f(world, params) + }); + match thread_result_to_response::(req.id.clone(), result) { + Ok(response) => Task::Response(response), + Err(_) => Task::Response(lsp_server::Response::new_err( + req.id, + lsp_server::ErrorCode::ContentModified as i32, + "content modified".to_string(), + )), + } + } + }); + + self + } + /// Dispatches the request onto thread pool pub(crate) fn on( &mut self, diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index 32ac9a42dec33..2fca2ab851d41 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs @@ -29,7 +29,7 @@ use project_model::{ManifestPath, ProjectWorkspace, TargetKind}; use serde_json::json; use stdx::{format_to, never}; use syntax::{algo, ast, AstNode, TextRange, TextSize}; -use vfs::AbsPathBuf; +use vfs::{AbsPath, AbsPathBuf}; use crate::{ cargo_target_spec::CargoTargetSpec, @@ -46,6 +46,7 @@ use crate::{ pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> Result<()> { state.proc_macro_clients.clear(); state.proc_macro_changed = false; + state.fetch_workspaces_queue.request_op("reload workspace request".to_string()); state.fetch_build_data_queue.request_op("reload workspace request".to_string()); Ok(()) @@ -84,6 +85,15 @@ pub(crate) fn handle_analyzer_status( snap.workspaces.len(), if snap.workspaces.len() == 1 { "" } else { "s" } ); + + format_to!( + buf, + "Workspace root folders: {:?}", + snap.workspaces + .iter() + .flat_map(|ws| ws.workspace_definition_path()) + .collect::>() + ); } buf.push_str("\nAnalysis:\n"); buf.push_str( diff --git a/crates/rust-analyzer/src/lsp_utils.rs b/crates/rust-analyzer/src/lsp_utils.rs index 30f1c53c198f3..12e5caf2cc9e4 100644 --- a/crates/rust-analyzer/src/lsp_utils.rs +++ b/crates/rust-analyzer/src/lsp_utils.rs @@ -36,11 +36,41 @@ impl Progress { } impl GlobalState { - pub(crate) fn show_message(&mut self, typ: lsp_types::MessageType, message: String) { - let message = message; - self.send_notification::( - lsp_types::ShowMessageParams { typ, message }, - ) + pub(crate) fn show_message( + &mut self, + typ: lsp_types::MessageType, + message: String, + show_open_log_button: bool, + ) { + match self.config.open_server_logs() && show_open_log_button { + true => self.send_request::( + lsp_types::ShowMessageRequestParams { + typ, + message, + actions: Some(vec![lsp_types::MessageActionItem { + title: "Open server logs".to_owned(), + properties: Default::default(), + }]), + }, + |this, resp| { + let lsp_server::Response { error: None, result: Some(result), .. } = resp + else { return }; + if let Ok(Some(_item)) = crate::from_json::< + ::Result, + >( + lsp_types::request::ShowMessageRequest::METHOD, &result + ) { + this.send_notification::(()); + } + }, + ), + false => self.send_notification::( + lsp_types::ShowMessageParams { + typ, + message, + }, + ), + } } /// Sends a notification to the client containing the error `message`. 
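
A short sketch, not part of the patch, showing how the extended `show_message` signature from the lsp_utils.rs hunk above is intended to be called. `GlobalState::show_message(typ, message, show_open_log_button)` comes from the diff; the `warn_with_logs` helper is an illustrative assumption. Passing `true` only marks the "Open server logs" button as relevant; whether it is actually shown still depends on the client capability check inside `show_message`.

// Hypothetical caller: surface a warning whose details live in the server log,
// so ask show_message to offer the "Open server logs" button.
fn warn_with_logs(state: &mut GlobalState, message: String) {
    state.show_message(lsp_types::MessageType::WARNING, message, true);
}
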
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index dd0804b4398a9..67a54cde68c6f 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -406,9 +406,19 @@ impl GlobalState { if self.config.server_status_notification() { self.send_notification::(status); - } else if let (lsp_ext::Health::Error, Some(message)) = (status.health, &status.message) - { - self.show_and_log_error(message.clone(), None); + } else if let (health, Some(message)) = (status.health, &status.message) { + let open_log_button = tracing::enabled!(tracing::Level::ERROR) + && (self.fetch_build_data_error().is_err() + || self.fetch_workspace_error().is_err()); + self.show_message( + match health { + lsp_ext::Health::Ok => lsp_types::MessageType::INFO, + lsp_ext::Health::Warning => lsp_types::MessageType::WARNING, + lsp_ext::Health::Error => lsp_types::MessageType::ERROR, + }, + message.clone(), + open_log_button, + ); } } } @@ -653,7 +663,7 @@ impl GlobalState { .on::(handlers::handle_goto_declaration) .on::(handlers::handle_goto_implementation) .on::(handlers::handle_goto_type_definition) - .on::(handlers::handle_inlay_hints) + .on_no_retry::(handlers::handle_inlay_hints) .on::(handlers::handle_inlay_hints_resolve) .on::(handlers::handle_completion) .on::(handlers::handle_completion_resolve) @@ -919,6 +929,7 @@ impl GlobalState { this.show_message( lsp_types::MessageType::WARNING, error.to_string(), + false, ); } this.update_configuration(config); diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 28d37f5685ae9..1a6e1af2eb7ed 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -90,38 +90,55 @@ impl GlobalState { quiescent: self.is_quiescent(), message: None, }; + let mut message = String::new(); if self.proc_macro_changed { status.health = lsp_ext::Health::Warning; - status.message = - Some("Reload required due to source changes of a procedural macro.".into()) + message.push_str("Reload required due to source changes of a procedural macro.\n\n"); } if let Err(_) = self.fetch_build_data_error() { status.health = lsp_ext::Health::Warning; - status.message = - Some("Failed to run build scripts of some packages, check the logs.".to_string()); + message.push_str("Failed to run build scripts of some packages.\n\n"); } if !self.config.cargo_autoreload() && self.is_quiescent() && self.fetch_workspaces_queue.op_requested() { status.health = lsp_ext::Health::Warning; - status.message = Some("Workspace reload required".to_string()) + message.push_str("Auto-reloading is disabled and the workspace has changed, a manual workspace reload is required.\n\n"); } - - if let Err(_) = self.fetch_workspace_error() { - status.health = lsp_ext::Health::Error; - status.message = Some("Failed to load workspaces".to_string()) - } - if self.config.linked_projects().is_empty() && self.config.detached_files().is_empty() && self.config.notifications().cargo_toml_not_found { status.health = lsp_ext::Health::Warning; - status.message = Some("Failed to discover workspace".to_string()) + message.push_str("Failed to discover workspace.\n\n"); + } + + for ws in self.workspaces.iter() { + let (ProjectWorkspace::Cargo { sysroot, .. } + | ProjectWorkspace::Json { sysroot, .. } + | ProjectWorkspace::DetachedFiles { sysroot, .. 
}) = ws; + if let Err(Some(e)) = sysroot { + status.health = lsp_ext::Health::Warning; + message.push_str(e); + message.push_str("\n\n"); + } + if let ProjectWorkspace::Cargo { rustc: Err(Some(e)), .. } = ws { + status.health = lsp_ext::Health::Warning; + message.push_str(e); + message.push_str("\n\n"); + } } + if let Err(_) = self.fetch_workspace_error() { + status.health = lsp_ext::Health::Error; + message.push_str("Failed to load workspaces.\n\n"); + } + + if !message.is_empty() { + status.message = Some(message.trim_end().to_owned()); + } status } diff --git a/crates/syntax/src/ast/expr_ext.rs b/crates/syntax/src/ast/expr_ext.rs index db66d08a73b5e..c43d0830b9e24 100644 --- a/crates/syntax/src/ast/expr_ext.rs +++ b/crates/syntax/src/ast/expr_ext.rs @@ -48,23 +48,30 @@ impl From for ElseBranch { } impl ast::IfExpr { - pub fn then_branch(&self) -> Option { - self.children_after_condition().next() + pub fn condition(&self) -> Option { + // If the condition is a BlockExpr, check if the then body is missing. + // If it is assume the condition is the expression that is missing instead. + let mut exprs = support::children(self.syntax()); + let first = exprs.next(); + match first { + Some(ast::Expr::BlockExpr(_)) => exprs.next().and(first), + first => first, + } } - pub fn else_branch(&self) -> Option { - let res = match self.children_after_condition().nth(1) { - Some(block) => ElseBranch::Block(block), - None => { - let elif = self.children_after_condition().next()?; - ElseBranch::IfExpr(elif) - } - }; - Some(res) + pub fn then_branch(&self) -> Option { + match support::children(self.syntax()).nth(1)? { + ast::Expr::BlockExpr(block) => Some(block), + _ => None, + } } - fn children_after_condition(&self) -> impl Iterator { - self.syntax().children().skip(1).filter_map(N::cast) + pub fn else_branch(&self) -> Option { + match support::children(self.syntax()).nth(2)? { + ast::Expr::BlockExpr(block) => Some(ElseBranch::Block(block)), + ast::Expr::IfExpr(elif) => Some(ElseBranch::IfExpr(elif)), + _ => None, + } } } @@ -356,7 +363,15 @@ impl ast::BlockExpr { Some(it) => it, None => return true, }; - !matches!(parent.kind(), FN | IF_EXPR | WHILE_EXPR | LOOP_EXPR) + match parent.kind() { + FOR_EXPR | IF_EXPR => parent + .children() + .filter(|it| ast::Expr::can_cast(it.kind())) + .next() + .map_or(true, |it| it == *self.syntax()), + LET_ELSE | FN | WHILE_EXPR | LOOP_EXPR | CONST_BLOCK_PAT => false, + _ => true, + } } } diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs index 15bd5ab3c7299..3308077da5b12 100644 --- a/crates/syntax/src/ast/node_ext.rs +++ b/crates/syntax/src/ast/node_ext.rs @@ -937,12 +937,6 @@ impl From for ast::Item { } } -impl ast::IfExpr { - pub fn condition(&self) -> Option { - support::child(&self.syntax) - } -} - impl ast::MatchGuard { pub fn condition(&self) -> Option { support::child(&self.syntax) diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs index 93ff76a040c6d..ca6de4061a4b8 100644 --- a/crates/test-utils/src/minicore.rs +++ b/crates/test-utils/src/minicore.rs @@ -44,6 +44,8 @@ //! try: infallible //! 
unsize: sized +#![rustc_coherence_is_core] + pub mod marker { // region:sized #[lang = "sized"] diff --git a/editors/code/package.json b/editors/code/package.json index a3b1a3107d0c9..c5eb08748bfab 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -199,6 +199,11 @@ "title": "Reload workspace", "category": "rust-analyzer" }, + { + "command": "rust-analyzer.addProject", + "title": "Add current file's crate to workspace", + "category": "rust-analyzer" + }, { "command": "rust-analyzer.reload", "title": "Restart server", @@ -428,6 +433,17 @@ "default": false, "type": "boolean" }, + "rust-analyzer.discoverProjectCommand": { + "markdownDescription": "Sets the command that rust-analyzer uses to generate `rust-project.json` files. This command should only be used\n if a build system like Buck or Bazel is also in use. The command must accept files as arguments and return \n a rust-project.json over stdout.", + "default": null, + "type": [ + "null", + "array" + ], + "items": { + "type": "string" + } + }, "$generated-start": {}, "rust-analyzer.assist.emitMustUse": { "markdownDescription": "Whether to insert #[must_use] when generating `as_` methods\nfor enum variants.", diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts index 62980ca046450..565cb9c6432f4 100644 --- a/editors/code/src/client.ts +++ b/editors/code/src/client.ts @@ -6,7 +6,7 @@ import * as Is from "vscode-languageclient/lib/common/utils/is"; import { assert } from "./util"; import * as diagnostics from "./diagnostics"; import { WorkspaceEdit } from "vscode"; -import { Config, substituteVSCodeVariables } from "./config"; +import { Config, prepareVSCodeConfig } from "./config"; import { randomUUID } from "crypto"; export interface Env { @@ -95,7 +95,16 @@ export async function createClient( const resp = await next(params, token); if (resp && Array.isArray(resp)) { return resp.map((val) => { - return substituteVSCodeVariables(val); + return prepareVSCodeConfig(val, (key, cfg) => { + // we only want to set discovered workspaces on the right key + // and if a workspace has been discovered. 
+ if ( + key === "linkedProjects" && + config.discoveredWorkspaces.length > 0 + ) { + cfg[key] = config.discoveredWorkspaces; + } + }); }); } else { return resp; diff --git a/editors/code/src/commands.ts b/editors/code/src/commands.ts index f4a4579a92c9b..8a953577e99d3 100644 --- a/editors/code/src/commands.ts +++ b/editors/code/src/commands.ts @@ -3,7 +3,7 @@ import * as lc from "vscode-languageclient"; import * as ra from "./lsp_ext"; import * as path from "path"; -import { Ctx, Cmd, CtxInit } from "./ctx"; +import { Ctx, Cmd, CtxInit, discoverWorkspace } from "./ctx"; import { applySnippetWorkspaceEdit, applySnippetTextEdits } from "./snippets"; import { spawnSync } from "child_process"; import { RunnableQuickPick, selectRunnable, createTask, createArgs } from "./run"; @@ -749,6 +749,33 @@ export function reloadWorkspace(ctx: CtxInit): Cmd { return async () => ctx.client.sendRequest(ra.reloadWorkspace); } +export function addProject(ctx: CtxInit): Cmd { + return async () => { + const discoverProjectCommand = ctx.config.discoverProjectCommand; + if (!discoverProjectCommand) { + return; + } + + const workspaces: JsonProject[] = await Promise.all( + vscode.workspace.workspaceFolders!.map(async (folder): Promise => { + const rustDocuments = vscode.workspace.textDocuments.filter(isRustDocument); + return discoverWorkspace(rustDocuments, discoverProjectCommand, { + cwd: folder.uri.fsPath, + }); + }) + ); + + ctx.addToDiscoveredWorkspaces(workspaces); + + // this is a workaround to avoid needing writing the `rust-project.json` into + // a workspace-level VS Code-specific settings folder. We'd like to keep the + // `rust-project.json` entirely in-memory. + await ctx.client?.sendNotification(lc.DidChangeConfigurationNotification.type, { + settings: "", + }); + }; +} + async function showReferencesImpl( client: LanguageClient | undefined, uri: string, diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts index 1faa0ad91065b..da7c74c28bae9 100644 --- a/editors/code/src/config.ts +++ b/editors/code/src/config.ts @@ -34,6 +34,7 @@ export class Config { constructor(ctx: vscode.ExtensionContext) { this.globalStorageUri = ctx.globalStorageUri; + this.discoveredWorkspaces = []; vscode.workspace.onDidChangeConfiguration( this.onDidChangeConfiguration, this, @@ -55,6 +56,8 @@ export class Config { log.info("Using configuration", Object.fromEntries(cfg)); } + public discoveredWorkspaces: JsonProject[]; + private async onDidChangeConfiguration(event: vscode.ConfigurationChangeEvent) { this.refreshLogging(); @@ -191,7 +194,7 @@ export class Config { * So this getter handles this quirk by not requiring the caller to use postfix `!` */ private get(path: string): T | undefined { - return substituteVSCodeVariables(this.cfg.get(path)); + return prepareVSCodeConfig(this.cfg.get(path)); } get serverPath() { @@ -214,6 +217,10 @@ export class Config { return this.get("trace.extension"); } + get discoverProjectCommand() { + return this.get("discoverProjectCommand"); + } + get cargoRunner() { return this.get("cargoRunner"); } @@ -280,18 +287,32 @@ export class Config { } } -export function substituteVSCodeVariables(resp: T): T { +// the optional `cb?` parameter is meant to be used to add additional +// key/value pairs to the VS Code configuration. This needed for, e.g., +// including a `rust-project.json` into the `linkedProjects` key as part +// of the configuration/InitializationParams _without_ causing VS Code +// configuration to be written out to workspace-level settings. 
diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts
index 1faa0ad91065b..da7c74c28bae9 100644
--- a/editors/code/src/config.ts
+++ b/editors/code/src/config.ts
@@ -34,6 +34,7 @@ export class Config {
 
     constructor(ctx: vscode.ExtensionContext) {
         this.globalStorageUri = ctx.globalStorageUri;
+        this.discoveredWorkspaces = [];
        vscode.workspace.onDidChangeConfiguration(
             this.onDidChangeConfiguration,
             this,
@@ -55,6 +56,8 @@ export class Config {
         log.info("Using configuration", Object.fromEntries(cfg));
     }
 
+    public discoveredWorkspaces: JsonProject[];
+
     private async onDidChangeConfiguration(event: vscode.ConfigurationChangeEvent) {
         this.refreshLogging();
 
@@ -191,7 +194,7 @@ export class Config {
      * So this getter handles this quirk by not requiring the caller to use postfix `!`
      */
     private get<T>(path: string): T | undefined {
-        return substituteVSCodeVariables(this.cfg.get<T>(path));
+        return prepareVSCodeConfig(this.cfg.get<T>(path));
     }
 
     get serverPath() {
@@ -214,6 +217,10 @@
         return this.get("trace.extension");
     }
 
+    get discoverProjectCommand() {
+        return this.get("discoverProjectCommand");
+    }
+
     get cargoRunner() {
         return this.get("cargoRunner");
     }
@@ -280,18 +287,32 @@
     }
 }
 
-export function substituteVSCodeVariables<T>(resp: T): T {
+// the optional `cb?` parameter is meant to be used to add additional
+// key/value pairs to the VS Code configuration. This is needed for, e.g.,
+// including a `rust-project.json` into the `linkedProjects` key as part
+// of the configuration/InitializationParams _without_ causing VS Code
+// configuration to be written out to workspace-level settings. This is
+// undesirable behavior because rust-project.json files can be tens of
+// thousands of lines of JSON, most of which is not meant for humans
+// to interact with.
+export function prepareVSCodeConfig<T>(
+    resp: T,
+    cb?: (key: Extract<keyof T, string>, res: { [key: string]: any }) => void
+): T {
     if (Is.string(resp)) {
         return substituteVSCodeVariableInString(resp) as T;
     } else if (resp && Is.array(resp)) {
         return resp.map((val) => {
-            return substituteVSCodeVariables(val);
+            return prepareVSCodeConfig(val);
         }) as T;
     } else if (resp && typeof resp === "object") {
         const res: { [key: string]: any } = {};
         for (const key in resp) {
             const val = resp[key];
-            res[key] = substituteVSCodeVariables(val);
+            res[key] = prepareVSCodeConfig(val);
+            if (cb) {
+                cb(key, res);
+            }
         }
         return res as T;
     }
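
Aside (illustration, not part of the patch): the `cb` hook above is invoked once per key while an object configuration is being copied, which is what lets the extension substitute the in-memory workspaces for `linkedProjects` without writing them to settings. A self-contained sketch with an invented configuration shape:

    import { prepareVSCodeConfig } from "./config";

    // Stand-in for config.discoveredWorkspaces.
    const discoveredWorkspaces: JsonProject[] = [{ crates: [] }];

    const rawOptions = { linkedProjects: [] as unknown[], checkOnSave: true };
    const prepared = prepareVSCodeConfig(rawOptions, (key, obj) => {
        if (key === "linkedProjects" && discoveredWorkspaces.length > 0) {
            obj[key] = discoveredWorkspaces;
        }
    });
    // prepared.linkedProjects now holds the in-memory rust-project.json objects;
    // nothing is written back to VS Code settings.
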
diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts
index 1708d47cee77d..c2dca733df8f5 100644
--- a/editors/code/src/ctx.ts
+++ b/editors/code/src/ctx.ts
@@ -2,12 +2,20 @@ import * as vscode from "vscode";
 import * as lc from "vscode-languageclient/node";
 import * as ra from "./lsp_ext";
 
-import { Config, substituteVSCodeVariables } from "./config";
+import { Config, prepareVSCodeConfig } from "./config";
 import { createClient } from "./client";
-import { isRustDocument, isRustEditor, LazyOutputChannel, log, RustEditor } from "./util";
+import {
+    executeDiscoverProject,
+    isRustDocument,
+    isRustEditor,
+    LazyOutputChannel,
+    log,
+    RustEditor,
+} from "./util";
 import { ServerStatusParams } from "./lsp_ext";
 import { PersistentState } from "./persistent_state";
 import { bootstrap } from "./bootstrap";
+import { ExecOptions } from "child_process";
 
 // We only support local folders, not eg. Live Share (`vlsl:` scheme), so don't activate if
 // only those are in use. We use "Empty" to represent these scenarios
@@ -41,6 +49,17 @@ export function fetchWorkspace(): Workspace {
         : { kind: "Workspace Folder" };
 }
 
+export async function discoverWorkspace(
+    files: readonly vscode.TextDocument[],
+    command: string[],
+    options: ExecOptions
+): Promise<JsonProject> {
+    const paths = files.map((f) => `"${f.uri.fsPath}"`).join(" ");
+    const joinedCommand = command.join(" ");
+    const data = await executeDiscoverProject(`${joinedCommand} ${paths}`, options);
+    return JSON.parse(data) as JsonProject;
+}
+
 export type CommandFactory = {
     enabled: (ctx: CtxInit) => Cmd;
     disabled?: (ctx: Ctx) => Cmd;
 };
@@ -52,7 +71,7 @@ export type CtxInit = Ctx & {
 
 export class Ctx {
     readonly statusBar: vscode.StatusBarItem;
-    readonly config: Config;
+    config: Config;
     readonly workspace: Workspace;
 
     private _client: lc.LanguageClient | undefined;
@@ -169,7 +188,30 @@ export class Ctx {
             };
         }
 
-        const initializationOptions = substituteVSCodeVariables(rawInitializationOptions);
+        const discoverProjectCommand = this.config.discoverProjectCommand;
+        if (discoverProjectCommand) {
+            const workspaces: JsonProject[] = await Promise.all(
+                vscode.workspace.workspaceFolders!.map(async (folder): Promise<JsonProject> => {
+                    const rustDocuments = vscode.workspace.textDocuments.filter(isRustDocument);
+                    return discoverWorkspace(rustDocuments, discoverProjectCommand, {
+                        cwd: folder.uri.fsPath,
+                    });
+                })
+            );
+
+            this.addToDiscoveredWorkspaces(workspaces);
+        }
+
+        const initializationOptions = prepareVSCodeConfig(
+            rawInitializationOptions,
+            (key, obj) => {
+                // we only want to set discovered workspaces on the right key
+                // and if a workspace has been discovered.
+                if (key === "linkedProjects" && this.config.discoveredWorkspaces.length > 0) {
+                    obj["linkedProjects"] = this.config.discoveredWorkspaces;
+                }
+            }
+        );
 
         this._client = await createClient(
             this.traceOutputChannel,
@@ -251,6 +293,17 @@
         return this._serverPath;
     }
 
+    addToDiscoveredWorkspaces(workspaces: JsonProject[]) {
+        for (const workspace of workspaces) {
+            const index = this.config.discoveredWorkspaces.indexOf(workspace);
+            if (~index) {
+                this.config.discoveredWorkspaces[index] = workspace;
+            } else {
+                this.config.discoveredWorkspaces.push(workspace);
+            }
+        }
+    }
+
     private updateCommands(forceDisable?: "disable") {
         this.commandDisposables.forEach((disposable) => disposable.dispose());
         this.commandDisposables = [];
@@ -289,6 +342,7 @@
                 statusBar.tooltip.appendText(status.message ?? "Ready");
                 statusBar.color = undefined;
                 statusBar.backgroundColor = undefined;
+                statusBar.command = "rust-analyzer.stopServer";
                 break;
             case "warning":
                 if (status.message) {
@@ -298,6 +352,7 @@
                 statusBar.backgroundColor = new vscode.ThemeColor(
                     "statusBarItem.warningBackground"
                 );
+                statusBar.command = "rust-analyzer.openLogs";
                 icon = "$(warning) ";
                 break;
             case "error":
@@ -306,6 +361,7 @@
                 }
                 statusBar.color = new vscode.ThemeColor("statusBarItem.errorForeground");
                 statusBar.backgroundColor = new vscode.ThemeColor("statusBarItem.errorBackground");
+                statusBar.command = "rust-analyzer.openLogs";
                 icon = "$(error) ";
                 break;
             case "stopped":
@@ -315,18 +371,19 @@
                 );
                 statusBar.color = undefined;
                 statusBar.backgroundColor = undefined;
+                statusBar.command = "rust-analyzer.startServer";
                 statusBar.text = `$(stop-circle) rust-analyzer`;
                 return;
         }
         if (statusBar.tooltip.value) {
             statusBar.tooltip.appendText("\n\n");
         }
-        statusBar.tooltip.appendMarkdown("[Stop server](command:rust-analyzer.stopServer)");
         statusBar.tooltip.appendMarkdown(
             "\n\n[Reload Workspace](command:rust-analyzer.reloadWorkspace)"
         );
-        statusBar.tooltip.appendMarkdown("\n\n[Restart server](command:rust-analyzer.startServer)");
         statusBar.tooltip.appendMarkdown("\n\n[Open logs](command:rust-analyzer.openLogs)");
+        statusBar.tooltip.appendMarkdown("\n\n[Restart server](command:rust-analyzer.startServer)");
+        statusBar.tooltip.appendMarkdown("[Stop server](command:rust-analyzer.stopServer)");
         if (!status.quiescent) icon = "$(sync~spin) ";
         statusBar.text = `${icon}rust-analyzer`;
     }
diff --git a/editors/code/src/lsp_ext.ts b/editors/code/src/lsp_ext.ts
index 400cd207d41b0..872d7199b838a 100644
--- a/editors/code/src/lsp_ext.ts
+++ b/editors/code/src/lsp_ext.ts
@@ -43,6 +43,7 @@ export const relatedTests = new lc.RequestType(
     "rust-analyzer/reloadWorkspace");
+
 export const runFlycheck = new lc.NotificationType<{
     textDocument: lc.TextDocumentIdentifier | null;
 }>("rust-analyzer/runFlycheck");
diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts
index 8a2412af849cd..d5de00561b123 100644
--- a/editors/code/src/main.ts
+++ b/editors/code/src/main.ts
@@ -153,6 +153,7 @@ function createCommands(): Record<string, CommandFactory> {
         memoryUsage: { enabled: commands.memoryUsage },
         shuffleCrateGraph: { enabled: commands.shuffleCrateGraph },
         reloadWorkspace: { enabled: commands.reloadWorkspace },
+        addProject: { enabled: commands.addProject },
         matchingBrace: { enabled: commands.matchingBrace },
         joinLines: { enabled: commands.joinLines },
         parentModule: { enabled: commands.parentModule },
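
For reference (not part of the patch): `discoverWorkspace` above only assembles a shell command from the configured argv plus the quoted paths of the open Rust files, runs it, and parses stdout as one project. A stripped-down model without the vscode types, using a made-up command and path:

    import { exec } from "child_process";
    import { promisify } from "util";

    const execAsync = promisify(exec);

    // Same shape as ctx.ts's discoverWorkspace, minus vscode.TextDocument.
    async function discover(command: string[], files: string[]): Promise<JsonProject> {
        const paths = files.map((p) => `"${p}"`).join(" ");
        const { stdout } = await execAsync(`${command.join(" ")} ${paths}`);
        return JSON.parse(stdout.trimEnd()) as JsonProject;
    }

    // e.g. discover(["buck2", "rust-project"], ["/repo/kernel/src/lib.rs"]);
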
diff --git a/editors/code/src/rust_project.ts b/editors/code/src/rust_project.ts
new file mode 100644
index 0000000000000..187a1a96c10c5
--- /dev/null
+++ b/editors/code/src/rust_project.ts
@@ -0,0 +1,91 @@
+interface JsonProject {
+    /// Path to the directory with *source code* of
+    /// sysroot crates.
+    ///
+    /// It should point to the directory where std,
+    /// core, and friends can be found:
+    ///
+    /// https://github.com/rust-lang/rust/tree/master/library.
+    ///
+    /// If provided, rust-analyzer automatically adds
+    /// dependencies on sysroot crates. Conversely,
+    /// if you omit this path, you can specify sysroot
+    /// dependencies yourself and, for example, have
+    /// several different "sysroots" in one graph of
+    /// crates.
+    sysroot_src?: string;
+    /// The set of crates comprising the current
+    /// project. Must include all transitive
+    /// dependencies as well as sysroot crate (libstd,
+    /// libcore and such).
+    crates: Crate[];
+}
+
+interface Crate {
+    /// Optional crate name used for display purposes,
+    /// without affecting semantics. See the `deps`
+    /// key for semantically-significant crate names.
+    display_name?: string;
+    /// Path to the root module of the crate.
+    root_module: string;
+    /// Edition of the crate.
+    edition: "2015" | "2018" | "2021";
+    /// Dependencies
+    deps: Dep[];
+    /// Should this crate be treated as a member of
+    /// current "workspace".
+    ///
+    /// By default, inferred from the `root_module`
+    /// (members are the crates which reside inside
+    /// the directory opened in the editor).
+    ///
+    /// Set this to `false` for things like standard
+    /// library and 3rd party crates to enable
+    /// performance optimizations (rust-analyzer
+    /// assumes that non-member crates don't change).
+    is_workspace_member?: boolean;
+    /// Optionally specify the (super)set of `.rs`
+    /// files comprising this crate.
+    ///
+    /// By default, rust-analyzer assumes that only
+    /// files under `root_module.parent` can belong
+    /// to a crate. `include_dirs` are included
+    /// recursively, unless a subdirectory is in
+    /// `exclude_dirs`.
+    ///
+    /// Different crates can share the same `source`.
+    ///
+    /// If two crates share an `.rs` file in common,
+    /// they *must* have the same `source`.
+    /// rust-analyzer assumes that files from one
+    /// source can't refer to files in another source.
+    source?: {
+        include_dirs: string[];
+        exclude_dirs: string[];
+    };
+    /// The set of cfgs activated for a given crate, like
+    /// `["unix", "feature=\"foo\"", "feature=\"bar\""]`.
+    cfg: string[];
+    /// Target triple for this Crate.
+    ///
+    /// Used when running `rustc --print cfg`
+    /// to get target-specific cfgs.
+    target?: string;
+    /// Environment variables, used for
+    /// the `env!` macro
+    env: { [key: string]: string };
+
+    /// Whether the crate is a proc-macro crate.
+    is_proc_macro: boolean;
+    /// For proc-macro crates, path to compiled
+    /// proc-macro (.so file).
+    proc_macro_dylib_path?: string;
+}
+
+interface Dep {
+    /// Index of a crate in the `crates` array.
+    crate: number;
+    /// Name as should appear in the (implicit)
+    /// `extern crate name` declaration.
+    name: string;
+}
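
For illustration (not part of the patch): a minimal value satisfying the JsonProject/Crate/Dep shape above, with placeholder crate names and paths. Serialized with JSON.stringify, this is the kind of document a discoverProjectCommand tool is expected to print:

    const example: JsonProject = {
        sysroot_src: "/home/user/.rustup/toolchains/stable/lib/rustlib/src/rust/library",
        crates: [
            {
                display_name: "my_lib",
                root_module: "crates/my_lib/src/lib.rs",
                edition: "2021",
                deps: [],
                cfg: ["unix"],
                env: {},
                is_proc_macro: false,
            },
            {
                display_name: "my_bin",
                root_module: "crates/my_bin/src/main.rs",
                edition: "2021",
                // `crate: 0` is an index into `crates` (my_lib); `name` is what
                // my_bin uses in `extern crate`/`use` paths.
                deps: [{ crate: 0, name: "my_lib" }],
                cfg: ["unix"],
                env: {},
                is_proc_macro: false,
            },
        ],
    };
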
diff --git a/editors/code/src/util.ts b/editors/code/src/util.ts
index d93b9caeb1648..922fbcbcf35a5 100644
--- a/editors/code/src/util.ts
+++ b/editors/code/src/util.ts
@@ -150,9 +150,11 @@ export function memoizeAsync(
 
 /** Awaitable wrapper around `child_process.exec` */
 export function execute(command: string, options: ExecOptions): Promise<string> {
+    log.info(`running command: ${command}`);
     return new Promise((resolve, reject) => {
         exec(command, options, (err, stdout, stderr) => {
             if (err) {
+                log.error(err);
                 reject(err);
                 return;
             }
@@ -167,6 +169,21 @@ export function execute(command: string, options: ExecOptions): Promise<string>
     });
 }
 
+export function executeDiscoverProject(command: string, options: ExecOptions): Promise<string> {
+    log.info(`running command: ${command}`);
+    return new Promise((resolve, reject) => {
+        exec(command, options, (err, stdout, _) => {
+            if (err) {
+                log.error(err);
+                reject(err);
+                return;
+            }
+
+            resolve(stdout.trimEnd());
+        });
+    });
+}
+
 export class LazyOutputChannel implements vscode.OutputChannel {
     constructor(name: string) {
         this.name = name;