diff --git a/src/tools/rust-analyzer/CONTRIBUTING.md b/src/tools/rust-analyzer/CONTRIBUTING.md index 6f270fc63bad8..35d03780c1d5c 100644 --- a/src/tools/rust-analyzer/CONTRIBUTING.md +++ b/src/tools/rust-analyzer/CONTRIBUTING.md @@ -1,3 +1,8 @@ +> [!IMPORTANT] +> We have enacted a feature freeze for IDE assists to cope with the PR backlog as well as to allow us to prepare for the rowan transition! +> If you submit a PR that **adds** new ide-assists, chances are very high that we will just close it on this basis alone until we have the capacity to deal with them again. + + # Contributing to rust-analyzer Thank you for your interest in contributing to rust-analyzer! There are many ways to contribute @@ -28,3 +33,11 @@ possibility of someone putting a lot of work into a feature that is then going t it out of scope (be it due to generally not fitting in with rust-analyzer, or just not having the maintenance capacity). If there already is a feature issue open but it is not clear whether it is considered accepted feel free to just drop a comment and ask! + +## Use of AI tools + +AI tool use is not discouraged on the rust-analyzer codebase, as long as it meets our quality standards. +We kindly ask you to disclose usage of AI tools in your contributions. +If you used them without disclosing it, we may reject your contribution on that basis alone due to the assumption that you likely did not review your own submission (so why should we?). + +We may still reject AI-assisted contributions if we deem the quality of the contribution to be unsatisfactory, so as to reduce the impact on the team's review budget. diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index efe56cb7f61cb..1e924d92f4244 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -788,6 +788,7 @@ dependencies = [ "itertools 0.14.0", "ra-ap-rustc_type_ir", "rustc-hash 2.1.1", + "serde_json", "smallvec", "span", "stdx", @@ -901,6 +902,8 @@ dependencies = [ "rustc_apfloat", "salsa", "salsa-macros", + "serde", + "serde_derive", "smallvec", "span", "stdx", @@ -1214,7 +1217,9 @@ version = "0.0.0" dependencies = [ "dashmap", "hashbrown 0.14.5", + "rayon", "rustc-hash 2.1.1", + "smallvec", "triomphe", ] @@ -2040,9 +2045,9 @@ checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" [[package]] name = "ra-ap-rustc_abi" -version = "0.139.0" +version = "0.143.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce480c45c05462cf6b700468118201b00132613a968a1849da5f7a555c0f1db9" +checksum = "1d49dbe5d570793b3c3227972a6ac85fc3e830f09b32c3cb3b68cfceebad3b0a" dependencies = [ "bitflags 2.9.4", "ra-ap-rustc_hashes", @@ -2052,34 +2057,33 @@ dependencies = [ [[package]] name = "ra-ap-rustc_ast_ir" -version = "0.139.0" +version = "0.143.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "453da2376de406d740ca28412a31ae3d5a6039cd45698c1c2fb01b577dff64ae" +checksum = "cd0956db62c264a899d15667993cbbd2e8f0b02108712217e2579c61ac30b94b" [[package]] name = "ra-ap-rustc_hashes" -version = "0.139.0" +version = "0.143.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf411a55deaa3ea348594c8273fb2d1200265bf87b881b40c62b32f75caf8323" +checksum = "7df512084c24f4c96c8cc9a59cbd264301efbc8913d3759b065398024af316c9" dependencies = [ "rustc-stable-hash", ] [[package]] name = "ra-ap-rustc_index" -version = "0.139.0" +version = "0.143.0" source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1d0dd4cf1417ea8a809e9e7bf296c6ce6e05b75b043483872d1bd2951a08142c" +checksum = "bca3a49a928d38ba7927605e5909b6abe77d09ff359e4695c070c3f91d69cc8a" dependencies = [ "ra-ap-rustc_index_macros", - "smallvec", ] [[package]] name = "ra-ap-rustc_index_macros" -version = "0.139.0" +version = "0.143.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1b0d218fb91f8969716a962142c722d88b3cd3fd1f7ef03093261bf37e85dfd" +checksum = "4463e908a62c64c2a65c1966c2f4995d0e1f8b7dfc85a8b8de2562edf3d89070" dependencies = [ "proc-macro2", "quote", @@ -2088,9 +2092,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_lexer" -version = "0.139.0" +version = "0.143.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ec7c26e92c44d5433b29cf661faf0027e263b70a411d0f28996bd67e3bdb57e" +checksum = "228e01e1b237adb4bd8793487e1c37019c1e526a8f93716d99602301be267056" dependencies = [ "memchr", "unicode-properties", @@ -2099,9 +2103,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_next_trait_solver" -version = "0.139.0" +version = "0.143.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "029686fdbc8a058cf3d81ad157e1cdc81a37b9de0400289ccb86a62465484313" +checksum = "10d6f91143011d474bb844d268b0784c6a4c6db57743558b83f5ad34511627f1" dependencies = [ "derive-where", "ra-ap-rustc_index", @@ -2112,9 +2116,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_parse_format" -version = "0.139.0" +version = "0.143.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "509d279f1e87acc33476da3fbd05a6054e9ffeb4427cb38ba01b9d2656aec268" +checksum = "37fa8effbc436c0ddd9d7b1421aa3cccf8b94566c841c4e4aa3e09063b8f423f" dependencies = [ "ra-ap-rustc_lexer", "rustc-literal-escaper 0.0.5", @@ -2122,9 +2126,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_pattern_analysis" -version = "0.139.0" +version = "0.143.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9bb2c9930854314b03bd7aab060a14bca6f194b76381a4c309e3905ec3a02bbc" +checksum = "883c843fc27847ad03b8e772dd4a2d2728af4333a6d6821a22dfcfe7136dff3e" dependencies = [ "ra-ap-rustc_index", "rustc-hash 2.1.1", @@ -2135,9 +2139,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_type_ir" -version = "0.139.0" +version = "0.143.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e4a92a3e4dbdebb0d4c9caceb52eff45c4df784d21fb2da90dac50e218f95c0" +checksum = "a86e33c46b2b261a173b23f207461a514812a8b2d2d7935bbc685f733eacce10" dependencies = [ "arrayvec", "bitflags 2.9.4", @@ -2155,9 +2159,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_type_ir_macros" -version = "0.139.0" +version = "0.143.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca368eca2472367f2e6fdfb431c8342e99d848e4ce89cb20dd3b3bdcc43cbc28" +checksum = "15034c2fcaa5cf302aea6db20eda0f71fffeb0b372d6073cc50f940e974a2a47" dependencies = [ "proc-macro2", "quote", @@ -2445,9 +2449,9 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" [[package]] name = "salsa" -version = "0.24.0" +version = "0.25.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27956164373aeec733ac24ff1736de8541234e3a8e7e6f916b28175b5752af3b" +checksum = "e2e2aa2fca57727371eeafc975acc8e6f4c52f8166a78035543f6ee1c74c2dcc" dependencies = [ "boxcar", "crossbeam-queue", @@ -2470,15 +2474,15 @@ dependencies = [ [[package]] name = "salsa-macro-rules" -version = "0.24.0" +version = "0.25.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ca3b9d6e47c08b5de4b218e0c5f7ec910b51bce6314e651c8e7b9d154d174da" +checksum = "1bfc2a1e7bf06964105515451d728f2422dedc3a112383324a00b191a5c397a3" [[package]] name = "salsa-macros" -version = "0.24.0" +version = "0.25.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6337b62f2968be6b8afa30017d7564ecbde6832ada47ed2261fb14d0fd402ff4" +checksum = "3d844c1aa34946da46af683b5c27ec1088a3d9d84a2b837a108223fd830220e1" dependencies = [ "proc-macro2", "quote", diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml index 6991eeec738ee..8003cb2fba8ed 100644 --- a/src/tools/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/Cargo.toml @@ -86,14 +86,14 @@ vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" } vfs = { path = "./crates/vfs", version = "0.0.0" } edition = { path = "./crates/edition", version = "0.0.0" } -ra-ap-rustc_lexer = { version = "0.139", default-features = false } -ra-ap-rustc_parse_format = { version = "0.139", default-features = false } -ra-ap-rustc_index = { version = "0.139", default-features = false } -ra-ap-rustc_abi = { version = "0.139", default-features = false } -ra-ap-rustc_pattern_analysis = { version = "0.139", default-features = false } -ra-ap-rustc_ast_ir = { version = "0.139", default-features = false } -ra-ap-rustc_type_ir = { version = "0.139", default-features = false } -ra-ap-rustc_next_trait_solver = { version = "0.139", default-features = false } +ra-ap-rustc_lexer = { version = "0.143", default-features = false } +ra-ap-rustc_parse_format = { version = "0.143", default-features = false } +ra-ap-rustc_index = { version = "0.143", default-features = false } +ra-ap-rustc_abi = { version = "0.143", default-features = false } +ra-ap-rustc_pattern_analysis = { version = "0.143", default-features = false } +ra-ap-rustc_ast_ir = { version = "0.143", default-features = false } +ra-ap-rustc_type_ir = { version = "0.143", default-features = false } +ra-ap-rustc_next_trait_solver = { version = "0.143", default-features = false } # local crates that aren't published to crates.io. These should not have versions. @@ -135,13 +135,13 @@ rayon = "1.10.0" rowan = "=0.15.17" # Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work # on impls without it -salsa = { version = "0.24.0", default-features = false, features = [ +salsa = { version = "0.25.2", default-features = false, features = [ "rayon", "salsa_unstable", "macros", "inventory", ] } -salsa-macros = "0.24.0" +salsa-macros = "0.25.2" semver = "1.0.26" serde = { version = "1.0.219" } serde_derive = { version = "1.0.219" } diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs index 1b41386adf774..240f1264917a0 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/input.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs @@ -351,6 +351,8 @@ pub struct CrateData { /// declared in source via `extern crate test`. pub dependencies: Vec>, pub origin: CrateOrigin, + /// Extra crate-level attributes, including the surrounding `#![]`. + pub crate_attrs: Box<[Box]>, pub is_proc_macro: bool, /// The working directory to run proc-macros in invoked in the context of this crate. /// This is the workspace root of the cargo workspace for workspace members, the crate manifest @@ -465,7 +467,7 @@ impl Crate { /// including the crate itself. 
/// /// **Warning**: do not use this query in `hir-*` crates! It kills incrementality across crate metadata modifications. - pub fn transitive_deps(self, db: &dyn salsa::Database) -> Box<[Crate]> { + pub fn transitive_deps(self, db: &dyn salsa::Database) -> Vec { // There is a bit of duplication here and in `CrateGraphBuilder` in the same method, but it's not terrible // and removing that is a bit difficult. let mut worklist = vec![self]; @@ -480,7 +482,7 @@ impl Crate { worklist.extend(krate.data(db).dependencies.iter().map(|dep| dep.crate_id)); } - deps.into_boxed_slice() + deps } /// Returns all transitive reverse dependencies of the given crate, @@ -530,6 +532,7 @@ impl CrateGraphBuilder { mut potential_cfg_options: Option, mut env: Env, origin: CrateOrigin, + crate_attrs: Vec, is_proc_macro: bool, proc_macro_cwd: Arc, ws_data: Arc, @@ -539,12 +542,17 @@ impl CrateGraphBuilder { if let Some(potential_cfg_options) = &mut potential_cfg_options { potential_cfg_options.shrink_to_fit(); } + let crate_attrs: Vec<_> = crate_attrs + .into_iter() + .map(|raw_attr| format!("#![{raw_attr}]").into_boxed_str()) + .collect(); self.arena.alloc(CrateBuilder { basic: CrateData { root_file_id, edition, dependencies: Vec::new(), origin, + crate_attrs: crate_attrs.into_boxed_slice(), is_proc_macro, proc_macro_cwd, }, @@ -648,6 +656,7 @@ impl CrateGraphBuilder { edition: krate.basic.edition, is_proc_macro: krate.basic.is_proc_macro, origin: krate.basic.origin.clone(), + crate_attrs: krate.basic.crate_attrs.clone(), root_file_id: krate.basic.root_file_id, proc_macro_cwd: krate.basic.proc_macro_cwd.clone(), }; @@ -975,6 +984,7 @@ mod tests { Default::default(), Env::default(), CrateOrigin::Local { repo: None, name: None }, + Vec::new(), false, Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())), empty_ws_data(), @@ -988,6 +998,7 @@ mod tests { Default::default(), Env::default(), CrateOrigin::Local { repo: None, name: None }, + Vec::new(), false, Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())), empty_ws_data(), @@ -1001,6 +1012,7 @@ mod tests { Default::default(), Env::default(), CrateOrigin::Local { repo: None, name: None }, + Vec::new(), false, Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())), empty_ws_data(), @@ -1034,6 +1046,7 @@ mod tests { Default::default(), Env::default(), CrateOrigin::Local { repo: None, name: None }, + Vec::new(), false, Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())), empty_ws_data(), @@ -1047,6 +1060,7 @@ mod tests { Default::default(), Env::default(), CrateOrigin::Local { repo: None, name: None }, + Vec::new(), false, Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())), empty_ws_data(), @@ -1075,6 +1089,7 @@ mod tests { Default::default(), Env::default(), CrateOrigin::Local { repo: None, name: None }, + Vec::new(), false, Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())), empty_ws_data(), @@ -1088,6 +1103,7 @@ mod tests { Default::default(), Env::default(), CrateOrigin::Local { repo: None, name: None }, + Vec::new(), false, Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())), empty_ws_data(), @@ -1101,6 +1117,7 @@ mod tests { Default::default(), Env::default(), CrateOrigin::Local { repo: None, name: None }, + Vec::new(), false, Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())), empty_ws_data(), @@ -1129,6 +1146,7 @@ mod tests { Default::default(), Env::default(), CrateOrigin::Local { repo: None, name: None }, + Vec::new(), false, 
Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())), empty_ws_data(), @@ -1142,6 +1160,7 @@ mod tests { Default::default(), Env::default(), CrateOrigin::Local { repo: None, name: None }, + Vec::new(), false, Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())), empty_ws_data(), diff --git a/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs b/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs index 76e0aba859e68..a0e0dc5ff090e 100644 --- a/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs +++ b/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs @@ -56,6 +56,36 @@ pub enum CfgExpr { Not(Box), } +impl fmt::Display for CfgExpr { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + CfgExpr::Atom(atom) => atom.fmt(f), + CfgExpr::All(exprs) => { + write!(f, "all(")?; + for (i, expr) in exprs.iter().enumerate() { + if i > 0 { + write!(f, ", ")?; + } + expr.fmt(f)?; + } + write!(f, ")") + } + CfgExpr::Any(exprs) => { + write!(f, "any(")?; + for (i, expr) in exprs.iter().enumerate() { + if i > 0 { + write!(f, ", ")?; + } + expr.fmt(f)?; + } + write!(f, ")") + } + CfgExpr::Not(expr) => write!(f, "not({})", expr), + CfgExpr::Invalid => write!(f, "invalid"), + } + } +} + impl From for CfgExpr { fn from(atom: CfgAtom) -> Self { CfgExpr::Atom(atom) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attrs.rs b/src/tools/rust-analyzer/crates/hir-def/src/attrs.rs index febc794b5a05f..34a9230794d18 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/attrs.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/attrs.rs @@ -39,7 +39,7 @@ use rustc_abi::ReprOptions; use rustc_hash::FxHashSet; use smallvec::SmallVec; use syntax::{ - AstNode, AstToken, NodeOrToken, SmolStr, SyntaxNode, SyntaxToken, T, + AstNode, AstToken, NodeOrToken, SmolStr, SourceFile, SyntaxNode, SyntaxToken, T, ast::{self, AttrDocCommentIter, HasAttrs, IsString, TokenTreeChildren}, }; use tt::{TextRange, TextSize}; @@ -292,35 +292,69 @@ bitflags::bitflags! { } } +pub fn parse_extra_crate_attrs(db: &dyn DefDatabase, krate: Crate) -> Option { + let crate_data = krate.data(db); + let crate_attrs = &crate_data.crate_attrs; + if crate_attrs.is_empty() { + return None; + } + // All attributes are already enclosed in `#![]`. 
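An illustrative aside, not part of the patch: the crate-graph builder wraps each raw attribute via `format!("#![{raw_attr}]")` before storing it, so concatenating the stored attributes yields a string that parses as a source file consisting only of inner attributes. A minimal sketch, with the two attribute values made up for the example:

    let crate_attrs: Vec<Box<str>> = vec!["#![no_std]".into(), "#![cfg(test)]".into()];
    let combined: String = crate_attrs.concat();
    assert_eq!(combined, "#![no_std]#![cfg(test)]");

`SourceFile::parse(&combined, edition)` then exposes these through `.tree().attrs()`, which is what the surrounding function relies on.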
+ let combined = crate_attrs.concat(); + let p = SourceFile::parse(&combined, crate_data.edition); + + let errs = p.errors(); + if !errs.is_empty() { + let base_msg = "Failed to parse extra crate-level attribute"; + let crate_name = + krate.extra_data(db).display_name.as_ref().map_or("{unknown}", |name| name.as_str()); + let mut errs = errs.iter().peekable(); + let mut offset = TextSize::from(0); + for raw_attr in crate_attrs { + let attr_end = offset + TextSize::of(&**raw_attr); + if errs.peeking_take_while(|e| e.range().start() < attr_end).count() > 0 { + tracing::error!("{base_msg} {raw_attr} for crate {crate_name}"); + } + offset = attr_end + } + return None; + } + + Some(p.tree()) +} + fn attrs_source( db: &dyn DefDatabase, owner: AttrDefId, -) -> (InFile, Option>, Crate) { +) -> (InFile, Option>, Option, Crate) { let (owner, krate) = match owner { AttrDefId::ModuleId(id) => { let def_map = id.def_map(db); - let (definition, declaration) = match def_map[id].origin { + let krate = def_map.krate(); + let (definition, declaration, extra_crate_attrs) = match def_map[id].origin { ModuleOrigin::CrateRoot { definition } => { - let file = db.parse(definition).tree(); - (InFile::new(definition.into(), ast::AnyHasAttrs::from(file)), None) + let definition_source = db.parse(definition).tree(); + let definition = InFile::new(definition.into(), definition_source.into()); + let extra_crate_attrs = parse_extra_crate_attrs(db, krate); + (definition, None, extra_crate_attrs) } ModuleOrigin::File { declaration, declaration_tree_id, definition, .. } => { + let definition_source = db.parse(definition).tree(); + let definition = InFile::new(definition.into(), definition_source.into()); let declaration = InFile::new(declaration_tree_id.file_id(), declaration); let declaration = declaration.with_value(declaration.to_node(db)); - let definition_source = db.parse(definition).tree(); - (InFile::new(definition.into(), definition_source.into()), Some(declaration)) + (definition, Some(declaration), None) } ModuleOrigin::Inline { definition_tree_id, definition } => { let definition = InFile::new(definition_tree_id.file_id(), definition); let definition = definition.with_value(definition.to_node(db).into()); - (definition, None) + (definition, None, None) } ModuleOrigin::BlockExpr { block, .. 
} => { let definition = block.to_node(db); - (block.with_value(definition.into()), None) + (block.with_value(definition.into()), None, None) } }; - return (definition, declaration, def_map.krate()); + return (definition, declaration, extra_crate_attrs, krate); } AttrDefId::AdtId(AdtId::StructId(it)) => attrs_from_ast_id_loc(db, it), AttrDefId::AdtId(AdtId::UnionId(it)) => attrs_from_ast_id_loc(db, it), @@ -339,7 +373,7 @@ fn attrs_source( AttrDefId::ExternCrateId(it) => attrs_from_ast_id_loc(db, it), AttrDefId::UseId(it) => attrs_from_ast_id_loc(db, it), }; - (owner, None, krate) + (owner, None, None, krate) } fn collect_attrs( @@ -347,14 +381,15 @@ fn collect_attrs( owner: AttrDefId, mut callback: impl FnMut(Meta) -> ControlFlow, ) -> Option { - let (source, outer_mod_decl, krate) = attrs_source(db, owner); + let (source, outer_mod_decl, extra_crate_attrs, krate) = attrs_source(db, owner); + let extra_attrs = extra_crate_attrs + .into_iter() + .flat_map(|src| src.attrs()) + .chain(outer_mod_decl.into_iter().flat_map(|it| it.value.attrs())); let mut cfg_options = None; expand_cfg_attr( - outer_mod_decl - .into_iter() - .flat_map(|it| it.value.attrs()) - .chain(ast::attrs_including_inner(&source.value)), + extra_attrs.chain(ast::attrs_including_inner(&source.value)), || cfg_options.get_or_insert_with(|| krate.cfg_options(db)), move |meta, _, _, _| callback(meta), ) @@ -1013,10 +1048,12 @@ impl AttrFlags { pub fn doc_html_root_url(db: &dyn DefDatabase, krate: Crate) -> Option { let root_file_id = krate.root_file_id(db); let syntax = db.parse(root_file_id).tree(); + let extra_crate_attrs = + parse_extra_crate_attrs(db, krate).into_iter().flat_map(|src| src.attrs()); let mut cfg_options = None; expand_cfg_attr( - syntax.attrs(), + extra_crate_attrs.chain(syntax.attrs()), || cfg_options.get_or_insert(krate.cfg_options(db)), |attr, _, _, _| { if let Meta::TokenTree { path, tt } = attr @@ -1231,8 +1268,11 @@ impl AttrFlags { // We LRU this query because it is only used by IDE. #[salsa::tracked(returns(ref), lru = 250)] pub fn docs(db: &dyn DefDatabase, owner: AttrDefId) -> Option> { - let (source, outer_mod_decl, krate) = attrs_source(db, owner); + let (source, outer_mod_decl, _extra_crate_attrs, krate) = attrs_source(db, owner); let inner_attrs_node = source.value.inner_attributes_node(); + // Note: we don't have to pass down `_extra_crate_attrs` here, since `extract_docs` + // does not handle crate-level attributes related to docs. 
+ // See: https://doc.rust-lang.org/rustdoc/write-documentation/the-doc-attribute.html#at-the-crate-level extract_docs(&|| krate.cfg_options(db), source, outer_mod_decl, inner_attrs_node) } @@ -1480,8 +1520,9 @@ mod tests { use test_fixture::WithFixture; use tt::{TextRange, TextSize}; - use crate::attrs::IsInnerDoc; - use crate::{attrs::Docs, test_db::TestDB}; + use crate::AttrDefId; + use crate::attrs::{AttrFlags, Docs, IsInnerDoc}; + use crate::test_db::TestDB; #[test] fn docs() { @@ -1617,4 +1658,15 @@ mod tests { Some((in_file(range(263, 265)), IsInnerDoc::Yes)) ); } + + #[test] + fn crate_attrs() { + let fixture = r#" +//- /lib.rs crate:foo crate-attr:no_std crate-attr:cfg(target_arch="x86") + "#; + let (db, file_id) = TestDB::with_single_file(fixture); + let module = db.module_for_file(file_id.file_id(&db)); + let attrs = AttrFlags::query(&db, AttrDefId::ModuleId(module)); + assert!(attrs.contains(AttrFlags::IS_NO_STD | AttrFlags::HAS_CFG)); + } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs index 2a104fff2b92c..6eab8888d92d9 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs @@ -44,6 +44,7 @@ use std::{ }; use ast::{AstNode, StructKind}; +use cfg::CfgOptions; use hir_expand::{ ExpandTo, HirFileId, mod_path::{ModPath, PathKind}, @@ -52,13 +53,17 @@ use hir_expand::{ use intern::Interned; use la_arena::{Idx, RawIdx}; use rustc_hash::FxHashMap; -use span::{AstIdNode, Edition, FileAstId, SyntaxContext}; +use span::{ + AstIdNode, Edition, FileAstId, NO_DOWNMAP_ERASED_FILE_AST_ID_MARKER, Span, SpanAnchor, + SyntaxContext, +}; use stdx::never; -use syntax::{SyntaxKind, ast, match_ast}; +use syntax::{SourceFile, SyntaxKind, ast, match_ast}; use thin_vec::ThinVec; use triomphe::Arc; +use tt::TextRange; -use crate::{BlockId, Lookup, db::DefDatabase}; +use crate::{BlockId, Lookup, attrs::parse_extra_crate_attrs, db::DefDatabase}; pub(crate) use crate::item_tree::{ attrs::*, @@ -88,6 +93,33 @@ impl fmt::Debug for RawVisibilityId { } } +fn lower_extra_crate_attrs<'a>( + db: &dyn DefDatabase, + crate_attrs_as_src: SourceFile, + file_id: span::EditionedFileId, + cfg_options: &dyn Fn() -> &'a CfgOptions, +) -> AttrsOrCfg { + #[derive(Copy, Clone)] + struct FakeSpanMap { + file_id: span::EditionedFileId, + } + impl syntax_bridge::SpanMapper for FakeSpanMap { + fn span_for(&self, range: TextRange) -> Span { + Span { + range, + anchor: SpanAnchor { + file_id: self.file_id, + ast_id: NO_DOWNMAP_ERASED_FILE_AST_ID_MARKER, + }, + ctx: SyntaxContext::root(self.file_id.edition()), + } + } + } + + let span_map = FakeSpanMap { file_id }; + AttrsOrCfg::lower(db, &crate_attrs_as_src, cfg_options, span_map) +} + #[salsa_macros::tracked(returns(deref))] pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc { let _p = tracing::info_span!("file_item_tree_query", ?file_id).entered(); @@ -98,7 +130,19 @@ pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> let mut item_tree = match_ast! 
{ match syntax { ast::SourceFile(file) => { - let top_attrs = ctx.lower_attrs(&file); + let krate = file_id.krate(db); + let root_file_id = krate.root_file_id(db); + let extra_top_attrs = (file_id == root_file_id).then(|| { + parse_extra_crate_attrs(db, krate).map(|crate_attrs| { + let file_id = root_file_id.editioned_file_id(db); + lower_extra_crate_attrs(db, crate_attrs, file_id, &|| ctx.cfg_options()) + }) + }).flatten(); + let top_attrs = match extra_top_attrs { + Some(attrs @ AttrsOrCfg::Enabled { .. }) => attrs.merge(ctx.lower_attrs(&file)), + Some(attrs @ AttrsOrCfg::CfgDisabled(_)) => attrs, + None => ctx.lower_attrs(&file) + }; let mut item_tree = ctx.lower_module_items(&file); item_tree.top_attrs = top_attrs; item_tree diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/attrs.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/attrs.rs index 5c635a4b3831c..81a9b28b628eb 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/attrs.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/attrs.rs @@ -16,9 +16,9 @@ use hir_expand::{ attrs::{Attr, AttrId, AttrInput, Meta, collect_item_tree_attrs}, mod_path::ModPath, name::Name, - span_map::SpanMapRef, }; use intern::{Interned, Symbol, sym}; +use span::Span; use syntax::{AstNode, T, ast}; use syntax_bridge::DocCommentDesugarMode; use tt::token_to_literal; @@ -42,12 +42,15 @@ impl Default for AttrsOrCfg { } impl AttrsOrCfg { - pub(crate) fn lower<'a>( + pub(crate) fn lower<'a, S>( db: &dyn DefDatabase, owner: &dyn ast::HasAttrs, cfg_options: &dyn Fn() -> &'a CfgOptions, - span_map: SpanMapRef<'_>, - ) -> AttrsOrCfg { + span_map: S, + ) -> AttrsOrCfg + where + S: syntax_bridge::SpanMapper + Copy, + { let mut attrs = Vec::new(); let result = collect_item_tree_attrs::(owner, cfg_options, |meta, container, _, _| { @@ -55,17 +58,17 @@ impl AttrsOrCfg { // tracking. let (span, path_range, input) = match meta { Meta::NamedKeyValue { path_range, name: _, value } => { - let span = span_map.span_for_range(path_range); + let span = span_map.span_for(path_range); let input = value.map(|value| { Box::new(AttrInput::Literal(token_to_literal( value.text(), - span_map.span_for_range(value.text_range()), + span_map.span_for(value.text_range()), ))) }); (span, path_range, input) } Meta::TokenTree { path, tt } => { - let span = span_map.span_for_range(path.range); + let span = span_map.span_for(path.range); let tt = syntax_bridge::syntax_node_to_token_tree( tt.syntax(), span_map, @@ -76,7 +79,7 @@ impl AttrsOrCfg { (span, path.range, input) } Meta::Path { path } => { - let span = span_map.span_for_range(path.range); + let span = span_map.span_for(path.range); (span, path.range, None) } }; @@ -90,7 +93,7 @@ impl AttrsOrCfg { .filter(|it| it.kind().is_any_identifier()); ModPath::from_tokens( db, - &mut |range| span_map.span_for_range(range).ctx, + &mut |range| span_map.span_for(range).ctx, is_abs, segments, ) @@ -107,6 +110,44 @@ impl AttrsOrCfg { None => AttrsOrCfg::Enabled { attrs }, } } + + // Merges two `AttrsOrCfg`s, assuming `self` is placed before `other` in the source code. + // The operation follows these rules: + // + // - If `self` and `other` are both `AttrsOrCfg::Enabled`, the result is a new + // `AttrsOrCfg::Enabled`. It contains the concatenation of `self`'s attributes followed by + // `other`'s. + // - If `self` is `AttrsOrCfg::Enabled` but `other` is `AttrsOrCfg::CfgDisabled`, the result + // is a new `AttrsOrCfg::CfgDisabled`. 
It contains the concatenation of `self`'s attributes + // followed by `other`'s. + // - If `self` is `AttrsOrCfg::CfgDisabled`, return `self` as-is. + // + // The rationale is that attribute collection is sequential and order-sensitive. This operation + // preserves those semantics when combining attributes from two different sources. + // `AttrsOrCfg::CfgDisabled` marks a point where collection stops due to a false `#![cfg(...)]` + // condition. It acts as a "breakpoint": attributes beyond it are not collected. Therefore, + // when merging, an `AttrsOrCfg::CfgDisabled` on the left-hand side short-circuits the + // operation, while an `AttrsOrCfg::CfgDisabled` on the right-hand side preserves all + // attributes collected up to that point. + // + // Note that this operation is neither commutative nor associative. + pub(crate) fn merge(self, other: AttrsOrCfg) -> AttrsOrCfg { + match (self, other) { + (AttrsOrCfg::Enabled { attrs }, AttrsOrCfg::Enabled { attrs: other_attrs }) => { + let mut v = attrs.0.into_vec(); + v.extend(other_attrs.0); + AttrsOrCfg::Enabled { attrs: AttrsOwned(v.into_boxed_slice()) } + } + (AttrsOrCfg::Enabled { attrs }, AttrsOrCfg::CfgDisabled(mut other)) => { + let other_attrs = &mut other.1; + let mut v = attrs.0.into_vec(); + v.extend(std::mem::take(&mut other_attrs.0)); + other_attrs.0 = v.into_boxed_slice(); + AttrsOrCfg::CfgDisabled(other) + } + (this @ AttrsOrCfg::CfgDisabled(_), _) => this, + } + } } #[derive(Debug, PartialEq, Eq)] diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs index 66a2d14a734fe..c89299e6d863b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs @@ -86,9 +86,9 @@ impl Printer<'_> { } fn print_attrs(&mut self, attrs: &AttrsOrCfg, inner: bool, separated_by: &str) { - let AttrsOrCfg::Enabled { attrs } = attrs else { - w!(self, "#[cfg(false)]{separated_by}"); - return; + let (cfg_disabled_expr, attrs) = match attrs { + AttrsOrCfg::Enabled { attrs } => (None, attrs), + AttrsOrCfg::CfgDisabled(inner_box) => (Some(&inner_box.0), &inner_box.1), }; let inner = if inner { "!" 
} else { "" }; for attr in &*attrs.as_ref() { @@ -101,6 +101,9 @@ impl Printer<'_> { separated_by, ); } + if let Some(expr) = cfg_disabled_expr { + w!(self, "#{inner}[cfg({expr})]{separated_by}"); + } } fn print_attrs_of(&mut self, of: ModItemId, separated_by: &str) { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs index a57432f33c3dc..1926ed74e869e 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs @@ -244,3 +244,45 @@ pub(self) struct S; "#]], ) } + +#[test] +fn crate_attrs_should_preserve_order() { + check( + r#" +//- /main.rs crate:foo crate-attr:no_std crate-attr:features(f16) crate-attr:crate_type="bin" + "#, + expect![[r##" + #![no_std] + #![features(f16)] + #![crate_type = "bin"] + "##]], + ); +} + +#[test] +fn crate_attrs_with_disabled_cfg_injected() { + check( + r#" +//- /main.rs crate:foo crate-attr:no_std crate-attr:cfg(false) crate-attr:features(f16,f128) crate-attr:crate_type="bin" + "#, + expect![[r#" + #![no_std] + #![cfg(false)] + "#]], + ); +} + +#[test] +fn crate_attrs_with_disabled_cfg_in_source() { + check( + r#" +//- /lib.rs crate:foo crate-attr:no_std +#![cfg(false)] +#![no_core] + "#, + expect![[r#" + #![no_std] + #![cfg(false)] + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs index 9fdfb5f5b32e2..fd693477a4b4b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs @@ -154,16 +154,10 @@ pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: Crate) -> Option if traits.is_empty() { None } else { Some(traits.into_iter().collect()) } } -pub enum GenericRequirement { - None, - Minimum(usize), - Exact(usize), -} - macro_rules! language_item_table { ( $LangItems:ident => - $( $(#[$attr:meta])* $lang_item:ident, $module:ident :: $name:ident, $method:ident, $target:ident, $generics:expr; )* + $( $(#[$attr:meta])* $lang_item:ident, $module:ident :: $name:ident, $target:ident; )* ) => { #[allow(non_snake_case)] // FIXME: Should we remove this? #[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] @@ -226,100 +220,101 @@ macro_rules! language_item_table { } language_item_table! { LangItems => -// Variant name, Name, Getter method name, Target Generic requirements; - Sized, sym::sized, sized_trait, TraitId, GenericRequirement::Exact(0); - MetaSized, sym::meta_sized, sized_trait, TraitId, GenericRequirement::Exact(0); - PointeeSized, sym::pointee_sized, sized_trait, TraitId, GenericRequirement::Exact(0); - Unsize, sym::unsize, unsize_trait, TraitId, GenericRequirement::Minimum(1); +// Variant name, Name, Target; + Sized, sym::sized, TraitId; + MetaSized, sym::meta_sized, TraitId; + PointeeSized, sym::pointee_sized, TraitId; + Unsize, sym::unsize, TraitId; /// Trait injected by `#[derive(PartialEq)]`, (i.e. "Partial EQ"). - StructuralPeq, sym::structural_peq, structural_peq_trait, TraitId, GenericRequirement::None; + StructuralPeq, sym::structural_peq, TraitId; /// Trait injected by `#[derive(Eq)]`, (i.e. "Total EQ"; no, I will not apologize). 
- StructuralTeq, sym::structural_teq, structural_teq_trait, TraitId, GenericRequirement::None; - Copy, sym::copy, copy_trait, TraitId, GenericRequirement::Exact(0); - Clone, sym::clone, clone_trait, TraitId, GenericRequirement::None; - Sync, sym::sync, sync_trait, TraitId, GenericRequirement::Exact(0); - DiscriminantKind, sym::discriminant_kind, discriminant_kind_trait, TraitId, GenericRequirement::None; + StructuralTeq, sym::structural_teq, TraitId; + Copy, sym::copy, TraitId; + Clone, sym::clone, TraitId; + TrivialClone, sym::trivial_clone, TraitId; + Sync, sym::sync, TraitId; + DiscriminantKind, sym::discriminant_kind, TraitId; /// The associated item of the `DiscriminantKind` trait. - Discriminant, sym::discriminant_type, discriminant_type, TypeAliasId, GenericRequirement::None; + Discriminant, sym::discriminant_type, TypeAliasId; - PointeeTrait, sym::pointee_trait, pointee_trait, TraitId, GenericRequirement::None; - Metadata, sym::metadata_type, metadata_type, TypeAliasId, GenericRequirement::None; - DynMetadata, sym::dyn_metadata, dyn_metadata, StructId, GenericRequirement::None; + PointeeTrait, sym::pointee_trait, TraitId; + Metadata, sym::metadata_type, TypeAliasId; + DynMetadata, sym::dyn_metadata, StructId; - Freeze, sym::freeze, freeze_trait, TraitId, GenericRequirement::Exact(0); + Freeze, sym::freeze, TraitId; - FnPtrTrait, sym::fn_ptr_trait, fn_ptr_trait, TraitId, GenericRequirement::Exact(0); - FnPtrAddr, sym::fn_ptr_addr, fn_ptr_addr, FunctionId, GenericRequirement::None; + FnPtrTrait, sym::fn_ptr_trait, TraitId; + FnPtrAddr, sym::fn_ptr_addr, FunctionId; - Drop, sym::drop, drop_trait, TraitId, GenericRequirement::None; - Destruct, sym::destruct, destruct_trait, TraitId, GenericRequirement::None; + Drop, sym::drop, TraitId; + Destruct, sym::destruct, TraitId; - CoerceUnsized, sym::coerce_unsized, coerce_unsized_trait, TraitId, GenericRequirement::Minimum(1); - DispatchFromDyn, sym::dispatch_from_dyn, dispatch_from_dyn_trait, TraitId, GenericRequirement::Minimum(1); + CoerceUnsized, sym::coerce_unsized, TraitId; + DispatchFromDyn, sym::dispatch_from_dyn, TraitId; // language items relating to transmutability - TransmuteOpts, sym::transmute_opts, transmute_opts, StructId, GenericRequirement::Exact(0); - TransmuteTrait, sym::transmute_trait, transmute_trait, TraitId, GenericRequirement::Exact(3); - - Add, sym::add, add_trait, TraitId, GenericRequirement::Exact(1); - Sub, sym::sub, sub_trait, TraitId, GenericRequirement::Exact(1); - Mul, sym::mul, mul_trait, TraitId, GenericRequirement::Exact(1); - Div, sym::div, div_trait, TraitId, GenericRequirement::Exact(1); - Rem, sym::rem, rem_trait, TraitId, GenericRequirement::Exact(1); - Neg, sym::neg, neg_trait, TraitId, GenericRequirement::Exact(0); - Not, sym::not, not_trait, TraitId, GenericRequirement::Exact(0); - BitXor, sym::bitxor, bitxor_trait, TraitId, GenericRequirement::Exact(1); - BitAnd, sym::bitand, bitand_trait, TraitId, GenericRequirement::Exact(1); - BitOr, sym::bitor, bitor_trait, TraitId, GenericRequirement::Exact(1); - Shl, sym::shl, shl_trait, TraitId, GenericRequirement::Exact(1); - Shr, sym::shr, shr_trait, TraitId, GenericRequirement::Exact(1); - AddAssign, sym::add_assign, add_assign_trait, TraitId, GenericRequirement::Exact(1); - SubAssign, sym::sub_assign, sub_assign_trait, TraitId, GenericRequirement::Exact(1); - MulAssign, sym::mul_assign, mul_assign_trait, TraitId, GenericRequirement::Exact(1); - DivAssign, sym::div_assign, div_assign_trait, TraitId, GenericRequirement::Exact(1); - RemAssign, 
sym::rem_assign, rem_assign_trait, TraitId, GenericRequirement::Exact(1); - BitXorAssign, sym::bitxor_assign, bitxor_assign_trait, TraitId, GenericRequirement::Exact(1); - BitAndAssign, sym::bitand_assign, bitand_assign_trait, TraitId, GenericRequirement::Exact(1); - BitOrAssign, sym::bitor_assign, bitor_assign_trait, TraitId, GenericRequirement::Exact(1); - ShlAssign, sym::shl_assign, shl_assign_trait, TraitId, GenericRequirement::Exact(1); - ShrAssign, sym::shr_assign, shr_assign_trait, TraitId, GenericRequirement::Exact(1); - Index, sym::index, index_trait, TraitId, GenericRequirement::Exact(1); - IndexMut, sym::index_mut, index_mut_trait, TraitId, GenericRequirement::Exact(1); - - UnsafeCell, sym::unsafe_cell, unsafe_cell_type, StructId, GenericRequirement::None; - UnsafePinned, sym::unsafe_pinned, unsafe_pinned_type, StructId, GenericRequirement::None; - VaList, sym::va_list, va_list, StructId, GenericRequirement::None; - - Deref, sym::deref, deref_trait, TraitId, GenericRequirement::Exact(0); - DerefMut, sym::deref_mut, deref_mut_trait, TraitId, GenericRequirement::Exact(0); - DerefTarget, sym::deref_target, deref_target, TypeAliasId, GenericRequirement::None; - Receiver, sym::receiver, receiver_trait, TraitId, GenericRequirement::None; - ReceiverTarget, sym::receiver_target, receiver_target, TypeAliasId, GenericRequirement::None; - - Fn, sym::fn_, fn_trait, TraitId, GenericRequirement::Exact(1); - FnMut, sym::fn_mut, fn_mut_trait, TraitId, GenericRequirement::Exact(1); - FnOnce, sym::fn_once, fn_once_trait, TraitId, GenericRequirement::Exact(1); - AsyncFn, sym::async_fn, async_fn_trait, TraitId, GenericRequirement::Exact(1); - AsyncFnMut, sym::async_fn_mut, async_fn_mut_trait, TraitId, GenericRequirement::Exact(1); - AsyncFnOnce, sym::async_fn_once, async_fn_once_trait, TraitId, GenericRequirement::Exact(1); - - CallRefFuture, sym::call_ref_future, call_ref_future_ty, TypeAliasId, GenericRequirement::None; - CallOnceFuture, sym::call_once_future, call_once_future_ty, TypeAliasId, GenericRequirement::None; - AsyncFnOnceOutput, sym::async_fn_once_output, async_fn_once_output_ty, TypeAliasId, GenericRequirement::None; - - FnOnceOutput, sym::fn_once_output, fn_once_output, TypeAliasId, GenericRequirement::None; - - Future, sym::future_trait, future_trait, TraitId, GenericRequirement::Exact(0); - CoroutineState, sym::coroutine_state, coroutine_state, EnumId, GenericRequirement::None; - Coroutine, sym::coroutine, coroutine_trait, TraitId, GenericRequirement::Minimum(1); - CoroutineReturn, sym::coroutine_return, coroutine_return_ty, TypeAliasId, GenericRequirement::None; - CoroutineYield, sym::coroutine_yield, coroutine_yield_ty, TypeAliasId, GenericRequirement::None; - Unpin, sym::unpin, unpin_trait, TraitId, GenericRequirement::None; - Pin, sym::pin, pin_type, StructId, GenericRequirement::None; - - PartialEq, sym::eq, eq_trait, TraitId, GenericRequirement::Exact(1); - PartialOrd, sym::partial_ord, partial_ord_trait, TraitId, GenericRequirement::Exact(1); - CVoid, sym::c_void, c_void, EnumId, GenericRequirement::None; + TransmuteOpts, sym::transmute_opts, StructId; + TransmuteTrait, sym::transmute_trait, TraitId; + + Add, sym::add, TraitId; + Sub, sym::sub, TraitId; + Mul, sym::mul, TraitId; + Div, sym::div, TraitId; + Rem, sym::rem, TraitId; + Neg, sym::neg, TraitId; + Not, sym::not, TraitId; + BitXor, sym::bitxor, TraitId; + BitAnd, sym::bitand, TraitId; + BitOr, sym::bitor, TraitId; + Shl, sym::shl, TraitId; + Shr, sym::shr, TraitId; + AddAssign, sym::add_assign, TraitId; + SubAssign, 
sym::sub_assign, TraitId; + MulAssign, sym::mul_assign, TraitId; + DivAssign, sym::div_assign, TraitId; + RemAssign, sym::rem_assign, TraitId; + BitXorAssign, sym::bitxor_assign, TraitId; + BitAndAssign, sym::bitand_assign, TraitId; + BitOrAssign, sym::bitor_assign, TraitId; + ShlAssign, sym::shl_assign, TraitId; + ShrAssign, sym::shr_assign, TraitId; + Index, sym::index, TraitId; + IndexMut, sym::index_mut, TraitId; + + UnsafeCell, sym::unsafe_cell, StructId; + UnsafePinned, sym::unsafe_pinned, StructId; + VaList, sym::va_list, StructId; + + Deref, sym::deref, TraitId; + DerefMut, sym::deref_mut, TraitId; + DerefTarget, sym::deref_target, TypeAliasId; + Receiver, sym::receiver, TraitId; + ReceiverTarget, sym::receiver_target, TypeAliasId; + + Fn, sym::fn_, TraitId; + FnMut, sym::fn_mut, TraitId; + FnOnce, sym::fn_once, TraitId; + AsyncFn, sym::async_fn, TraitId; + AsyncFnMut, sym::async_fn_mut, TraitId; + AsyncFnOnce, sym::async_fn_once, TraitId; + + CallRefFuture, sym::call_ref_future, TypeAliasId; + CallOnceFuture, sym::call_once_future, TypeAliasId; + AsyncFnOnceOutput, sym::async_fn_once_output, TypeAliasId; + + FnOnceOutput, sym::fn_once_output, TypeAliasId; + + Future, sym::future_trait, TraitId; + CoroutineState, sym::coroutine_state, EnumId; + Coroutine, sym::coroutine, TraitId; + CoroutineReturn, sym::coroutine_return, TypeAliasId; + CoroutineYield, sym::coroutine_yield, TypeAliasId; + Unpin, sym::unpin, TraitId; + Pin, sym::pin, StructId; + + PartialEq, sym::eq, TraitId; + PartialOrd, sym::partial_ord, TraitId; + CVoid, sym::c_void, EnumId; // A number of panic-related lang items. The `panic` item corresponds to divide-by-zero and // various panic cases with `match`. The `panic_bounds_check` item is for indexing arrays. @@ -328,107 +323,107 @@ language_item_table! { LangItems => // in the sense that a crate is not required to have it defined to use it, but a final product // is required to define it somewhere. Additionally, there are restrictions on crates that use // a weak lang item, but do not have it defined. 
- Panic, sym::panic, panic_fn, FunctionId, GenericRequirement::Exact(0); - PanicNounwind, sym::panic_nounwind, panic_nounwind, FunctionId, GenericRequirement::Exact(0); - PanicFmt, sym::panic_fmt, panic_fmt, FunctionId, GenericRequirement::None; - PanicDisplay, sym::panic_display, panic_display, FunctionId, GenericRequirement::None; - ConstPanicFmt, sym::const_panic_fmt, const_panic_fmt, FunctionId, GenericRequirement::None; - PanicBoundsCheck, sym::panic_bounds_check, panic_bounds_check_fn, FunctionId, GenericRequirement::Exact(0); - PanicMisalignedPointerDereference, sym::panic_misaligned_pointer_dereference, panic_misaligned_pointer_dereference_fn, FunctionId, GenericRequirement::Exact(0); - PanicInfo, sym::panic_info, panic_info, StructId, GenericRequirement::None; - PanicLocation, sym::panic_location, panic_location, StructId, GenericRequirement::None; - PanicImpl, sym::panic_impl, panic_impl, FunctionId, GenericRequirement::None; - PanicCannotUnwind, sym::panic_cannot_unwind, panic_cannot_unwind, FunctionId, GenericRequirement::Exact(0); - PanicNullPointerDereference, sym::panic_null_pointer_dereference, panic_null_pointer_dereference, FunctionId, GenericRequirement::None; + Panic, sym::panic, FunctionId; + PanicNounwind, sym::panic_nounwind, FunctionId; + PanicFmt, sym::panic_fmt, FunctionId; + PanicDisplay, sym::panic_display, FunctionId; + ConstPanicFmt, sym::const_panic_fmt, FunctionId; + PanicBoundsCheck, sym::panic_bounds_check, FunctionId; + PanicMisalignedPointerDereference, sym::panic_misaligned_pointer_dereference, FunctionId; + PanicInfo, sym::panic_info, StructId; + PanicLocation, sym::panic_location, StructId; + PanicImpl, sym::panic_impl, FunctionId; + PanicCannotUnwind, sym::panic_cannot_unwind, FunctionId; + PanicNullPointerDereference, sym::panic_null_pointer_dereference, FunctionId; /// libstd panic entry point. Necessary for const eval to be able to catch it - BeginPanic, sym::begin_panic, begin_panic_fn, FunctionId, GenericRequirement::None; + BeginPanic, sym::begin_panic, FunctionId; // Lang items needed for `format_args!()`. 
- FormatAlignment, sym::format_alignment, format_alignment, EnumId, GenericRequirement::None; - FormatArgument, sym::format_argument, format_argument, StructId, GenericRequirement::None; - FormatArguments, sym::format_arguments, format_arguments, StructId, GenericRequirement::None; - FormatCount, sym::format_count, format_count, EnumId, GenericRequirement::None; - FormatPlaceholder, sym::format_placeholder, format_placeholder, StructId, GenericRequirement::None; - FormatUnsafeArg, sym::format_unsafe_arg, format_unsafe_arg, StructId, GenericRequirement::None; + FormatAlignment, sym::format_alignment, EnumId; + FormatArgument, sym::format_argument, StructId; + FormatArguments, sym::format_arguments, StructId; + FormatCount, sym::format_count, EnumId; + FormatPlaceholder, sym::format_placeholder, StructId; + FormatUnsafeArg, sym::format_unsafe_arg, StructId; - ExchangeMalloc, sym::exchange_malloc, exchange_malloc_fn, FunctionId, GenericRequirement::None; - BoxFree, sym::box_free, box_free_fn, FunctionId, GenericRequirement::Minimum(1); - DropInPlace, sym::drop_in_place, drop_in_place_fn, FunctionId, GenericRequirement::Minimum(1); - AllocLayout, sym::alloc_layout, alloc_layout, StructId, GenericRequirement::None; + ExchangeMalloc, sym::exchange_malloc, FunctionId; + BoxFree, sym::box_free, FunctionId; + DropInPlace, sym::drop_in_place, FunctionId; + AllocLayout, sym::alloc_layout, StructId; - Start, sym::start, start_fn, FunctionId, GenericRequirement::Exact(1); + Start, sym::start, FunctionId; - EhPersonality, sym::eh_personality, eh_personality, FunctionId, GenericRequirement::None; - EhCatchTypeinfo, sym::eh_catch_typeinfo, eh_catch_typeinfo, StaticId, GenericRequirement::None; + EhPersonality, sym::eh_personality, FunctionId; + EhCatchTypeinfo, sym::eh_catch_typeinfo, StaticId; - OwnedBox, sym::owned_box, owned_box, StructId, GenericRequirement::Minimum(1); + OwnedBox, sym::owned_box, StructId; - PhantomData, sym::phantom_data, phantom_data, StructId, GenericRequirement::Exact(1); + PhantomData, sym::phantom_data, StructId; - ManuallyDrop, sym::manually_drop, manually_drop, StructId, GenericRequirement::None; + ManuallyDrop, sym::manually_drop, StructId; - MaybeUninit, sym::maybe_uninit, maybe_uninit, UnionId, GenericRequirement::None; + MaybeUninit, sym::maybe_uninit, UnionId; /// Align offset for stride != 1; must not panic. 
- AlignOffset, sym::align_offset, align_offset_fn, FunctionId, GenericRequirement::None; + AlignOffset, sym::align_offset, FunctionId; - Termination, sym::termination, termination, TraitId, GenericRequirement::None; + Termination, sym::termination, TraitId; - Try, sym::Try, try_trait, TraitId, GenericRequirement::None; + Try, sym::Try, TraitId; - Tuple, sym::tuple_trait, tuple_trait, TraitId, GenericRequirement::Exact(0); + Tuple, sym::tuple_trait, TraitId; - SliceLen, sym::slice_len_fn, slice_len_fn, FunctionId, GenericRequirement::None; + SliceLen, sym::slice_len_fn, FunctionId; // Language items from AST lowering - TryTraitFromResidual, sym::from_residual, from_residual_fn, FunctionId, GenericRequirement::None; - TryTraitFromOutput, sym::from_output, from_output_fn, FunctionId, GenericRequirement::None; - TryTraitBranch, sym::branch, branch_fn, FunctionId, GenericRequirement::None; - TryTraitFromYeet, sym::from_yeet, from_yeet_fn, FunctionId, GenericRequirement::None; + TryTraitFromResidual, sym::from_residual, FunctionId; + TryTraitFromOutput, sym::from_output, FunctionId; + TryTraitBranch, sym::branch, FunctionId; + TryTraitFromYeet, sym::from_yeet, FunctionId; - PointerLike, sym::pointer_like, pointer_like, TraitId, GenericRequirement::Exact(0); + PointerLike, sym::pointer_like, TraitId; - ConstParamTy, sym::const_param_ty, const_param_ty_trait, TraitId, GenericRequirement::Exact(0); + ConstParamTy, sym::const_param_ty, TraitId; - Poll, sym::Poll, poll, EnumId, GenericRequirement::None; - PollReady, sym::Ready, poll_ready_variant, EnumVariantId, GenericRequirement::None; - PollPending, sym::Pending, poll_pending_variant, EnumVariantId, GenericRequirement::None; + Poll, sym::Poll, EnumId; + PollReady, sym::Ready, EnumVariantId; + PollPending, sym::Pending, EnumVariantId; // FIXME(swatinem): the following lang items are used for async lowering and // should become obsolete eventually. 
- ResumeTy, sym::ResumeTy, resume_ty, StructId, GenericRequirement::None; - GetContext, sym::get_context, get_context_fn, FunctionId, GenericRequirement::None; + ResumeTy, sym::ResumeTy, StructId; + GetContext, sym::get_context, FunctionId; - Context, sym::Context, context, StructId, GenericRequirement::None; - FuturePoll, sym::poll, future_poll_fn, FunctionId, GenericRequirement::None; - FutureOutput, sym::future_output, future_output, TypeAliasId, GenericRequirement::None; + Context, sym::Context, StructId; + FuturePoll, sym::poll, FunctionId; + FutureOutput, sym::future_output, TypeAliasId; - Option, sym::Option, option_type, EnumId, GenericRequirement::None; - OptionSome, sym::Some, option_some_variant, EnumVariantId, GenericRequirement::None; - OptionNone, sym::None, option_none_variant, EnumVariantId, GenericRequirement::None; + Option, sym::Option, EnumId; + OptionSome, sym::Some, EnumVariantId; + OptionNone, sym::None, EnumVariantId; - ResultOk, sym::Ok, result_ok_variant, EnumVariantId, GenericRequirement::None; - ResultErr, sym::Err, result_err_variant, EnumVariantId, GenericRequirement::None; + ResultOk, sym::Ok, EnumVariantId; + ResultErr, sym::Err, EnumVariantId; - ControlFlowContinue, sym::Continue, cf_continue_variant, EnumVariantId, GenericRequirement::None; - ControlFlowBreak, sym::Break, cf_break_variant, EnumVariantId, GenericRequirement::None; + ControlFlowContinue, sym::Continue, EnumVariantId; + ControlFlowBreak, sym::Break, EnumVariantId; - IntoFutureIntoFuture, sym::into_future, into_future_fn, FunctionId, GenericRequirement::None; - IntoIterIntoIter, sym::into_iter, into_iter_fn, FunctionId, GenericRequirement::None; - IteratorNext, sym::next, next_fn, FunctionId, GenericRequirement::None; - Iterator, sym::iterator, iterator, TraitId, GenericRequirement::None; + IntoFutureIntoFuture, sym::into_future, FunctionId; + IntoIterIntoIter, sym::into_iter, FunctionId; + IteratorNext, sym::next, FunctionId; + Iterator, sym::iterator, TraitId; - PinNewUnchecked, sym::new_unchecked, new_unchecked_fn, FunctionId, GenericRequirement::None; + PinNewUnchecked, sym::new_unchecked, FunctionId; - RangeFrom, sym::RangeFrom, range_from_struct, StructId, GenericRequirement::None; - RangeFull, sym::RangeFull, range_full_struct, StructId, GenericRequirement::None; - RangeInclusiveStruct, sym::RangeInclusive, range_inclusive_struct, StructId, GenericRequirement::None; - RangeInclusiveNew, sym::range_inclusive_new, range_inclusive_new_method, FunctionId, GenericRequirement::None; - Range, sym::Range, range_struct, StructId, GenericRequirement::None; - RangeToInclusive, sym::RangeToInclusive, range_to_inclusive_struct, StructId, GenericRequirement::None; - RangeTo, sym::RangeTo, range_to_struct, StructId, GenericRequirement::None; + RangeFrom, sym::RangeFrom, StructId; + RangeFull, sym::RangeFull, StructId; + RangeInclusiveStruct, sym::RangeInclusive, StructId; + RangeInclusiveNew, sym::range_inclusive_new, FunctionId; + Range, sym::Range, StructId; + RangeToInclusive, sym::RangeToInclusive, StructId; + RangeTo, sym::RangeTo, StructId; - String, sym::String, string, StructId, GenericRequirement::None; - CStr, sym::CStr, c_str, StructId, GenericRequirement::None; - Ordering, sym::Ordering, ordering, EnumId, GenericRequirement::None; + String, sym::String, StructId; + CStr, sym::CStr, StructId; + Ordering, sym::Ordering, EnumId; } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs 
b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs index e2022c7967d86..ddabb50251a4d 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs @@ -108,6 +108,42 @@ fn main() { ); } +#[test] +fn ty_fragment_followed_by_expr() { + check( + r#" +macro_rules! a { + ($t:tt) => {}; +} + +macro_rules! b { + ($t:ty) => { + a!($t); + }; +} + +fn main() { + b!(&'static str); +} +"#, + expect![[r#" +macro_rules! a { + ($t:tt) => {}; +} + +macro_rules! b { + ($t:ty) => { + a!($t); + }; +} + +fn main() { + a!(&'static str);; +} +"#]], + ); +} + #[test] fn test_winapi_struct() { // from https://github.com/retep998/winapi-rs/blob/a7ef2bca086aae76cf6c4ce4c2552988ed9798ad/src/macros.rs#L366 diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs index 78af976e1b132..59bd9474a9595 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -19,7 +19,7 @@ use std::{any::TypeId, iter, ops::Range, sync}; use base_db::RootQueryDb; use expect_test::Expect; use hir_expand::{ - AstId, InFile, MacroCallId, MacroCallKind, MacroKind, + AstId, ExpansionInfo, InFile, MacroCallId, MacroCallKind, MacroKind, builtin::quote::quote, db::ExpandDatabase, proc_macro::{ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind}, @@ -27,7 +27,10 @@ use hir_expand::{ }; use intern::{Symbol, sym}; use itertools::Itertools; -use span::{Edition, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SyntaxContext}; +use span::{ + Edition, NO_DOWNMAP_ERASED_FILE_AST_ID_MARKER, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, + SyntaxContext, +}; use stdx::{format_to, format_to_acc}; use syntax::{ AstNode, AstPtr, @@ -97,37 +100,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream }, )]; - fn resolve( - db: &dyn DefDatabase, - def_map: &DefMap, - ast_id: AstId, - ast_ptr: InFile>, - ) -> Option { - def_map.modules().find_map(|module| { - for decl in - module.1.scope.declarations().chain(module.1.scope.unnamed_consts().map(Into::into)) - { - let body = match decl { - ModuleDefId::FunctionId(it) => it.into(), - ModuleDefId::ConstId(it) => it.into(), - ModuleDefId::StaticId(it) => it.into(), - _ => continue, - }; - - let (body, sm) = db.body_with_source_map(body); - if let Some(it) = - body.blocks(db).find_map(|block| resolve(db, block.1, ast_id, ast_ptr)) - { - return Some(it); - } - if let Some((_, res)) = sm.macro_calls().find(|it| it.0 == ast_ptr) { - return Some(res); - } - } - module.1.scope.macro_invoc(ast_id) - }) - } - let db = TestDB::with_files_extra_proc_macros(ra_fixture, extra_proc_macros); let krate = db.fetch_test_crate(); let def_map = crate_def_map(&db, krate); @@ -144,7 +116,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream let ast_id = db.ast_id_map(source.file_id).ast_id(¯o_call_node); let ast_id = InFile::new(source.file_id, ast_id); let ptr = InFile::new(source.file_id, AstPtr::new(¯o_call_node)); - let macro_call_id = resolve(&db, def_map, ast_id, ptr) + let macro_call_id = resolve_macro_call_id(&db, def_map, ast_id, ptr) .unwrap_or_else(|| panic!("unable to find semantic macro call {macro_call_node}")); let expansion_result = db.parse_macro_expansion(macro_call_id); 
expansions.push((macro_call_node.clone(), expansion_result)); @@ -278,6 +250,38 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream expect.assert_eq(&expanded_text); } +fn resolve_macro_call_id( + db: &dyn DefDatabase, + def_map: &DefMap, + ast_id: AstId, + ast_ptr: InFile>, +) -> Option { + def_map.modules().find_map(|module| { + for decl in + module.1.scope.declarations().chain(module.1.scope.unnamed_consts().map(Into::into)) + { + let body = match decl { + ModuleDefId::FunctionId(it) => it.into(), + ModuleDefId::ConstId(it) => it.into(), + ModuleDefId::StaticId(it) => it.into(), + _ => continue, + }; + + let (body, sm) = db.body_with_source_map(body); + if let Some(it) = body + .blocks(db) + .find_map(|block| resolve_macro_call_id(db, block.1, ast_id, ast_ptr)) + { + return Some(it); + } + if let Some((_, res)) = sm.macro_calls().find(|it| it.0 == ast_ptr) { + return Some(res); + } + } + module.1.scope.macro_invoc(ast_id) + }) +} + fn reindent(indent: IndentLevel, pp: String) -> String { if !pp.contains('\n') { return pp; @@ -430,3 +434,47 @@ fn regression_20171() { Edition::CURRENT }); } + +#[test] +fn no_downmap() { + let fixture = r#" +macro_rules! m { + ($func_name:ident) => { + fn $func_name() { todo!() } + }; +} +m!(f); +m!(g); + "#; + + let (db, file_id) = TestDB::with_single_file(fixture); + let krate = file_id.krate(&db); + let def_map = crate_def_map(&db, krate); + let source = def_map[def_map.root].definition_source(&db); + let source_file = match source.value { + ModuleSource::SourceFile(it) => it, + ModuleSource::Module(_) | ModuleSource::BlockExpr(_) => panic!(), + }; + let no_downmap_spans: Vec<_> = source_file + .syntax() + .descendants() + .map(|node| { + let mut span = db.real_span_map(file_id).span_for_range(node.text_range()); + span.anchor.ast_id = NO_DOWNMAP_ERASED_FILE_AST_ID_MARKER; + span + }) + .collect(); + + for macro_call_node in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) { + let ast_id = db.ast_id_map(source.file_id).ast_id(¯o_call_node); + let ast_id = InFile::new(source.file_id, ast_id); + let ptr = InFile::new(source.file_id, AstPtr::new(¯o_call_node)); + let macro_call_id = resolve_macro_call_id(&db, def_map, ast_id, ptr) + .unwrap_or_else(|| panic!("unable to find semantic macro call {macro_call_node}")); + let expansion_info = ExpansionInfo::new(&db, macro_call_id); + for &span in no_downmap_spans.iter() { + assert!(expansion_info.map_range_down(span).is_none()); + assert!(expansion_info.map_range_down_exact(span).is_none()); + } + } +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs index ec05c02bd6359..1cbd2c10b5a1b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs @@ -63,7 +63,7 @@ impl DefMap { return Ok(ResolvedAttr::Other); } } - None => return Err(UnresolvedMacro { path: ast_id.path.as_ref().clone() }), + None => return Err(UnresolvedMacro { path: (*ast_id.path).clone() }), }; Ok(ResolvedAttr::Macro(attr_macro_as_call_id( @@ -145,7 +145,7 @@ pub(super) fn derive_macro_as_call_id( ) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> { let (macro_id, def_id) = resolver(&item_attr.path) .filter(|(_, def_id)| def_id.is_derive()) - .ok_or_else(|| UnresolvedMacro { path: item_attr.path.as_ref().clone() })?; + .ok_or_else(|| UnresolvedMacro { path: 
(*item_attr.path).clone() })?; let call_id = def_id.make_call( db, krate, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs index 08edf41c56947..7e1ec526a7bc7 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs @@ -1675,7 +1675,7 @@ impl<'db> DefCollector<'db> { derive_index: *derive_pos as u32, derive_macro_id: *derive_macro_id, }, - ast_id.path.as_ref().clone(), + (*ast_id.path).clone(), )); } // These are diagnosed by `reseed_with_unresolved_attribute`, as that function consumes them @@ -2608,4 +2608,17 @@ foo!(KABOOM); "#, ); } + + #[test] + fn crate_attrs() { + let fixture = r#" +//- /lib.rs crate:foo crate-attr:recursion_limit="4" crate-attr:no_core crate-attr:no_std crate-attr:feature(register_tool) + "#; + let (db, file_id) = TestDB::with_single_file(fixture); + let def_map = crate_def_map(&db, file_id.krate(&db)); + assert_eq!(def_map.recursion_limit(), 4); + assert!(def_map.is_no_core()); + assert!(def_map.is_no_std()); + assert!(def_map.is_unstable_feature_enabled(&sym::register_tool)); + } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs index 57243346019f5..225ba958634eb 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs @@ -76,6 +76,7 @@ pub const BAZ: u32 = 0; None, Env::default(), CrateOrigin::Local { repo: None, name: Some(Symbol::intern(crate_name)) }, + Vec::new(), false, Arc::new( // FIXME: This is less than ideal @@ -117,6 +118,7 @@ pub const BAZ: u32 = 0; expect![[r#" [ "crate_local_def_map", + "file_item_tree_query", "crate_local_def_map", ] "#]], diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs index c92e41f5070c8..047996c978539 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs @@ -37,7 +37,9 @@ use std::{hash::Hash, ops}; use base_db::Crate; use either::Either; -use span::{Edition, ErasedFileAstId, FileAstId, Span, SyntaxContext}; +use span::{ + Edition, ErasedFileAstId, FileAstId, NO_DOWNMAP_ERASED_FILE_AST_ID_MARKER, Span, SyntaxContext, +}; use syntax::{ SyntaxNode, SyntaxToken, TextRange, TextSize, ast::{self, AstNode}, @@ -854,6 +856,10 @@ impl ExpansionInfo { &self, span: Span, ) -> Option + '_>> { + if span.anchor.ast_id == NO_DOWNMAP_ERASED_FILE_AST_ID_MARKER { + return None; + } + let tokens = self.exp_map.ranges_with_span_exact(span).flat_map(move |(range, ctx)| { self.expanded.value.covering_element(range).into_token().zip(Some(ctx)) }); @@ -869,6 +875,10 @@ impl ExpansionInfo { &self, span: Span, ) -> Option + '_>> { + if span.anchor.ast_id == NO_DOWNMAP_ERASED_FILE_AST_ID_MARKER { + return None; + } + let tokens = self.exp_map.ranges_with_span(span).flat_map(move |(range, ctx)| { self.expanded.value.covering_element(range).into_token().zip(Some(ctx)) }); diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs index 217d991d110d5..1e5efb6e146ff 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs @@ -197,6 +197,10 @@ impl Name { pub fn symbol(&self) -> &Symbol { 
&self.symbol } + + pub fn is_generated(&self) -> bool { + self.as_str().starts_with("") + } } struct Display<'a> { diff --git a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml index c60ecef58eafb..238d1b08ae4fe 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml @@ -18,6 +18,8 @@ itertools.workspace = true arrayvec.workspace = true smallvec.workspace = true ena = "0.14.3" +serde.workspace = true +serde_derive.workspace = true either.workspace = true oorandom = "11.1.5" tracing = { workspace = true, features = ["attributes"] } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs index 012632aa55b5c..f11240e0f78ce 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs @@ -23,8 +23,9 @@ use crate::{ mir::{MirEvalError, MirLowerError}, next_solver::{ Const, ConstBytes, ConstKind, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, - ParamEnv, Ty, ValueConst, + ParamEnv, StoredConst, StoredGenericArgs, Ty, ValueConst, }, + traits::StoredParamEnvAndCrate, }; use super::mir::{interpret_mir, lower_to_mir, pad16}; @@ -38,12 +39,12 @@ pub fn unknown_const_as_generic<'db>(ty: Ty<'db>) -> GenericArg<'db> { } #[derive(Debug, Clone, PartialEq, Eq)] -pub enum ConstEvalError<'db> { - MirLowerError(MirLowerError<'db>), - MirEvalError(MirEvalError<'db>), +pub enum ConstEvalError { + MirLowerError(MirLowerError), + MirEvalError(MirEvalError), } -impl ConstEvalError<'_> { +impl ConstEvalError { pub fn pretty_print( &self, f: &mut String, @@ -62,8 +63,8 @@ impl ConstEvalError<'_> { } } -impl<'db> From> for ConstEvalError<'db> { - fn from(value: MirLowerError<'db>) -> Self { +impl From for ConstEvalError { + fn from(value: MirLowerError) -> Self { match value { MirLowerError::ConstEvalError(_, e) => *e, _ => ConstEvalError::MirLowerError(value), @@ -71,8 +72,8 @@ impl<'db> From> for ConstEvalError<'db> { } } -impl<'db> From> for ConstEvalError<'db> { - fn from(value: MirEvalError<'db>) -> Self { +impl From for ConstEvalError { + fn from(value: MirEvalError) -> Self { ConstEvalError::MirEvalError(value) } } @@ -85,7 +86,8 @@ pub fn intern_const_ref<'a>( krate: Crate, ) -> Const<'a> { let interner = DbInterner::new_no_crate(db); - let layout = db.layout_of_ty(ty, ParamEnvAndCrate { param_env: ParamEnv::empty(), krate }); + let layout = db + .layout_of_ty(ty.store(), ParamEnvAndCrate { param_env: ParamEnv::empty(), krate }.store()); let kind = match value { LiteralConstRef::Int(i) => { // FIXME: We should handle failure of layout better. 
@@ -180,10 +182,10 @@ pub fn try_const_isize<'db>(db: &'db dyn HirDatabase, c: &Const<'db>) -> Option< } } -pub(crate) fn const_eval_discriminant_variant<'db>( - db: &'db dyn HirDatabase, +pub(crate) fn const_eval_discriminant_variant( + db: &dyn HirDatabase, variant_id: EnumVariantId, -) -> Result> { +) -> Result { let interner = DbInterner::new_no_crate(db); let def = variant_id.into(); let body = db.body(def); @@ -206,8 +208,9 @@ pub(crate) fn const_eval_discriminant_variant<'db>( let mir_body = db.monomorphized_mir_body( def, - GenericArgs::new_from_iter(interner, []), - ParamEnvAndCrate { param_env: db.trait_environment_for_body(def), krate: def.krate(db) }, + GenericArgs::empty(interner).store(), + ParamEnvAndCrate { param_env: db.trait_environment_for_body(def), krate: def.krate(db) } + .store(), )?; let c = interpret_mir(db, mir_body, false, None)?.0?; let c = if is_signed { @@ -233,7 +236,7 @@ pub(crate) fn eval_to_const<'db>(expr: ExprId, ctx: &mut InferenceContext<'_, 'd } if has_closure(ctx.body, expr) { // Type checking clousres need an isolated body (See the above FIXME). Bail out early to prevent panic. - return unknown_const(infer[expr]); + return Const::error(ctx.interner()); } if let Expr::Path(p) = &ctx.body[expr] { let mut ctx = TyLoweringContext::new( @@ -252,60 +255,89 @@ pub(crate) fn eval_to_const<'db>(expr: ExprId, ctx: &mut InferenceContext<'_, 'd { return result; } - unknown_const(infer[expr]) -} - -pub(crate) fn const_eval_cycle_result<'db>( - _: &'db dyn HirDatabase, - _: ConstId, - _: GenericArgs<'db>, - _: Option>, -) -> Result, ConstEvalError<'db>> { - Err(ConstEvalError::MirLowerError(MirLowerError::Loop)) -} - -pub(crate) fn const_eval_static_cycle_result<'db>( - _: &'db dyn HirDatabase, - _: StaticId, -) -> Result, ConstEvalError<'db>> { - Err(ConstEvalError::MirLowerError(MirLowerError::Loop)) + Const::error(ctx.interner()) } -pub(crate) fn const_eval_discriminant_cycle_result<'db>( - _: &'db dyn HirDatabase, +pub(crate) fn const_eval_discriminant_cycle_result( + _: &dyn HirDatabase, + _: salsa::Id, _: EnumVariantId, -) -> Result> { +) -> Result { Err(ConstEvalError::MirLowerError(MirLowerError::Loop)) } -pub(crate) fn const_eval_query<'db>( +pub(crate) fn const_eval<'db>( db: &'db dyn HirDatabase, def: ConstId, subst: GenericArgs<'db>, trait_env: Option>, -) -> Result, ConstEvalError<'db>> { - let body = db.monomorphized_mir_body( - def.into(), - subst, - ParamEnvAndCrate { param_env: db.trait_environment(def.into()), krate: def.krate(db) }, - )?; - let c = interpret_mir(db, body, false, trait_env)?.0?; - Ok(c) +) -> Result, ConstEvalError> { + return match const_eval_query(db, def, subst.store(), trait_env.map(|env| env.store())) { + Ok(konst) => Ok(konst.as_ref()), + Err(err) => Err(err.clone()), + }; + + #[salsa::tracked(returns(ref), cycle_result = const_eval_cycle_result)] + pub(crate) fn const_eval_query<'db>( + db: &'db dyn HirDatabase, + def: ConstId, + subst: StoredGenericArgs, + trait_env: Option, + ) -> Result { + let body = db.monomorphized_mir_body( + def.into(), + subst, + ParamEnvAndCrate { param_env: db.trait_environment(def.into()), krate: def.krate(db) } + .store(), + )?; + let c = interpret_mir(db, body, false, trait_env.as_ref().map(|env| env.as_ref()))?.0?; + Ok(c.store()) + } + + pub(crate) fn const_eval_cycle_result( + _: &dyn HirDatabase, + _: salsa::Id, + _: ConstId, + _: StoredGenericArgs, + _: Option, + ) -> Result { + Err(ConstEvalError::MirLowerError(MirLowerError::Loop)) + } } -pub(crate) fn const_eval_static_query<'db>( 
+pub(crate) fn const_eval_static<'db>( db: &'db dyn HirDatabase, def: StaticId, -) -> Result, ConstEvalError<'db>> { - let interner = DbInterner::new_no_crate(db); - let body = db.monomorphized_mir_body( - def.into(), - GenericArgs::new_from_iter(interner, []), - ParamEnvAndCrate { - param_env: db.trait_environment_for_body(def.into()), - krate: def.krate(db), - }, - )?; - let c = interpret_mir(db, body, false, None)?.0?; - Ok(c) +) -> Result, ConstEvalError> { + return match const_eval_static_query(db, def) { + Ok(konst) => Ok(konst.as_ref()), + Err(err) => Err(err.clone()), + }; + + #[salsa::tracked(returns(ref), cycle_result = const_eval_static_cycle_result)] + pub(crate) fn const_eval_static_query<'db>( + db: &'db dyn HirDatabase, + def: StaticId, + ) -> Result { + let interner = DbInterner::new_no_crate(db); + let body = db.monomorphized_mir_body( + def.into(), + GenericArgs::empty(interner).store(), + ParamEnvAndCrate { + param_env: db.trait_environment_for_body(def.into()), + krate: def.krate(db), + } + .store(), + )?; + let c = interpret_mir(db, body, false, None)?.0?; + Ok(c.store()) + } + + pub(crate) fn const_eval_static_cycle_result( + _: &dyn HirDatabase, + _: salsa::Id, + _: StaticId, + ) -> Result { + Err(ConstEvalError::MirLowerError(MirLowerError::Loop)) + } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs index 2dc937d760316..8816e13ba7b66 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs @@ -27,7 +27,7 @@ use super::{ mod intrinsics; -fn simplify(e: ConstEvalError<'_>) -> ConstEvalError<'_> { +fn simplify(e: ConstEvalError) -> ConstEvalError { match e { ConstEvalError::MirEvalError(MirEvalError::InFunction(e, _)) => { simplify(ConstEvalError::MirEvalError(*e)) @@ -39,7 +39,7 @@ fn simplify(e: ConstEvalError<'_>) -> ConstEvalError<'_> { #[track_caller] fn check_fail( #[rust_analyzer::rust_fixture] ra_fixture: &str, - error: impl FnOnce(ConstEvalError<'_>) -> bool, + error: impl FnOnce(ConstEvalError) -> bool, ) { let (db, file_id) = TestDB::with_single_file(ra_fixture); crate::attach_db(&db, || match eval_goal(&db, file_id) { @@ -104,7 +104,7 @@ fn check_answer( }); } -fn pretty_print_err(e: ConstEvalError<'_>, db: &TestDB) -> String { +fn pretty_print_err(e: ConstEvalError, db: &TestDB) -> String { let mut err = String::new(); let span_formatter = |file, range| format!("{file:?} {range:?}"); let display_target = @@ -121,7 +121,7 @@ fn pretty_print_err(e: ConstEvalError<'_>, db: &TestDB) -> String { err } -fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result, ConstEvalError<'_>> { +fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result, ConstEvalError> { let _tracing = setup_tracing(); let interner = DbInterner::new_no_crate(db); let module_id = db.module_for_file(file_id.file_id(db)); @@ -142,7 +142,7 @@ fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result, ConstEv _ => None, }) .expect("No const named GOAL found in the test"); - db.const_eval(const_id, GenericArgs::new_from_iter(interner, []), None) + db.const_eval(const_id, GenericArgs::empty(interner), None) } #[test] diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs index f9523e7168de4..f0f65eedbce9a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs @@ -19,9 +19,10 @@ use crate::{ 
lower::{Diagnostics, GenericDefaults}, mir::{BorrowckResult, MirBody, MirLowerError}, next_solver::{ - Const, EarlyBinder, GenericArgs, ParamEnv, PolyFnSig, TraitRef, Ty, VariancesOf, + Const, EarlyBinder, GenericArgs, ParamEnv, PolyFnSig, StoredEarlyBinder, StoredGenericArgs, + StoredTy, TraitRef, Ty, VariancesOf, }, - traits::ParamEnvAndCrate, + traits::{ParamEnvAndCrate, StoredParamEnvAndCrate}, }; #[query_group::query_group] @@ -32,60 +33,48 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { // and `monomorphized_mir_body_for_closure` into `monomorphized_mir_body` #[salsa::invoke(crate::mir::mir_body_query)] #[salsa::cycle(cycle_result = crate::mir::mir_body_cycle_result)] - fn mir_body<'db>( - &'db self, - def: DefWithBodyId, - ) -> Result>, MirLowerError<'db>>; + fn mir_body(&self, def: DefWithBodyId) -> Result, MirLowerError>; #[salsa::invoke(crate::mir::mir_body_for_closure_query)] - fn mir_body_for_closure<'db>( - &'db self, - def: InternedClosureId, - ) -> Result>, MirLowerError<'db>>; + fn mir_body_for_closure(&self, def: InternedClosureId) -> Result, MirLowerError>; #[salsa::invoke(crate::mir::monomorphized_mir_body_query)] #[salsa::cycle(cycle_result = crate::mir::monomorphized_mir_body_cycle_result)] - fn monomorphized_mir_body<'db>( - &'db self, + fn monomorphized_mir_body( + &self, def: DefWithBodyId, - subst: GenericArgs<'db>, - env: ParamEnvAndCrate<'db>, - ) -> Result>, MirLowerError<'db>>; + subst: StoredGenericArgs, + env: StoredParamEnvAndCrate, + ) -> Result, MirLowerError>; #[salsa::invoke(crate::mir::monomorphized_mir_body_for_closure_query)] - fn monomorphized_mir_body_for_closure<'db>( - &'db self, + fn monomorphized_mir_body_for_closure( + &self, def: InternedClosureId, - subst: GenericArgs<'db>, - env: ParamEnvAndCrate<'db>, - ) -> Result>, MirLowerError<'db>>; + subst: StoredGenericArgs, + env: StoredParamEnvAndCrate, + ) -> Result, MirLowerError>; #[salsa::invoke(crate::mir::borrowck_query)] #[salsa::lru(2024)] - fn borrowck<'db>( - &'db self, - def: DefWithBodyId, - ) -> Result]>, MirLowerError<'db>>; + fn borrowck(&self, def: DefWithBodyId) -> Result, MirLowerError>; - #[salsa::invoke(crate::consteval::const_eval_query)] - #[salsa::cycle(cycle_result = crate::consteval::const_eval_cycle_result)] + #[salsa::invoke(crate::consteval::const_eval)] + #[salsa::transparent] fn const_eval<'db>( &'db self, def: ConstId, subst: GenericArgs<'db>, trait_env: Option>, - ) -> Result, ConstEvalError<'db>>; + ) -> Result, ConstEvalError>; - #[salsa::invoke(crate::consteval::const_eval_static_query)] - #[salsa::cycle(cycle_result = crate::consteval::const_eval_static_cycle_result)] - fn const_eval_static<'db>(&'db self, def: StaticId) -> Result, ConstEvalError<'db>>; + #[salsa::invoke(crate::consteval::const_eval_static)] + #[salsa::transparent] + fn const_eval_static<'db>(&'db self, def: StaticId) -> Result, ConstEvalError>; #[salsa::invoke(crate::consteval::const_eval_discriminant_variant)] #[salsa::cycle(cycle_result = crate::consteval::const_eval_discriminant_cycle_result)] - fn const_eval_discriminant<'db>( - &'db self, - def: EnumVariantId, - ) -> Result>; + fn const_eval_discriminant(&self, def: EnumVariantId) -> Result; #[salsa::invoke(crate::method_resolution::lookup_impl_method_query)] #[salsa::transparent] @@ -100,19 +89,19 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { #[salsa::invoke(crate::layout::layout_of_adt_query)] #[salsa::cycle(cycle_result = crate::layout::layout_of_adt_cycle_result)] - fn layout_of_adt<'db>( - &'db self, + fn 
layout_of_adt( + &self, def: AdtId, - args: GenericArgs<'db>, - trait_env: ParamEnvAndCrate<'db>, + args: StoredGenericArgs, + trait_env: StoredParamEnvAndCrate, ) -> Result, LayoutError>; #[salsa::invoke(crate::layout::layout_of_ty_query)] #[salsa::cycle(cycle_result = crate::layout::layout_of_ty_cycle_result)] - fn layout_of_ty<'db>( - &'db self, - ty: Ty<'db>, - env: ParamEnvAndCrate<'db>, + fn layout_of_ty( + &self, + ty: StoredTy, + env: StoredParamEnvAndCrate, ) -> Result, LayoutError>; #[salsa::invoke(crate::layout::target_data_layout_query)] @@ -125,8 +114,8 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { #[salsa::transparent] fn ty<'db>(&'db self, def: TyDefId) -> EarlyBinder<'db, Ty<'db>>; - #[salsa::invoke(crate::lower::type_for_type_alias_with_diagnostics_query)] - #[salsa::cycle(cycle_result = crate::lower::type_for_type_alias_with_diagnostics_cycle_result)] + #[salsa::invoke(crate::lower::type_for_type_alias_with_diagnostics)] + #[salsa::transparent] fn type_for_type_alias_with_diagnostics<'db>( &'db self, def: TypeAliasId, @@ -134,11 +123,12 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { /// Returns the type of the value of the given constant, or `None` if the `ValueTyDefId` is /// a `StructId` or `EnumVariantId` with a record constructor. - #[salsa::invoke(crate::lower::value_ty_query)] + #[salsa::invoke(crate::lower::value_ty)] + #[salsa::transparent] fn value_ty<'db>(&'db self, def: ValueTyDefId) -> Option>>; - #[salsa::invoke(crate::lower::impl_self_ty_with_diagnostics_query)] - #[salsa::cycle(cycle_result = crate::lower::impl_self_ty_with_diagnostics_cycle_result)] + #[salsa::invoke(crate::lower::impl_self_ty_with_diagnostics)] + #[salsa::transparent] fn impl_self_ty_with_diagnostics<'db>( &'db self, def: ImplId, @@ -148,9 +138,8 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { #[salsa::transparent] fn impl_self_ty<'db>(&'db self, def: ImplId) -> EarlyBinder<'db, Ty<'db>>; - // FIXME: Make this a non-interned query. 
- #[salsa::invoke_interned(crate::lower::const_param_ty_with_diagnostics_query)] - #[salsa::cycle(cycle_result = crate::lower::const_param_ty_with_diagnostics_cycle_result)] + #[salsa::invoke(crate::lower::const_param_ty_with_diagnostics)] + #[salsa::transparent] fn const_param_ty_with_diagnostics<'db>(&'db self, def: ConstParamId) -> (Ty<'db>, Diagnostics); @@ -158,7 +147,8 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { #[salsa::transparent] fn const_param_ty_ns<'db>(&'db self, def: ConstParamId) -> Ty<'db>; - #[salsa::invoke(crate::lower::impl_trait_with_diagnostics_query)] + #[salsa::invoke(crate::lower::impl_trait_with_diagnostics)] + #[salsa::transparent] fn impl_trait_with_diagnostics<'db>( &'db self, def: ImplId, @@ -169,19 +159,18 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { fn impl_trait<'db>(&'db self, def: ImplId) -> Option>>; #[salsa::invoke(crate::lower::field_types_with_diagnostics_query)] - fn field_types_with_diagnostics<'db>( - &'db self, + #[salsa::transparent] + fn field_types_with_diagnostics( + &self, var: VariantId, - ) -> (Arc>>>, Diagnostics); + ) -> &(ArenaMap>, Diagnostics); #[salsa::invoke(crate::lower::field_types_query)] #[salsa::transparent] - fn field_types<'db>( - &'db self, - var: VariantId, - ) -> Arc>>>; + fn field_types(&self, var: VariantId) -> &ArenaMap>; - #[salsa::invoke(crate::lower::callable_item_signature_query)] + #[salsa::invoke(crate::lower::callable_item_signature)] + #[salsa::transparent] fn callable_item_signature<'db>( &'db self, def: CallableDefId, @@ -191,26 +180,27 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { #[salsa::transparent] fn trait_environment_for_body<'db>(&'db self, def: DefWithBodyId) -> ParamEnv<'db>; - #[salsa::invoke(crate::lower::trait_environment_query)] + #[salsa::invoke(crate::lower::trait_environment)] + #[salsa::transparent] fn trait_environment<'db>(&'db self, def: GenericDefId) -> ParamEnv<'db>; #[salsa::invoke(crate::lower::generic_defaults_with_diagnostics_query)] #[salsa::cycle(cycle_result = crate::lower::generic_defaults_with_diagnostics_cycle_result)] - fn generic_defaults_with_diagnostics<'db>( - &'db self, + fn generic_defaults_with_diagnostics( + &self, def: GenericDefId, - ) -> (GenericDefaults<'db>, Diagnostics); + ) -> (GenericDefaults, Diagnostics); /// This returns an empty list if no parameter has default. /// /// The binders of the returned defaults are only up to (not including) this parameter. 
#[salsa::invoke(crate::lower::generic_defaults_query)] #[salsa::transparent] - fn generic_defaults<'db>(&'db self, def: GenericDefId) -> GenericDefaults<'db>; + fn generic_defaults(&self, def: GenericDefId) -> GenericDefaults; // Interned IDs for solver integration #[salsa::interned] - fn intern_impl_trait_id(&self, id: ImplTraitId<'_>) -> InternedOpaqueTyId; + fn intern_impl_trait_id(&self, id: ImplTraitId) -> InternedOpaqueTyId; #[salsa::interned] fn intern_closure(&self, id: InternedClosure) -> InternedClosureId; @@ -219,11 +209,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { fn intern_coroutine(&self, id: InternedCoroutine) -> InternedCoroutineId; #[salsa::invoke(crate::variance::variances_of)] - #[salsa::cycle( - // cycle_fn = crate::variance::variances_of_cycle_fn, - // cycle_initial = crate::variance::variances_of_cycle_initial, - cycle_result = crate::variance::variances_of_cycle_initial, - )] + #[salsa::transparent] fn variances_of<'db>(&'db self, def: GenericDefId) -> VariancesOf<'db>; } @@ -248,7 +234,7 @@ pub struct InternedConstParamId { #[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)] #[derive(PartialOrd, Ord)] pub struct InternedOpaqueTyId { - pub loc: ImplTraitId<'db>, + pub loc: ImplTraitId, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs index 0de7fab8d1b28..dd1fc3b36ef8d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs @@ -99,7 +99,7 @@ impl BodyValidationDiagnostic { struct ExprValidator<'db> { owner: DefWithBodyId, body: Arc, - infer: &'db InferenceResult<'db>, + infer: &'db InferenceResult, env: ParamEnv<'db>, diagnostics: Vec, validate_lints: bool, @@ -313,7 +313,7 @@ impl<'db> ExprValidator<'db> { ); value_or_partial.is_none_or(|v| !matches!(v, ValueNs::StaticId(_))) } - Expr::Field { expr, .. } => match self.infer.type_of_expr[*expr].kind() { + Expr::Field { expr, .. } => match self.infer.expr_ty(*expr).kind() { TyKind::Adt(adt, ..) 
if matches!(adt.def_id().0, AdtId::UnionId(_)) => false, _ => self.is_known_valid_scrutinee(*expr), }, @@ -554,7 +554,7 @@ impl<'db> FilterMapNextChecker<'db> { pub fn record_literal_missing_fields( db: &dyn HirDatabase, - infer: &InferenceResult<'_>, + infer: &InferenceResult, id: ExprId, expr: &Expr, ) -> Option<(VariantId, Vec, /*exhaustive*/ bool)> { @@ -584,7 +584,7 @@ pub fn record_literal_missing_fields( pub fn record_pattern_missing_fields( db: &dyn HirDatabase, - infer: &InferenceResult<'_>, + infer: &InferenceResult, id: PatId, pat: &Pat, ) -> Option<(VariantId, Vec, /*exhaustive*/ bool)> { @@ -612,8 +612,8 @@ pub fn record_pattern_missing_fields( Some((variant_def, missed_fields, exhaustive)) } -fn types_of_subpatterns_do_match(pat: PatId, body: &Body, infer: &InferenceResult<'_>) -> bool { - fn walk(pat: PatId, body: &Body, infer: &InferenceResult<'_>, has_type_mismatches: &mut bool) { +fn types_of_subpatterns_do_match(pat: PatId, body: &Body, infer: &InferenceResult) -> bool { + fn walk(pat: PatId, body: &Body, infer: &InferenceResult, has_type_mismatches: &mut bool) { match infer.type_mismatch_for_pat(pat) { Some(_) => *has_type_mismatches = true, None if *has_type_mismatches => (), diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs index 80b65ace77cd7..8e6101e6a0e3e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs @@ -16,7 +16,7 @@ use hir_def::{ item_tree::FieldsShape, }; use hir_expand::name::Name; -use rustc_type_ir::inherent::{IntoKind, SliceLike}; +use rustc_type_ir::inherent::IntoKind; use span::Edition; use stdx::{always, never, variance::PhantomCovariantLifetime}; @@ -96,7 +96,7 @@ pub(crate) enum PatKind<'db> { pub(crate) struct PatCtxt<'a, 'db> { db: &'db dyn HirDatabase, - infer: &'a InferenceResult<'db>, + infer: &'db InferenceResult, body: &'a Body, pub(crate) errors: Vec, } @@ -104,7 +104,7 @@ pub(crate) struct PatCtxt<'a, 'db> { impl<'a, 'db> PatCtxt<'a, 'db> { pub(crate) fn new( db: &'db dyn HirDatabase, - infer: &'a InferenceResult<'db>, + infer: &'db InferenceResult, body: &'a Body, ) -> Self { Self { db, infer, body, errors: Vec::new() } @@ -119,12 +119,15 @@ impl<'a, 'db> PatCtxt<'a, 'db> { let unadjusted_pat = self.lower_pattern_unadjusted(pat); self.infer.pat_adjustments.get(&pat).map(|it| &**it).unwrap_or_default().iter().rev().fold( unadjusted_pat, - |subpattern, ref_ty| Pat { ty: *ref_ty, kind: Box::new(PatKind::Deref { subpattern }) }, + |subpattern, ref_ty| Pat { + ty: ref_ty.as_ref(), + kind: Box::new(PatKind::Deref { subpattern }), + }, ) } fn lower_pattern_unadjusted(&mut self, pat: PatId) -> Pat<'db> { - let mut ty = self.infer[pat]; + let mut ty = self.infer.pat_ty(pat); let variant = self.infer.variant_resolution_for_pat(pat); let kind = match self.body[pat] { @@ -151,7 +154,7 @@ impl<'a, 'db> PatCtxt<'a, 'db> { hir_def::hir::Pat::Bind { id, subpat, .. 
} => { let bm = self.infer.binding_modes[pat]; - ty = self.infer[id]; + ty = self.infer.binding_ty(id); let name = &self.body[id].name; match (bm, ty.kind()) { (BindingMode::Ref(_), TyKind::Ref(_, rty, _)) => ty = rty, @@ -273,7 +276,7 @@ impl<'a, 'db> PatCtxt<'a, 'db> { } fn lower_path(&mut self, pat: PatId, _path: &Path) -> Pat<'db> { - let ty = self.infer[pat]; + let ty = self.infer.pat_ty(pat); let pat_from_kind = |kind| Pat { ty, kind: Box::new(kind) }; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs index 91448d5806f97..eda7e7e249b38 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs @@ -11,7 +11,7 @@ use rustc_pattern_analysis::{ constructor::{Constructor, ConstructorSet, VariantVisibility}, usefulness::{PlaceValidity, UsefulnessReport, compute_match_usefulness}, }; -use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike}; +use rustc_type_ir::inherent::{AdtDef, IntoKind}; use smallvec::{SmallVec, smallvec}; use stdx::never; @@ -150,7 +150,7 @@ impl<'a, 'db> MatchCheckCtx<'a, 'db> { let fields_len = variant.fields(self.db).fields().len() as u32; (0..fields_len).map(|idx| LocalFieldId::from_raw(idx.into())).map(move |fid| { - let ty = field_tys[fid].instantiate(self.infcx.interner, substs); + let ty = field_tys[fid].get().instantiate(self.infcx.interner, substs); let ty = self .infcx .at(&ObligationCause::dummy(), self.env) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs index bbc381ba5d511..50d4517d01254 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs @@ -97,9 +97,9 @@ enum UnsafeDiagnostic { DeprecatedSafe2024 { node: ExprId, inside_unsafe_block: InsideUnsafeBlock }, } -pub fn unsafe_operations_for_body<'db>( - db: &'db dyn HirDatabase, - infer: &InferenceResult<'db>, +pub fn unsafe_operations_for_body( + db: &dyn HirDatabase, + infer: &InferenceResult, def: DefWithBodyId, body: &Body, callback: &mut dyn FnMut(ExprOrPatId), @@ -116,9 +116,9 @@ pub fn unsafe_operations_for_body<'db>( } } -pub fn unsafe_operations<'db>( - db: &'db dyn HirDatabase, - infer: &InferenceResult<'db>, +pub fn unsafe_operations( + db: &dyn HirDatabase, + infer: &InferenceResult, def: DefWithBodyId, body: &Body, current: ExprId, @@ -136,7 +136,7 @@ pub fn unsafe_operations<'db>( struct UnsafeVisitor<'db> { db: &'db dyn HirDatabase, - infer: &'db InferenceResult<'db>, + infer: &'db InferenceResult, body: &'db Body, resolver: Resolver<'db>, def: DefWithBodyId, @@ -155,7 +155,7 @@ struct UnsafeVisitor<'db> { impl<'db> UnsafeVisitor<'db> { fn new( db: &'db dyn HirDatabase, - infer: &'db InferenceResult<'db>, + infer: &'db InferenceResult, body: &'db Body, def: DefWithBodyId, unsafe_expr_cb: &'db mut dyn FnMut(UnsafeDiagnostic), @@ -260,7 +260,7 @@ impl<'db> UnsafeVisitor<'db> { match pat { Pat::Record { .. 
} => { - if let Some((AdtId::UnionId(_), _)) = self.infer[current].as_adt() { + if let Some((AdtId::UnionId(_), _)) = self.infer.pat_ty(current).as_adt() { let old_inside_union_destructure = mem::replace(&mut self.inside_union_destructure, true); self.body.walk_pats_shallow(current, |pat| self.walk_pat(pat)); @@ -286,7 +286,7 @@ impl<'db> UnsafeVisitor<'db> { let inside_assignment = mem::replace(&mut self.inside_assignment, false); match expr { &Expr::Call { callee, .. } => { - let callee = self.infer[callee]; + let callee = self.infer.expr_ty(callee); if let TyKind::FnDef(CallableIdWrapper(CallableDefId::FunctionId(func)), _) = callee.kind() { @@ -341,7 +341,7 @@ impl<'db> UnsafeVisitor<'db> { } } Expr::UnaryOp { expr, op: UnaryOp::Deref } => { - if let TyKind::RawPtr(..) = self.infer[*expr].kind() { + if let TyKind::RawPtr(..) = self.infer.expr_ty(*expr).kind() { self.on_unsafe_op(current.into(), UnsafetyReason::RawPtrDeref); } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs index e1d62a9c7a3e2..b9e23464e9803 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs @@ -38,7 +38,7 @@ use rustc_hash::FxHashSet; use rustc_type_ir::{ AliasTyKind, BoundVarIndexKind, CoroutineArgsParts, CoroutineClosureArgsParts, RegionKind, Upcast, - inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike, Term as _, Ty as _, Tys as _}, + inherent::{AdtDef, GenericArgs as _, IntoKind, Term as _, Ty as _, Tys as _}, }; use smallvec::SmallVec; use span::Edition; @@ -52,9 +52,9 @@ use crate::{ lower::GenericPredicates, mir::pad16, next_solver::{ - AliasTy, Clause, ClauseKind, Const, ConstKind, DbInterner, EarlyBinder, - ExistentialPredicate, FnSig, GenericArg, GenericArgs, ParamEnv, PolyFnSig, Region, - SolverDefId, Term, TraitRef, Ty, TyKind, TypingMode, + AliasTy, Clause, ClauseKind, Const, ConstKind, DbInterner, ExistentialPredicate, FnSig, + GenericArg, GenericArgKind, GenericArgs, ParamEnv, PolyFnSig, Region, SolverDefId, + StoredEarlyBinder, StoredTy, Term, TermKind, TraitRef, Ty, TyKind, TypingMode, abi::Safety, infer::{DbInternerInferExt, traits::ObligationCause}, }, @@ -602,7 +602,7 @@ impl<'db, T: HirDisplay<'db>> HirDisplay<'db> for &T { impl<'db, T: HirDisplay<'db> + Internable> HirDisplay<'db> for Interned { fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result { - HirDisplay::hir_fmt(self.as_ref(), f) + HirDisplay::hir_fmt(&**self, f) } } @@ -664,10 +664,10 @@ fn write_projection<'db>(f: &mut HirFormatter<'_, 'db>, alias: &AliasTy<'db>) -> impl<'db> HirDisplay<'db> for GenericArg<'db> { fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result { - match self { - GenericArg::Ty(ty) => ty.hir_fmt(f), - GenericArg::Lifetime(lt) => lt.hir_fmt(f), - GenericArg::Const(c) => c.hir_fmt(f), + match self.kind() { + GenericArgKind::Type(ty) => ty.hir_fmt(f), + GenericArgKind::Lifetime(lt) => lt.hir_fmt(f), + GenericArgKind::Const(c) => c.hir_fmt(f), } } } @@ -790,7 +790,7 @@ fn render_const_scalar_inner<'db>( TyKind::Slice(ty) => { let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap()); let count = usize::from_le_bytes(b[b.len() / 2..].try_into().unwrap()); - let Ok(layout) = f.db.layout_of_ty(ty, param_env) else { + let Ok(layout) = f.db.layout_of_ty(ty.store(), param_env.store()) else { return f.write_str(""); }; let size_one = layout.size.bytes_usize(); @@ -824,7 +824,7 @@ fn render_const_scalar_inner<'db>( let Ok(t) = 
memory_map.vtable_ty(ty_id) else { return f.write_str(""); }; - let Ok(layout) = f.db.layout_of_ty(t, param_env) else { + let Ok(layout) = f.db.layout_of_ty(t.store(), param_env.store()) else { return f.write_str(""); }; let size = layout.size.bytes_usize(); @@ -854,7 +854,7 @@ fn render_const_scalar_inner<'db>( return f.write_str(""); } }); - let Ok(layout) = f.db.layout_of_ty(t, param_env) else { + let Ok(layout) = f.db.layout_of_ty(t.store(), param_env.store()) else { return f.write_str(""); }; let size = layout.size.bytes_usize(); @@ -866,7 +866,7 @@ fn render_const_scalar_inner<'db>( } }, TyKind::Tuple(tys) => { - let Ok(layout) = f.db.layout_of_ty(ty, param_env) else { + let Ok(layout) = f.db.layout_of_ty(ty.store(), param_env.store()) else { return f.write_str(""); }; f.write_str("(")?; @@ -878,7 +878,7 @@ fn render_const_scalar_inner<'db>( f.write_str(", ")?; } let offset = layout.fields.offset(id).bytes_usize(); - let Ok(layout) = f.db.layout_of_ty(ty, param_env) else { + let Ok(layout) = f.db.layout_of_ty(ty.store(), param_env.store()) else { f.write_str("")?; continue; }; @@ -889,7 +889,7 @@ fn render_const_scalar_inner<'db>( } TyKind::Adt(def, args) => { let def = def.def_id().0; - let Ok(layout) = f.db.layout_of_adt(def, args, param_env) else { + let Ok(layout) = f.db.layout_of_adt(def, args.store(), param_env.store()) else { return f.write_str(""); }; match def { @@ -900,7 +900,7 @@ fn render_const_scalar_inner<'db>( render_variant_after_name( s.fields(f.db), f, - &field_types, + field_types, f.db.trait_environment(def.into()), &layout, args, @@ -932,7 +932,7 @@ fn render_const_scalar_inner<'db>( render_variant_after_name( var_id.fields(f.db), f, - &field_types, + field_types, f.db.trait_environment(def.into()), var_layout, args, @@ -952,7 +952,7 @@ fn render_const_scalar_inner<'db>( let Some(len) = consteval::try_const_usize(f.db, len) else { return f.write_str(""); }; - let Ok(layout) = f.db.layout_of_ty(ty, param_env) else { + let Ok(layout) = f.db.layout_of_ty(ty.store(), param_env.store()) else { return f.write_str(""); }; let size_one = layout.size.bytes_usize(); @@ -992,7 +992,7 @@ fn render_const_scalar_inner<'db>( fn render_variant_after_name<'db>( data: &VariantFields, f: &mut HirFormatter<'_, 'db>, - field_types: &ArenaMap>>, + field_types: &'db ArenaMap>, param_env: ParamEnv<'db>, layout: &Layout, args: GenericArgs<'db>, @@ -1004,8 +1004,8 @@ fn render_variant_after_name<'db>( FieldsShape::Record | FieldsShape::Tuple => { let render_field = |f: &mut HirFormatter<'_, 'db>, id: LocalFieldId| { let offset = layout.fields.offset(u32::from(id.into_raw()) as usize).bytes_usize(); - let ty = field_types[id].instantiate(f.interner, args); - let Ok(layout) = f.db.layout_of_ty(ty, param_env) else { + let ty = field_types[id].get().instantiate(f.interner, args); + let Ok(layout) = f.db.layout_of_ty(ty.store(), param_env.store()) else { return f.write_str(""); }; let size = layout.size.bytes_usize(); @@ -1223,7 +1223,7 @@ impl<'db> HirDisplay<'db> for Ty<'db> { }; f.end_location_link(); - if args.len() > 0 { + if !args.is_empty() { let generic_def_id = GenericDefId::from_callable(db, def); let generics = generics(db, generic_def_id); let (parent_len, self_param, type_, const_, impl_, lifetime) = @@ -1459,7 +1459,7 @@ impl<'db> HirDisplay<'db> for Ty<'db> { }; let coroutine_sig = coroutine_sig.skip_binder(); let coroutine_inputs = coroutine_sig.inputs(); - let TyKind::Tuple(coroutine_inputs) = coroutine_inputs.as_slice()[1].kind() else { + let TyKind::Tuple(coroutine_inputs) = 
coroutine_inputs[1].kind() else { unreachable!("invalid coroutine closure signature"); }; let TyKind::Tuple(coroutine_output) = coroutine_sig.output().kind() else { @@ -1787,9 +1787,9 @@ impl<'db> HirDisplay<'db> for PolyFnSig<'db> { impl<'db> HirDisplay<'db> for Term<'db> { fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result { - match self { - Term::Ty(it) => it.hir_fmt(f), - Term::Const(it) => it.hir_fmt(f), + match self.kind() { + TermKind::Ty(it) => it.hir_fmt(f), + TermKind::Const(it) => it.hir_fmt(f), } } } @@ -1942,7 +1942,7 @@ fn write_bounds_like_dyn_trait<'db>( let own_args = projection.projection_term.own_args(f.interner); if !own_args.is_empty() { write!(f, "<")?; - hir_fmt_generic_arguments(f, own_args.as_slice(), None)?; + hir_fmt_generic_arguments(f, own_args, None)?; write!(f, ">")?; } write!(f, " = ")?; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs b/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs index 3ae6451d69528..66692143bc1aa 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs @@ -2,7 +2,7 @@ use hir_def::{AdtId, signatures::StructFlags}; use rustc_hash::FxHashSet; -use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike}; +use rustc_type_ir::inherent::{AdtDef, IntoKind}; use stdx::never; use crate::{ @@ -85,7 +85,7 @@ fn has_drop_glue_impl<'db>( .map(|(_, field_ty)| { has_drop_glue_impl( infcx, - field_ty.instantiate(infcx.interner, subst), + field_ty.get().instantiate(infcx.interner, subst), env, visited, ) @@ -105,7 +105,7 @@ fn has_drop_glue_impl<'db>( .map(|(_, field_ty)| { has_drop_glue_impl( infcx, - field_ty.instantiate(infcx.interner, subst), + field_ty.get().instantiate(infcx.interner, subst), env, visited, ) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs index 64b15eb017a6d..59cfd3fdc98c1 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs @@ -10,8 +10,7 @@ use hir_def::{ use rustc_hash::FxHashSet; use rustc_type_ir::{ AliasTyKind, ClauseKind, PredicatePolarity, TypeSuperVisitable as _, TypeVisitable as _, - Upcast, elaborate, - inherent::{IntoKind, SliceLike}, + Upcast, elaborate, inherent::IntoKind, }; use smallvec::SmallVec; @@ -329,13 +328,9 @@ where } let sig = db.callable_item_signature(func.into()); - if sig - .skip_binder() - .inputs() - .iter() - .skip(1) - .any(|ty| contains_illegal_self_type_reference(db, trait_, &ty, AllowSelfProjection::Yes)) - { + if sig.skip_binder().inputs().iter().skip(1).any(|ty| { + contains_illegal_self_type_reference(db, trait_, ty.skip_binder(), AllowSelfProjection::Yes) + }) { cb(MethodViolationCode::ReferencesSelfInput)?; } @@ -412,11 +407,11 @@ fn receiver_is_dispatchable<'db>( // `self: Self` can't be dispatched on, but this is already considered dyn-compatible // See rustc's comment on https://github.com/rust-lang/rust/blob/3f121b9461cce02a703a0e7e450568849dfaa074/compiler/rustc_trait_selection/src/traits/object_safety.rs#L433-L437 - if sig.inputs().iter().next().is_some_and(|p| p.skip_binder() == self_param_ty) { + if sig.inputs().iter().next().is_some_and(|p| *p.skip_binder() == self_param_ty) { return true; } - let Some(&receiver_ty) = sig.inputs().skip_binder().as_slice().first() else { + let Some(&receiver_ty) = sig.inputs().skip_binder().first() else { return false; }; diff --git 
a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index 70868e4b95aa4..d527a4ae29c28 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -28,7 +28,7 @@ mod path; mod place_op; pub(crate) mod unify; -use std::{cell::OnceCell, convert::identity, iter, ops::Index}; +use std::{cell::OnceCell, convert::identity, iter}; use base_db::Crate; use either::Either; @@ -47,14 +47,12 @@ use hir_expand::{mod_path::ModPath, name::Name}; use indexmap::IndexSet; use intern::sym; use la_arena::ArenaMap; -use macros::{TypeFoldable, TypeVisitable}; use rustc_ast_ir::Mutability; use rustc_hash::{FxHashMap, FxHashSet}; use rustc_type_ir::{ AliasTyKind, TypeFoldable, - inherent::{AdtDef, IntoKind, Region as _, SliceLike, Ty as _}, + inherent::{AdtDef, IntoKind, Ty as _}, }; -use salsa::Update; use span::Edition; use stdx::never; use thin_vec::ThinVec; @@ -74,10 +72,10 @@ use crate::{ method_resolution::{CandidateId, MethodResolutionUnstableFeatures}, mir::MirSpan, next_solver::{ - AliasTy, Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, Region, Ty, TyKind, - Tys, + AliasTy, Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, Region, + StoredGenericArgs, StoredTy, StoredTys, Ty, TyKind, Tys, abi::Safety, - infer::{InferCtxt, traits::ObligationCause}, + infer::{InferCtxt, ObligationInspector, traits::ObligationCause}, }, traits::FnTrait, utils::TargetFeatureIsSafeInTarget, @@ -95,12 +93,24 @@ use cast::{CastCheck, CastError}; pub(crate) use closure::analysis::{CaptureKind, CapturedItem, CapturedItemWithoutTy}; /// The entry point of type inference. -fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> InferenceResult<'_> { +fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> InferenceResult { + infer_query_with_inspect(db, def, None) +} + +pub fn infer_query_with_inspect<'db>( + db: &'db dyn HirDatabase, + def: DefWithBodyId, + inspect: Option>, +) -> InferenceResult { let _p = tracing::info_span!("infer_query").entered(); let resolver = def.resolver(db); let body = db.body(def); let mut ctx = InferenceContext::new(db, def, &body, resolver); + if let Some(inspect) = inspect { + ctx.table.infer_ctxt.attach_obligation_inspector(inspect); + } + match def { DefWithBodyId::FunctionId(f) => { ctx.collect_fn(f); @@ -110,23 +120,23 @@ fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> InferenceResult<'_> DefWithBodyId::VariantId(v) => { ctx.return_ty = match EnumSignature::variant_body_type(db, v.lookup(db).parent) { hir_def::layout::IntegerType::Pointer(signed) => match signed { - true => ctx.types.isize, - false => ctx.types.usize, + true => ctx.types.types.isize, + false => ctx.types.types.usize, }, hir_def::layout::IntegerType::Fixed(size, signed) => match signed { true => match size { - Integer::I8 => ctx.types.i8, - Integer::I16 => ctx.types.i16, - Integer::I32 => ctx.types.i32, - Integer::I64 => ctx.types.i64, - Integer::I128 => ctx.types.i128, + Integer::I8 => ctx.types.types.i8, + Integer::I16 => ctx.types.types.i16, + Integer::I32 => ctx.types.types.i32, + Integer::I64 => ctx.types.types.i64, + Integer::I128 => ctx.types.types.i128, }, false => match size { - Integer::I8 => ctx.types.u8, - Integer::I16 => ctx.types.u16, - Integer::I32 => ctx.types.u32, - Integer::I64 => ctx.types.u64, - Integer::I128 => ctx.types.u128, + Integer::I8 => ctx.types.types.u8, + Integer::I16 => ctx.types.types.u16, + Integer::I32 => ctx.types.types.u32, + 
Integer::I64 => ctx.types.types.u64, + Integer::I128 => ctx.types.types.u128, }, }, }; @@ -162,7 +172,7 @@ fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> InferenceResult<'_> ctx.resolve_all() } -fn infer_cycle_result(db: &dyn HirDatabase, _: DefWithBodyId) -> InferenceResult<'_> { +fn infer_cycle_result(db: &dyn HirDatabase, _: salsa::Id, _: DefWithBodyId) -> InferenceResult { InferenceResult { has_errors: true, ..InferenceResult::new(Ty::new_error(DbInterner::new_no_crate(db), ErrorGuaranteed)) @@ -196,8 +206,8 @@ pub enum InferenceTyDiagnosticSource { Signature, } -#[derive(Debug, PartialEq, Eq, Clone, Update)] -pub enum InferenceDiagnostic<'db> { +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum InferenceDiagnostic { NoSuchField { field: ExprOrPatId, private: Option, @@ -213,16 +223,16 @@ pub enum InferenceDiagnostic<'db> { }, UnresolvedField { expr: ExprId, - receiver: Ty<'db>, + receiver: StoredTy, name: Name, method_with_same_name_exists: bool, }, UnresolvedMethodCall { expr: ExprId, - receiver: Ty<'db>, + receiver: StoredTy, name: Name, /// Contains the type the field resolves to - field_with_same_name: Option>, + field_with_same_name: Option, assoc_func_with_same_name: Option, }, UnresolvedAssocItem { @@ -249,21 +259,21 @@ pub enum InferenceDiagnostic<'db> { }, ExpectedFunction { call_expr: ExprId, - found: Ty<'db>, + found: StoredTy, }, TypedHole { expr: ExprId, - expected: Ty<'db>, + expected: StoredTy, }, CastToUnsized { expr: ExprId, - cast_ty: Ty<'db>, + cast_ty: StoredTy, }, InvalidCast { expr: ExprId, error: CastError, - expr_ty: Ty<'db>, - cast_ty: Ty<'db>, + expr_ty: StoredTy, + cast_ty: StoredTy, }, TyDiagnostic { source: InferenceTyDiagnosticSource, @@ -290,10 +300,10 @@ pub enum InferenceDiagnostic<'db> { } /// A mismatch between an expected and an inferred type. -#[derive(Clone, PartialEq, Eq, Debug, Hash, Update)] -pub struct TypeMismatch<'db> { - pub expected: Ty<'db>, - pub actual: Ty<'db>, +#[derive(Clone, PartialEq, Eq, Debug, Hash)] +pub struct TypeMismatch { + pub expected: StoredTy, + pub actual: StoredTy, } /// Represents coercing a value to a different type of value. @@ -336,20 +346,23 @@ pub struct TypeMismatch<'db> { /// At some point, of course, `Box` should move out of the compiler, in which /// case this is analogous to transforming a struct. E.g., Box<[i32; 4]> -> /// Box<[i32]> is an `Adjust::Unsize` with the target `Box<[i32]>`. -#[derive(Clone, Debug, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable, Update)] -pub struct Adjustment<'db> { - #[type_visitable(ignore)] - #[type_foldable(identity)] +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct Adjustment { pub kind: Adjust, - pub target: Ty<'db>, + pub target: StoredTy, } -impl<'db> Adjustment<'db> { - pub fn borrow(interner: DbInterner<'db>, m: Mutability, ty: Ty<'db>, lt: Region<'db>) -> Self { +impl Adjustment { + pub fn borrow<'db>( + interner: DbInterner<'db>, + m: Mutability, + ty: Ty<'db>, + lt: Region<'db>, + ) -> Self { let ty = Ty::new_ref(interner, lt, ty, m); Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(AutoBorrowMutability::new(m, AllowTwoPhase::No))), - target: ty, + target: ty.store(), } } } @@ -473,56 +486,47 @@ pub enum PointerCast { /// When you add a field that stores types (including `Substitution` and the like), don't forget /// `resolve_completely()`'ing them in `InferenceContext::resolve_all()`. Inference variables must /// not appear in the final inference result. 
-#[derive(Clone, PartialEq, Eq, Debug, Update)] -pub struct InferenceResult<'db> { +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct InferenceResult { /// For each method call expr, records the function it resolves to. - #[update(unsafe(with(crate::utils::unsafe_update_eq /* expr id is technically update */)))] - method_resolutions: FxHashMap)>, + method_resolutions: FxHashMap, /// For each field access expr, records the field it resolves to. field_resolutions: FxHashMap>, /// For each struct literal or pattern, records the variant it resolves to. variant_resolutions: FxHashMap, /// For each associated item record what it resolves to - assoc_resolutions: FxHashMap)>, + assoc_resolutions: FxHashMap, /// Whenever a tuple field expression access a tuple field, we allocate a tuple id in /// [`InferenceContext`] and store the tuples substitution there. This map is the reverse of /// that which allows us to resolve a [`TupleFieldId`]s type. - #[update(unsafe(with(crate::utils::unsafe_update_eq /* thinvec is technically update */)))] - tuple_field_access_types: ThinVec>, + tuple_field_access_types: ThinVec, - #[update(unsafe(with(crate::utils::unsafe_update_eq /* expr id is technically update */)))] - pub(crate) type_of_expr: ArenaMap>, + pub(crate) type_of_expr: ArenaMap, /// For each pattern record the type it resolves to. /// /// **Note**: When a pattern type is resolved it may still contain /// unresolved or missing subpatterns or subpatterns of mismatched types. - #[update(unsafe(with(crate::utils::unsafe_update_eq /* pat id is technically update */)))] - pub(crate) type_of_pat: ArenaMap>, - #[update(unsafe(with(crate::utils::unsafe_update_eq /* binding id is technically update */)))] - pub(crate) type_of_binding: ArenaMap>, - #[update(unsafe(with(crate::utils::unsafe_update_eq /* type ref id is technically update */)))] - pub(crate) type_of_type_placeholder: FxHashMap>, - pub(crate) type_of_opaque: FxHashMap>, - - pub(crate) type_mismatches: Option>>>, + pub(crate) type_of_pat: ArenaMap, + pub(crate) type_of_binding: ArenaMap, + pub(crate) type_of_type_placeholder: FxHashMap, + pub(crate) type_of_opaque: FxHashMap, + + pub(crate) type_mismatches: Option>>, /// Whether there are any type-mismatching errors in the result. // FIXME: This isn't as useful as initially thought due to us falling back placeholders to // `TyKind::Error`. // Which will then mark this field. pub(crate) has_errors: bool, /// During inference this field is empty and [`InferenceContext::diagnostics`] is filled instead. - #[update(unsafe(with(crate::utils::unsafe_update_eq /* thinvec is technically update */)))] - diagnostics: ThinVec>, + diagnostics: ThinVec, /// Interned `Error` type to return references to. // FIXME: Remove this. - error_ty: Ty<'db>, + error_ty: StoredTy, - #[update(unsafe(with(crate::utils::unsafe_update_eq /* expr id is technically update */)))] - pub(crate) expr_adjustments: FxHashMap]>>, + pub(crate) expr_adjustments: FxHashMap>, /// Stores the types which were implicitly dereferenced in pattern binding modes. - #[update(unsafe(with(crate::utils::unsafe_update_eq /* pat id is technically update */)))] - pub(crate) pat_adjustments: FxHashMap>>, + pub(crate) pat_adjustments: FxHashMap>, /// Stores the binding mode (`ref` in `let ref x = 2`) of bindings. 
/// /// This one is tied to the `PatId` instead of `BindingId`, because in some rare cases, a binding in an @@ -538,7 +542,7 @@ pub struct InferenceResult<'db> { /// the first `rest` has implicit `ref` binding mode, but the second `rest` binding mode is `move`. pub(crate) binding_modes: ArenaMap, - pub(crate) closure_info: FxHashMap>, FnTrait)>, + pub(crate) closure_info: FxHashMap, FnTrait)>, // FIXME: remove this field pub mutated_bindings_in_closure: FxHashSet, @@ -546,15 +550,15 @@ pub struct InferenceResult<'db> { } #[salsa::tracked] -impl<'db> InferenceResult<'db> { +impl InferenceResult { #[salsa::tracked(returns(ref), cycle_result = infer_cycle_result)] - pub fn for_body(db: &'db dyn HirDatabase, def: DefWithBodyId) -> InferenceResult<'db> { + pub fn for_body(db: &dyn HirDatabase, def: DefWithBodyId) -> InferenceResult { infer_query(db, def) } } -impl<'db> InferenceResult<'db> { - fn new(error_ty: Ty<'db>) -> Self { +impl InferenceResult { + fn new(error_ty: Ty<'_>) -> Self { Self { method_resolutions: Default::default(), field_resolutions: Default::default(), @@ -569,7 +573,7 @@ impl<'db> InferenceResult<'db> { type_of_opaque: Default::default(), type_mismatches: Default::default(), has_errors: Default::default(), - error_ty, + error_ty: error_ty.store(), pat_adjustments: Default::default(), binding_modes: Default::default(), expr_adjustments: Default::default(), @@ -579,8 +583,8 @@ impl<'db> InferenceResult<'db> { } } - pub fn method_resolution(&self, expr: ExprId) -> Option<(FunctionId, GenericArgs<'db>)> { - self.method_resolutions.get(&expr).copied() + pub fn method_resolution<'db>(&self, expr: ExprId) -> Option<(FunctionId, GenericArgs<'db>)> { + self.method_resolutions.get(&expr).map(|(func, args)| (*func, args.as_ref())) } pub fn field_resolution(&self, expr: ExprId) -> Option> { self.field_resolutions.get(&expr).copied() @@ -597,16 +601,19 @@ impl<'db> InferenceResult<'db> { ExprOrPatId::PatId(id) => self.variant_resolution_for_pat(id), } } - pub fn assoc_resolutions_for_expr( + pub fn assoc_resolutions_for_expr<'db>( &self, id: ExprId, ) -> Option<(CandidateId, GenericArgs<'db>)> { - self.assoc_resolutions.get(&id.into()).copied() + self.assoc_resolutions.get(&id.into()).map(|(assoc, args)| (*assoc, args.as_ref())) } - pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<(CandidateId, GenericArgs<'db>)> { - self.assoc_resolutions.get(&id.into()).copied() + pub fn assoc_resolutions_for_pat<'db>( + &self, + id: PatId, + ) -> Option<(CandidateId, GenericArgs<'db>)> { + self.assoc_resolutions.get(&id.into()).map(|(assoc, args)| (*assoc, args.as_ref())) } - pub fn assoc_resolutions_for_expr_or_pat( + pub fn assoc_resolutions_for_expr_or_pat<'db>( &self, id: ExprOrPatId, ) -> Option<(CandidateId, GenericArgs<'db>)> { @@ -615,20 +622,20 @@ impl<'db> InferenceResult<'db> { ExprOrPatId::PatId(id) => self.assoc_resolutions_for_pat(id), } } - pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch<'db>> { + pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch> { self.type_mismatches.as_deref()?.get(&expr.into()) } - pub fn type_mismatch_for_pat(&self, pat: PatId) -> Option<&TypeMismatch<'db>> { + pub fn type_mismatch_for_pat(&self, pat: PatId) -> Option<&TypeMismatch> { self.type_mismatches.as_deref()?.get(&pat.into()) } - pub fn type_mismatches(&self) -> impl Iterator)> { + pub fn type_mismatches(&self) -> impl Iterator { self.type_mismatches .as_deref() .into_iter() .flatten() .map(|(expr_or_pat, mismatch)| (*expr_or_pat, mismatch)) 
} - pub fn expr_type_mismatches(&self) -> impl Iterator)> { + pub fn expr_type_mismatches(&self) -> impl Iterator { self.type_mismatches.as_deref().into_iter().flatten().filter_map( |(expr_or_pat, mismatch)| match *expr_or_pat { ExprOrPatId::ExprId(expr) => Some((expr, mismatch)), @@ -636,22 +643,22 @@ impl<'db> InferenceResult<'db> { }, ) } - pub fn placeholder_types(&self) -> impl Iterator)> { - self.type_of_type_placeholder.iter().map(|(&type_ref, ty)| (type_ref, ty)) + pub fn placeholder_types<'db>(&self) -> impl Iterator)> { + self.type_of_type_placeholder.iter().map(|(&type_ref, ty)| (type_ref, ty.as_ref())) } - pub fn type_of_type_placeholder(&self, type_ref: TypeRefId) -> Option> { - self.type_of_type_placeholder.get(&type_ref).copied() + pub fn type_of_type_placeholder<'db>(&self, type_ref: TypeRefId) -> Option> { + self.type_of_type_placeholder.get(&type_ref).map(|ty| ty.as_ref()) } - pub fn closure_info(&self, closure: InternedClosureId) -> &(Vec>, FnTrait) { + pub fn closure_info(&self, closure: InternedClosureId) -> &(Vec, FnTrait) { self.closure_info.get(&closure).unwrap() } - pub fn type_of_expr_or_pat(&self, id: ExprOrPatId) -> Option> { + pub fn type_of_expr_or_pat<'db>(&self, id: ExprOrPatId) -> Option> { match id { - ExprOrPatId::ExprId(id) => self.type_of_expr.get(id).copied(), - ExprOrPatId::PatId(id) => self.type_of_pat.get(id).copied(), + ExprOrPatId::ExprId(id) => self.type_of_expr.get(id).map(|it| it.as_ref()), + ExprOrPatId::PatId(id) => self.type_of_pat.get(id).map(|it| it.as_ref()), } } - pub fn type_of_expr_with_adjust(&self, id: ExprId) -> Option> { + pub fn type_of_expr_with_adjust<'db>(&self, id: ExprId) -> Option> { match self.expr_adjustments.get(&id).and_then(|adjustments| { adjustments.iter().rfind(|adj| { // https://github.com/rust-lang/rust/blob/67819923ac8ea353aaa775303f4c3aacbf41d010/compiler/rustc_mir_build/src/thir/cx/expr.rs#L140 @@ -660,37 +667,37 @@ impl<'db> InferenceResult<'db> { Adjustment { kind: Adjust::NeverToAny, target, - } if target.is_never() + } if target.as_ref().is_never() ) }) }) { - Some(adjustment) => Some(adjustment.target), - None => self.type_of_expr.get(id).copied(), + Some(adjustment) => Some(adjustment.target.as_ref()), + None => self.type_of_expr.get(id).map(|it| it.as_ref()), } } - pub fn type_of_pat_with_adjust(&self, id: PatId) -> Option> { + pub fn type_of_pat_with_adjust<'db>(&self, id: PatId) -> Option> { match self.pat_adjustments.get(&id).and_then(|adjustments| adjustments.last()) { - Some(adjusted) => Some(*adjusted), - None => self.type_of_pat.get(id).copied(), + Some(adjusted) => Some(adjusted.as_ref()), + None => self.type_of_pat.get(id).map(|it| it.as_ref()), } } pub fn is_erroneous(&self) -> bool { self.has_errors && self.type_of_expr.iter().count() == 0 } - pub fn diagnostics(&self) -> &[InferenceDiagnostic<'db>] { + pub fn diagnostics(&self) -> &[InferenceDiagnostic] { &self.diagnostics } - pub fn tuple_field_access_type(&self, id: TupleId) -> Tys<'db> { - self.tuple_field_access_types[id.0 as usize] + pub fn tuple_field_access_type<'db>(&self, id: TupleId) -> Tys<'db> { + self.tuple_field_access_types[id.0 as usize].as_ref() } - pub fn pat_adjustment(&self, id: PatId) -> Option<&[Ty<'db>]> { + pub fn pat_adjustment(&self, id: PatId) -> Option<&[StoredTy]> { self.pat_adjustments.get(&id).map(|it| &**it) } - pub fn expr_adjustment(&self, id: ExprId) -> Option<&[Adjustment<'db>]> { + pub fn expr_adjustment(&self, id: ExprId) -> Option<&[Adjustment]> { self.expr_adjustments.get(&id).map(|it| &**it) } @@ 
-699,135 +706,47 @@ impl<'db> InferenceResult<'db> { } // This method is consumed by external tools to run rust-analyzer as a library. Don't remove, please. - pub fn expression_types(&self) -> impl Iterator)> { - self.type_of_expr.iter().map(|(k, v)| (k, *v)) + pub fn expression_types<'db>(&self) -> impl Iterator)> { + self.type_of_expr.iter().map(|(k, v)| (k, v.as_ref())) } // This method is consumed by external tools to run rust-analyzer as a library. Don't remove, please. - pub fn pattern_types(&self) -> impl Iterator)> { - self.type_of_pat.iter().map(|(k, v)| (k, *v)) + pub fn pattern_types<'db>(&self) -> impl Iterator)> { + self.type_of_pat.iter().map(|(k, v)| (k, v.as_ref())) } // This method is consumed by external tools to run rust-analyzer as a library. Don't remove, please. - pub fn binding_types(&self) -> impl Iterator)> { - self.type_of_binding.iter().map(|(k, v)| (k, *v)) + pub fn binding_types<'db>(&self) -> impl Iterator)> { + self.type_of_binding.iter().map(|(k, v)| (k, v.as_ref())) } // This method is consumed by external tools to run rust-analyzer as a library. Don't remove, please. - pub fn return_position_impl_trait_types( - &self, + pub fn return_position_impl_trait_types<'db>( + &'db self, db: &'db dyn HirDatabase, - ) -> impl Iterator, Ty<'db>)> { - self.type_of_opaque.iter().filter_map(move |(&id, &ty)| { + ) -> impl Iterator)> { + self.type_of_opaque.iter().filter_map(move |(&id, ty)| { let ImplTraitId::ReturnTypeImplTrait(_, rpit_idx) = id.loc(db) else { return None; }; - Some((rpit_idx, ty)) + Some((rpit_idx, ty.as_ref())) }) } -} -impl<'db> Index for InferenceResult<'db> { - type Output = Ty<'db>; - - fn index(&self, expr: ExprId) -> &Ty<'db> { - self.type_of_expr.get(expr).unwrap_or(&self.error_ty) + pub fn expr_ty<'db>(&self, id: ExprId) -> Ty<'db> { + self.type_of_expr.get(id).map_or(self.error_ty.as_ref(), |it| it.as_ref()) } -} -impl<'db> Index for InferenceResult<'db> { - type Output = Ty<'db>; - - fn index(&self, pat: PatId) -> &Ty<'db> { - self.type_of_pat.get(pat).unwrap_or(&self.error_ty) + pub fn pat_ty<'db>(&self, id: PatId) -> Ty<'db> { + self.type_of_pat.get(id).map_or(self.error_ty.as_ref(), |it| it.as_ref()) } -} -impl<'db> Index for InferenceResult<'db> { - type Output = Ty<'db>; - - fn index(&self, id: ExprOrPatId) -> &Ty<'db> { - match id { - ExprOrPatId::ExprId(id) => &self[id], - ExprOrPatId::PatId(id) => &self[id], - } + pub fn expr_or_pat_ty<'db>(&self, id: ExprOrPatId) -> Ty<'db> { + self.type_of_expr_or_pat(id).unwrap_or(self.error_ty.as_ref()) } -} -impl<'db> Index for InferenceResult<'db> { - type Output = Ty<'db>; - - fn index(&self, b: BindingId) -> &Ty<'db> { - self.type_of_binding.get(b).unwrap_or(&self.error_ty) - } -} - -#[derive(Debug, Clone)] -struct InternedStandardTypes<'db> { - unit: Ty<'db>, - never: Ty<'db>, - char: Ty<'db>, - bool: Ty<'db>, - i8: Ty<'db>, - i16: Ty<'db>, - i32: Ty<'db>, - i64: Ty<'db>, - i128: Ty<'db>, - isize: Ty<'db>, - u8: Ty<'db>, - u16: Ty<'db>, - u32: Ty<'db>, - u64: Ty<'db>, - u128: Ty<'db>, - usize: Ty<'db>, - f16: Ty<'db>, - f32: Ty<'db>, - f64: Ty<'db>, - f128: Ty<'db>, - static_str_ref: Ty<'db>, - error: Ty<'db>, - - re_static: Region<'db>, - re_error: Region<'db>, - re_erased: Region<'db>, - - empty_args: GenericArgs<'db>, -} - -impl<'db> InternedStandardTypes<'db> { - fn new(interner: DbInterner<'db>) -> Self { - let str = Ty::new(interner, rustc_type_ir::TyKind::Str); - let re_static = Region::new_static(interner); - Self { - unit: Ty::new_unit(interner), - never: Ty::new(interner, 
TyKind::Never), - char: Ty::new(interner, TyKind::Char), - bool: Ty::new(interner, TyKind::Bool), - i8: Ty::new_int(interner, rustc_type_ir::IntTy::I8), - i16: Ty::new_int(interner, rustc_type_ir::IntTy::I16), - i32: Ty::new_int(interner, rustc_type_ir::IntTy::I32), - i64: Ty::new_int(interner, rustc_type_ir::IntTy::I64), - i128: Ty::new_int(interner, rustc_type_ir::IntTy::I128), - isize: Ty::new_int(interner, rustc_type_ir::IntTy::Isize), - u8: Ty::new_uint(interner, rustc_type_ir::UintTy::U8), - u16: Ty::new_uint(interner, rustc_type_ir::UintTy::U16), - u32: Ty::new_uint(interner, rustc_type_ir::UintTy::U32), - u64: Ty::new_uint(interner, rustc_type_ir::UintTy::U64), - u128: Ty::new_uint(interner, rustc_type_ir::UintTy::U128), - usize: Ty::new_uint(interner, rustc_type_ir::UintTy::Usize), - f16: Ty::new_float(interner, rustc_type_ir::FloatTy::F16), - f32: Ty::new_float(interner, rustc_type_ir::FloatTy::F32), - f64: Ty::new_float(interner, rustc_type_ir::FloatTy::F64), - f128: Ty::new_float(interner, rustc_type_ir::FloatTy::F128), - static_str_ref: Ty::new_ref(interner, re_static, str, Mutability::Not), - error: Ty::new_error(interner, ErrorGuaranteed), - - re_static, - re_error: Region::error(interner), - re_erased: Region::new_erased(interner), - - empty_args: GenericArgs::new_from_iter(interner, []), - } + pub fn binding_ty<'db>(&self, id: BindingId) -> Ty<'db> { + self.type_of_binding.get(id).map_or(self.error_ty.as_ref(), |it| it.as_ref()) } } @@ -848,7 +767,7 @@ pub(crate) struct InferenceContext<'body, 'db> { pub(crate) lang_items: &'db LangItems, /// The traits in scope, disregarding block modules. This is used for caching purposes. traits_in_scope: FxHashSet, - pub(crate) result: InferenceResult<'db>, + pub(crate) result: InferenceResult, tuple_field_accesses_rev: IndexSet, std::hash::BuildHasherDefault>, /// The return type of the function being inferred, the closure or async block if we're @@ -865,7 +784,7 @@ pub(crate) struct InferenceContext<'body, 'db> { resume_yield_tys: Option<(Ty<'db>, Ty<'db>)>, diverges: Diverges, breakables: Vec>, - types: InternedStandardTypes<'db>, + types: &'db crate::next_solver::DefaultAny<'db>, /// Whether we are inside the pattern of a destructuring assignment. inside_assignment: bool, @@ -873,7 +792,7 @@ pub(crate) struct InferenceContext<'body, 'db> { deferred_cast_checks: Vec>, // fields related to closure capture - current_captures: Vec>, + current_captures: Vec, /// A stack that has an entry for each projection in the current capture. /// /// For example, in `a.b.c`, we capture the spans of `a`, `a.b`, and `a.b.c`. 
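The accessors introduced above (`expr_ty`, `pat_ty`, `binding_ty`, and the many `store()`/`as_ref()` calls) all follow one pattern: `InferenceResult` now holds lifetime-erased stored values and re-borrows them on access, which is what lets the struct drop its `'db` parameter. A minimal sketch of that round-trip, using stand-in definitions (only the names `StoredTy`, `store` and `as_ref` are taken from the patch; the real types live in `crate::next_solver`):

    // Sketch only: simplified stand-ins for the stored/borrowed type pair.
    use std::marker::PhantomData;

    #[derive(Clone, Copy, PartialEq, Eq, Hash)]
    struct TyId(u32); // stand-in for an interner key

    #[derive(Clone)]
    struct StoredTy(TyId); // owned, lifetime-free form kept inside `InferenceResult`

    #[derive(Clone, Copy)]
    struct Ty<'db>(TyId, PhantomData<&'db ()>); // borrowed form used during inference

    impl<'db> Ty<'db> {
        // Erase the `'db` lifetime so the value can live in a salsa-tracked struct.
        fn store(self) -> StoredTy {
            StoredTy(self.0)
        }
    }

    impl StoredTy {
        // Re-attach a lifetime when handing the type back out to callers.
        fn as_ref<'db>(&self) -> Ty<'db> {
            Ty(self.0, PhantomData)
        }
    }

    // Mirror of the accessor pattern above: store on write, re-borrow on read.
    struct ResultSketch {
        type_of_expr: Vec<StoredTy>,
        error_ty: StoredTy,
    }

    impl ResultSketch {
        fn expr_ty<'db>(&'db self, idx: usize) -> Ty<'db> {
            self.type_of_expr.get(idx).map_or(self.error_ty.as_ref(), |it| it.as_ref())
        }
    }

    fn main() {
        let unit = Ty(TyId(0), PhantomData);
        let result = ResultSketch { type_of_expr: vec![unit.store()], error_ty: unit.store() };
        let _back: Ty<'_> = result.expr_ty(0);
    }

Write paths call `store()` once when recording a value; read paths call `as_ref()` and hand out a borrowed `Ty<'db>`, so callers keep the same borrowed API as before.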
@@ -886,7 +805,7 @@ pub(crate) struct InferenceContext<'body, 'db> { closure_dependencies: FxHashMap>, deferred_closures: FxHashMap, Ty<'db>, Vec>, ExprId)>>, - diagnostics: Diagnostics<'db>, + diagnostics: Diagnostics, } #[derive(Clone, Debug)] @@ -942,10 +861,10 @@ impl<'body, 'db> InferenceContext<'body, 'db> { ) -> Self { let trait_env = db.trait_environment_for_body(owner); let table = unify::InferenceTable::new(db, trait_env, resolver.krate(), Some(owner)); - let types = InternedStandardTypes::new(table.interner()); + let types = crate::next_solver::default_types(db); InferenceContext { - result: InferenceResult::new(types.error), - return_ty: types.error, // set in collect_* calls + result: InferenceResult::new(types.types.error), + return_ty: types.types.error, // set in collect_* calls types, target_features: OnceCell::new(), unstable_features: MethodResolutionUnstableFeatures::from_def_map( @@ -1008,7 +927,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { /// Clones `self` and calls `resolve_all()` on it. // FIXME: Remove this. - pub(crate) fn fixme_resolve_all_clone(&self) -> InferenceResult<'db> { + pub(crate) fn fixme_resolve_all_clone(&self) -> InferenceResult { let mut ctx = self.clone(); ctx.type_inference_fallback(); @@ -1032,7 +951,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { // `InferenceResult` in the middle of inference. See the fixme comment in `consteval::eval_to_const`. If you // used this function for another workaround, mention it here. If you really need this function and believe that // there is no problem in it being `pub(crate)`, remove this comment. - fn resolve_all(self) -> InferenceResult<'db> { + fn resolve_all(self) -> InferenceResult { let InferenceContext { mut table, mut result, tuple_field_accesses_rev, diagnostics, .. 
} = self; @@ -1066,23 +985,23 @@ impl<'body, 'db> InferenceContext<'body, 'db> { } = &mut result; for ty in type_of_expr.values_mut() { - *ty = table.resolve_completely(*ty); - *has_errors = *has_errors || ty.references_non_lt_error(); + *ty = table.resolve_completely(ty.as_ref()).store(); + *has_errors = *has_errors || ty.as_ref().references_non_lt_error(); } type_of_expr.shrink_to_fit(); for ty in type_of_pat.values_mut() { - *ty = table.resolve_completely(*ty); - *has_errors = *has_errors || ty.references_non_lt_error(); + *ty = table.resolve_completely(ty.as_ref()).store(); + *has_errors = *has_errors || ty.as_ref().references_non_lt_error(); } type_of_pat.shrink_to_fit(); for ty in type_of_binding.values_mut() { - *ty = table.resolve_completely(*ty); - *has_errors = *has_errors || ty.references_non_lt_error(); + *ty = table.resolve_completely(ty.as_ref()).store(); + *has_errors = *has_errors || ty.as_ref().references_non_lt_error(); } type_of_binding.shrink_to_fit(); for ty in type_of_type_placeholder.values_mut() { - *ty = table.resolve_completely(*ty); - *has_errors = *has_errors || ty.references_non_lt_error(); + *ty = table.resolve_completely(ty.as_ref()).store(); + *has_errors = *has_errors || ty.as_ref().references_non_lt_error(); } type_of_type_placeholder.shrink_to_fit(); type_of_opaque.shrink_to_fit(); @@ -1090,8 +1009,8 @@ impl<'body, 'db> InferenceContext<'body, 'db> { if let Some(type_mismatches) = type_mismatches { *has_errors = true; for mismatch in type_mismatches.values_mut() { - mismatch.expected = table.resolve_completely(mismatch.expected); - mismatch.actual = table.resolve_completely(mismatch.actual); + mismatch.expected = table.resolve_completely(mismatch.expected.as_ref()).store(); + mismatch.actual = table.resolve_completely(mismatch.actual.as_ref()).store(); } type_mismatches.shrink_to_fit(); } @@ -1101,23 +1020,23 @@ impl<'body, 'db> InferenceContext<'body, 'db> { ExpectedFunction { found: ty, .. } | UnresolvedField { receiver: ty, .. } | UnresolvedMethodCall { receiver: ty, .. } => { - *ty = table.resolve_completely(*ty); + *ty = table.resolve_completely(ty.as_ref()).store(); // FIXME: Remove this when we are on par with rustc in terms of inference - if ty.references_non_lt_error() { + if ty.as_ref().references_non_lt_error() { return false; } if let UnresolvedMethodCall { field_with_same_name, .. } = diagnostic && let Some(ty) = field_with_same_name { - *ty = table.resolve_completely(*ty); - if ty.references_non_lt_error() { + *ty = table.resolve_completely(ty.as_ref()).store(); + if ty.as_ref().references_non_lt_error() { *field_with_same_name = None; } } } TypedHole { expected: ty, .. 
} => { - *ty = table.resolve_completely(*ty); + *ty = table.resolve_completely(ty.as_ref()).store(); } _ => (), } @@ -1125,30 +1044,33 @@ impl<'body, 'db> InferenceContext<'body, 'db> { }); diagnostics.shrink_to_fit(); for (_, subst) in method_resolutions.values_mut() { - *subst = table.resolve_completely(*subst); - *has_errors = *has_errors || subst.types().any(|ty| ty.references_non_lt_error()); + *subst = table.resolve_completely(subst.as_ref()).store(); + *has_errors = + *has_errors || subst.as_ref().types().any(|ty| ty.references_non_lt_error()); } method_resolutions.shrink_to_fit(); for (_, subst) in assoc_resolutions.values_mut() { - *subst = table.resolve_completely(*subst); - *has_errors = *has_errors || subst.types().any(|ty| ty.references_non_lt_error()); + *subst = table.resolve_completely(subst.as_ref()).store(); + *has_errors = + *has_errors || subst.as_ref().types().any(|ty| ty.references_non_lt_error()); } assoc_resolutions.shrink_to_fit(); for adjustment in expr_adjustments.values_mut().flatten() { - adjustment.target = table.resolve_completely(adjustment.target); - *has_errors = *has_errors || adjustment.target.references_non_lt_error(); + adjustment.target = table.resolve_completely(adjustment.target.as_ref()).store(); + *has_errors = *has_errors || adjustment.target.as_ref().references_non_lt_error(); } expr_adjustments.shrink_to_fit(); for adjustment in pat_adjustments.values_mut().flatten() { - *adjustment = table.resolve_completely(*adjustment); - *has_errors = *has_errors || adjustment.references_non_lt_error(); + *adjustment = table.resolve_completely(adjustment.as_ref()).store(); + *has_errors = *has_errors || adjustment.as_ref().references_non_lt_error(); } pat_adjustments.shrink_to_fit(); result.tuple_field_access_types = tuple_field_accesses_rev .into_iter() - .map(|subst| table.resolve_completely(subst)) + .map(|subst| table.resolve_completely(subst).store()) .inspect(|subst| { - *has_errors = *has_errors || subst.iter().any(|ty| ty.references_non_lt_error()); + *has_errors = + *has_errors || subst.as_ref().iter().any(|ty| ty.references_non_lt_error()); }) .collect(); result.tuple_field_access_types.shrink_to_fit(); @@ -1174,7 +1096,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { data.type_ref, &data.store, InferenceTyDiagnosticSource::Signature, - LifetimeElisionKind::Elided(self.types.re_static), + LifetimeElisionKind::Elided(self.types.regions.statik), ); self.return_ty = return_ty; @@ -1232,7 +1154,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { ); self.process_user_written_ty(return_ty) } - None => self.types.unit, + None => self.types.types.unit, }; self.return_coercion = Some(CoerceMany::new(self.return_ty)); @@ -1262,10 +1184,10 @@ impl<'body, 'db> InferenceContext<'body, 'db> { } fn write_expr_ty(&mut self, expr: ExprId, ty: Ty<'db>) { - self.result.type_of_expr.insert(expr, ty); + self.result.type_of_expr.insert(expr, ty.store()); } - pub(crate) fn write_expr_adj(&mut self, expr: ExprId, adjustments: Box<[Adjustment<'db>]>) { + pub(crate) fn write_expr_adj(&mut self, expr: ExprId, adjustments: Box<[Adjustment]>) { if adjustments.is_empty() { return; } @@ -1278,7 +1200,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { ) => { // NeverToAny coercion can target any type, so instead of adding a new // adjustment on top we can change the target. 
- *target = *new_target; + *target = new_target.clone(); } _ => { *entry.get_mut() = adjustments; @@ -1291,7 +1213,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { } } - fn write_pat_adj(&mut self, pat: PatId, adjustments: Box<[Ty<'db>]>) { + fn write_pat_adj(&mut self, pat: PatId, adjustments: Box<[StoredTy]>) { if adjustments.is_empty() { return; } @@ -1304,7 +1226,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { func: FunctionId, subst: GenericArgs<'db>, ) { - self.result.method_resolutions.insert(expr, (func, subst)); + self.result.method_resolutions.insert(expr, (func, subst.store())); } fn write_variant_resolution(&mut self, id: ExprOrPatId, variant: VariantId) { @@ -1317,22 +1239,22 @@ impl<'body, 'db> InferenceContext<'body, 'db> { item: CandidateId, subs: GenericArgs<'db>, ) { - self.result.assoc_resolutions.insert(id, (item, subs)); + self.result.assoc_resolutions.insert(id, (item, subs.store())); } fn write_pat_ty(&mut self, pat: PatId, ty: Ty<'db>) { - self.result.type_of_pat.insert(pat, ty); + self.result.type_of_pat.insert(pat, ty.store()); } fn write_type_placeholder_ty(&mut self, type_ref: TypeRefId, ty: Ty<'db>) { - self.result.type_of_type_placeholder.insert(type_ref, ty); + self.result.type_of_type_placeholder.insert(type_ref, ty.store()); } fn write_binding_ty(&mut self, id: BindingId, ty: Ty<'db>) { - self.result.type_of_binding.insert(id, ty); + self.result.type_of_binding.insert(id, ty.store()); } - pub(crate) fn push_diagnostic(&self, diagnostic: InferenceDiagnostic<'db>) { + pub(crate) fn push_diagnostic(&self, diagnostic: InferenceDiagnostic) { self.diagnostics.push(diagnostic); } @@ -1429,7 +1351,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { } fn err_ty(&self) -> Ty<'db> { - self.types.error + self.types.types.error } pub(crate) fn make_body_lifetime(&mut self, lifetime_ref: LifetimeRefId) -> Region<'db> { @@ -1486,7 +1408,13 @@ impl<'body, 'db> InferenceContext<'body, 'db> { match ty.kind() { TyKind::Adt(adt_def, substs) => match adt_def.def_id().0 { AdtId::StructId(struct_id) => { - match self.db.field_types(struct_id.into()).values().next_back().copied() { + match self + .db + .field_types(struct_id.into()) + .values() + .next_back() + .map(|it| it.get()) + { Some(field) => { ty = field.instantiate(self.interner(), substs); } @@ -1547,7 +1475,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { self.result .type_mismatches .get_or_insert_default() - .insert(id, TypeMismatch { expected, actual }); + .insert(id, TypeMismatch { expected: expected.store(), actual: actual.store() }); } result } @@ -1588,11 +1516,11 @@ impl<'body, 'db> InferenceContext<'body, 'db> { if let Err(_err) = result { // FIXME: Emit diagnostic. 
} - result.unwrap_or(self.types.error) + result.unwrap_or(self.types.types.error) } fn expr_ty(&self, expr: ExprId) -> Ty<'db> { - self.result[expr] + self.result.expr_ty(expr) } fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty<'db> { @@ -1600,7 +1528,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { if let Some(it) = self.result.expr_adjustments.get(&e) && let Some(it) = it.last() { - ty = Some(it.target); + ty = Some(it.target.as_ref()); } ty.unwrap_or_else(|| self.expr_ty(e)) } @@ -1820,7 +1748,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { result } else { // FIXME diagnostic - (ctx.types.error, None) + (ctx.types.types.error, None) } } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/autoderef.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/autoderef.rs index b54a6cdee2d45..d748c89e67759 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/autoderef.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/autoderef.rs @@ -25,7 +25,7 @@ impl<'db> InferenceTable<'db> { } impl<'db, Ctx: AutoderefCtx<'db>> GeneralAutoderef<'db, Ctx> { - pub(crate) fn adjust_steps_as_infer_ok(&mut self) -> InferOk<'db, Vec>> { + pub(crate) fn adjust_steps_as_infer_ok(&mut self) -> InferOk<'db, Vec> { let steps = self.steps(); if steps.is_empty() { return InferOk { obligations: PredicateObligations::new(), value: vec![] }; @@ -42,7 +42,10 @@ impl<'db, Ctx: AutoderefCtx<'db>> GeneralAutoderef<'db, Ctx> { } }) .zip(targets) - .map(|(autoderef, target)| Adjustment { kind: Adjust::Deref(autoderef), target }) + .map(|(autoderef, target)| Adjustment { + kind: Adjust::Deref(autoderef), + target: target.store(), + }) .collect(); InferOk { obligations: self.take_obligations(), value: steps } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs index 00a1dfff6d958..d073b06ccc8a5 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs @@ -4,7 +4,7 @@ use hir_def::{AdtId, hir::ExprId, signatures::TraitFlags}; use rustc_ast_ir::Mutability; use rustc_type_ir::{ Flags, InferTy, TypeFlags, UintTy, - inherent::{AdtDef, BoundExistentialPredicates as _, IntoKind, SliceLike, Ty as _}, + inherent::{AdtDef, BoundExistentialPredicates as _, IntoKind, Ty as _}, }; use stdx::never; @@ -83,8 +83,13 @@ impl CastError { expr: ExprId, expr_ty: Ty<'db>, cast_ty: Ty<'db>, - ) -> InferenceDiagnostic<'db> { - InferenceDiagnostic::InvalidCast { expr, error: self, expr_ty, cast_ty } + ) -> InferenceDiagnostic { + InferenceDiagnostic::InvalidCast { + expr, + error: self, + expr_ty: expr_ty.store(), + cast_ty: cast_ty.store(), + } } } @@ -109,7 +114,7 @@ impl<'db> CastCheck<'db> { pub(super) fn check( &mut self, ctx: &mut InferenceContext<'_, 'db>, - ) -> Result<(), InferenceDiagnostic<'db>> { + ) -> Result<(), InferenceDiagnostic> { self.expr_ty = ctx.table.try_structurally_resolve_type(self.expr_ty); self.cast_ty = ctx.table.try_structurally_resolve_type(self.cast_ty); @@ -137,7 +142,7 @@ impl<'db> CastCheck<'db> { { return Err(InferenceDiagnostic::CastToUnsized { expr: self.expr, - cast_ty: self.cast_ty, + cast_ty: self.cast_ty.store(), }); } @@ -393,8 +398,9 @@ fn pointer_kind<'db>( let struct_data = id.fields(ctx.db); if let Some((last_field, _)) = struct_data.fields().iter().last() { - let last_field_ty = - ctx.db.field_types(id.into())[last_field].instantiate(ctx.interner(), subst); + let last_field_ty = ctx.db.field_types(id.into())[last_field] + 
.get() + .instantiate(ctx.interner(), subst); pointer_kind(last_field_ty, ctx) } else { Ok(Some(PointerKind::Thin)) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs index 14b0c9076c307..19ffa3a9398a4 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs @@ -13,7 +13,7 @@ use rustc_type_ir::{ ClosureArgs, ClosureArgsParts, CoroutineArgs, CoroutineArgsParts, CoroutineClosureArgs, CoroutineClosureArgsParts, Interner, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor, - inherent::{BoundExistentialPredicates, GenericArgs as _, IntoKind, SliceLike, Ty as _}, + inherent::{BoundExistentialPredicates, GenericArgs as _, IntoKind, Ty as _}, }; use tracing::debug; @@ -22,9 +22,8 @@ use crate::{ db::{InternedClosure, InternedCoroutine}, infer::{BreakableKind, Diverges, coerce::CoerceMany}, next_solver::{ - AliasTy, Binder, BoundRegionKind, BoundVarKind, BoundVarKinds, ClauseKind, DbInterner, - ErrorGuaranteed, FnSig, GenericArgs, PolyFnSig, PolyProjectionPredicate, Predicate, - PredicateKind, SolverDefId, Ty, TyKind, + AliasTy, Binder, ClauseKind, DbInterner, ErrorGuaranteed, FnSig, GenericArgs, PolyFnSig, + PolyProjectionPredicate, Predicate, PredicateKind, SolverDefId, Ty, TyKind, abi::Safety, infer::{ BoundRegionConversionTime, InferOk, InferResult, @@ -73,16 +72,17 @@ impl<'db> InferenceContext<'_, 'db> { let parent_args = GenericArgs::identity_for_item(interner, self.generic_def.into()); // FIXME: Make this an infer var and infer it later. - let tupled_upvars_ty = self.types.unit; + let tupled_upvars_ty = self.types.types.unit; let (id, ty, resume_yield_tys) = match closure_kind { ClosureKind::Coroutine(_) => { let yield_ty = self.table.next_ty_var(); - let resume_ty = liberated_sig.inputs().get(0).unwrap_or(self.types.unit); + let resume_ty = + liberated_sig.inputs().first().copied().unwrap_or(self.types.types.unit); // FIXME: Infer the upvars later. let parts = CoroutineArgsParts { - parent_args, - kind_ty: self.types.unit, + parent_args: parent_args.as_slice(), + kind_ty: self.types.types.unit, resume_ty, yield_ty, return_ty: body_ret_ty, @@ -119,7 +119,7 @@ impl<'db> InferenceContext<'_, 'db> { }; // FIXME: Infer the kind later if needed. let parts = ClosureArgsParts { - parent_args, + parent_args: parent_args.as_slice(), closure_kind_ty: Ty::from_closure_kind( interner, expected_kind.unwrap_or(rustc_type_ir::ClosureKind::Fn), @@ -140,9 +140,9 @@ impl<'db> InferenceContext<'_, 'db> { // async closures always return the type ascribed after the `->` (if present), // and yield `()`. let bound_return_ty = bound_sig.skip_binder().output(); - let bound_yield_ty = self.types.unit; + let bound_yield_ty = self.types.types.unit; // rustc uses a special lang item type for the resume ty. I don't believe this can cause us problems. - let resume_ty = self.types.unit; + let resume_ty = self.types.types.unit; // FIXME: Infer the kind later if needed. 
let closure_kind_ty = Ty::from_closure_kind( @@ -155,26 +155,26 @@ impl<'db> InferenceContext<'_, 'db> { let coroutine_captures_by_ref_ty = Ty::new_fn_ptr( interner, Binder::bind_with_vars( - interner.mk_fn_sig([], self.types.unit, false, Safety::Safe, FnAbi::Rust), - BoundVarKinds::new_from_iter( - interner, - [BoundVarKind::Region(BoundRegionKind::ClosureEnv)], + interner.mk_fn_sig( + [], + self.types.types.unit, + false, + Safety::Safe, + FnAbi::Rust, ), + self.types.coroutine_captures_by_ref_bound_var_kinds, ), ); let closure_args = CoroutineClosureArgs::new( interner, CoroutineClosureArgsParts { - parent_args, + parent_args: parent_args.as_slice(), closure_kind_ty, signature_parts_ty: Ty::new_fn_ptr( interner, bound_sig.map_bound(|sig| { interner.mk_fn_sig( - [ - resume_ty, - Ty::new_tup_from_iter(interner, sig.inputs().iter()), - ], + [resume_ty, Ty::new_tup(interner, sig.inputs())], Ty::new_tup(interner, &[bound_yield_ty, bound_return_ty]), sig.c_variadic, sig.safety, @@ -195,7 +195,7 @@ impl<'db> InferenceContext<'_, 'db> { // Now go through the argument patterns for (arg_pat, arg_ty) in args.iter().zip(bound_sig.skip_binder().inputs()) { - self.infer_top_pat(*arg_pat, arg_ty, None); + self.infer_top_pat(*arg_pat, *arg_ty, None); } // FIXME: lift these out into a struct @@ -668,7 +668,7 @@ impl<'db> InferenceContext<'_, 'db> { assert!(!expected_sig.skip_binder().has_vars_bound_above(rustc_type_ir::INNERMOST)); let bound_sig = expected_sig.map_bound(|sig| { self.interner().mk_fn_sig( - sig.inputs(), + sig.inputs().iter().copied(), sig.output(), sig.c_variadic, Safety::Safe, @@ -744,9 +744,10 @@ impl<'db> InferenceContext<'_, 'db> { // The liberated version of this signature should be a subtype // of the liberated form of the expectation. - for (supplied_ty, expected_ty) in - iter::zip(supplied_sig.inputs(), expected_sigs.liberated_sig.inputs()) - { + for (supplied_ty, expected_ty) in iter::zip( + supplied_sig.inputs().iter().copied(), + expected_sigs.liberated_sig.inputs().iter().copied(), + ) { // Check that E' = S'. let cause = ObligationCause::new(); let InferOk { value: (), obligations } = @@ -765,7 +766,8 @@ impl<'db> InferenceContext<'_, 'db> { let inputs = supplied_sig .inputs() - .into_iter() + .iter() + .copied() .map(|ty| table.infer_ctxt.resolve_vars_if_possible(ty)); expected_sigs.liberated_sig = table.interner().mk_fn_sig( diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure/analysis.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure/analysis.rs index 308c01865ae37..5b0360071d9da 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure/analysis.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure/analysis.rs @@ -15,7 +15,7 @@ use hir_def::{ }; use rustc_ast_ir::Mutability; use rustc_hash::{FxHashMap, FxHashSet}; -use rustc_type_ir::inherent::{IntoKind, SliceLike, Ty as _}; +use rustc_type_ir::inherent::{IntoKind, Ty as _}; use smallvec::{SmallVec, smallvec}; use stdx::{format_to, never}; use syntax::utils::is_raw_identifier; @@ -25,21 +25,21 @@ use crate::{ db::{HirDatabase, InternedClosure, InternedClosureId}, infer::InferenceContext, mir::{BorrowKind, MirSpan, MutBorrowKind, ProjectionElem}, - next_solver::{DbInterner, EarlyBinder, GenericArgs, Ty, TyKind}, + next_solver::{DbInterner, GenericArgs, StoredEarlyBinder, StoredTy, Ty, TyKind}, traits::FnTrait, }; // The below functions handle capture and closure kind (Fn, FnMut, ..) 
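As a reminder of what the capture analysis below computes, here is a plain Rust (edition 2021) example, independent of any hir-ty API: the closure captures the precise place `a.b.c` rather than all of `a`, the capture kind is by-value because `String` is not `Copy`, and moving the captured value out makes the closure `FnOnce`.

    // Plain Rust illustration of precise closure capture (edition 2021).
    struct C { c: String }
    struct B { b: C }

    fn main() {
        let a = B { b: C { c: String::from("captured") } };
        // Capture place: `a.b.c`; kind: by value (String is not Copy).
        // Moving the captured String out makes this closure `FnOnce`.
        let f = move || a.b.c;
        // Other, disjoint fields of `a` (if `B`/`C` had any and no Drop impls)
        // would still be usable here.
        println!("{}", f());
    }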
#[derive(Debug, Clone, PartialEq, Eq, Hash, salsa::Update)] -pub(crate) struct HirPlace<'db> { +pub(crate) struct HirPlace { pub(crate) local: BindingId, - pub(crate) projections: Vec>, + pub(crate) projections: Vec>, } -impl<'db> HirPlace<'db> { - fn ty(&self, ctx: &mut InferenceContext<'_, 'db>) -> Ty<'db> { - let mut ty = ctx.table.resolve_completely(ctx.result[self.local]); +impl HirPlace { + fn ty<'db>(&self, ctx: &mut InferenceContext<'_, 'db>) -> Ty<'db> { + let mut ty = ctx.table.resolve_completely(ctx.result.binding_ty(self.local)); for p in &self.projections { ty = p.projected_ty( &ctx.table.infer_ctxt, @@ -78,8 +78,8 @@ pub enum CaptureKind { } #[derive(Debug, Clone, PartialEq, Eq, salsa::Update)] -pub struct CapturedItem<'db> { - pub(crate) place: HirPlace<'db>, +pub struct CapturedItem { + pub(crate) place: HirPlace, pub(crate) kind: CaptureKind, /// The inner vec is the stacks; the outer vec is for each capture reference. /// @@ -88,11 +88,10 @@ pub struct CapturedItem<'db> { /// copy all captures of the inner closure to the outer closure, and then we may /// truncate them, and we want the correct span to be reported. span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>, - #[update(unsafe(with(crate::utils::unsafe_update_eq)))] - pub(crate) ty: EarlyBinder<'db, Ty<'db>>, + pub(crate) ty: StoredEarlyBinder, } -impl<'db> CapturedItem<'db> { +impl CapturedItem { pub fn local(&self) -> BindingId { self.place.local } @@ -102,9 +101,9 @@ impl<'db> CapturedItem<'db> { self.place.projections.iter().any(|it| !matches!(it, ProjectionElem::Deref)) } - pub fn ty(&self, db: &'db dyn HirDatabase, subst: GenericArgs<'db>) -> Ty<'db> { + pub fn ty<'db>(&self, db: &'db dyn HirDatabase, subst: GenericArgs<'db>) -> Ty<'db> { let interner = DbInterner::new_no_crate(db); - self.ty.instantiate(interner, subst.split_closure_args_untupled().parent_args) + self.ty.get().instantiate(interner, subst.split_closure_args_untupled().parent_args) } pub fn kind(&self) -> CaptureKind { @@ -273,15 +272,15 @@ impl<'db> CapturedItem<'db> { } #[derive(Debug, Clone, PartialEq, Eq)] -pub(crate) struct CapturedItemWithoutTy<'db> { - pub(crate) place: HirPlace<'db>, +pub(crate) struct CapturedItemWithoutTy { + pub(crate) place: HirPlace, pub(crate) kind: CaptureKind, /// The inner vec is the stacks; the outer vec is for each capture reference. pub(crate) span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>, } -impl<'db> CapturedItemWithoutTy<'db> { - fn with_ty(self, ctx: &mut InferenceContext<'_, 'db>) -> CapturedItem<'db> { +impl CapturedItemWithoutTy { + fn with_ty(self, ctx: &mut InferenceContext<'_, '_>) -> CapturedItem { let ty = self.place.ty(ctx); let ty = match &self.kind { CaptureKind::ByValue => ty, @@ -290,20 +289,20 @@ impl<'db> CapturedItemWithoutTy<'db> { BorrowKind::Mut { .. 
} => Mutability::Mut, _ => Mutability::Not, }; - Ty::new_ref(ctx.interner(), ctx.types.re_error, ty, m) + Ty::new_ref(ctx.interner(), ctx.types.regions.error, ty, m) } }; CapturedItem { place: self.place, kind: self.kind, span_stacks: self.span_stacks, - ty: EarlyBinder::bind(ty), + ty: StoredEarlyBinder::bind(ty.store()), } } } impl<'db> InferenceContext<'_, 'db> { - fn place_of_expr(&mut self, tgt_expr: ExprId) -> Option> { + fn place_of_expr(&mut self, tgt_expr: ExprId) -> Option { let r = self.place_of_expr_without_adjust(tgt_expr)?; let adjustments = self.result.expr_adjustments.get(&tgt_expr).map(|it| &**it).unwrap_or_default(); @@ -311,7 +310,7 @@ impl<'db> InferenceContext<'_, 'db> { } /// Pushes the span into `current_capture_span_stack`, *without clearing it first*. - fn path_place(&mut self, path: &Path, id: ExprOrPatId) -> Option> { + fn path_place(&mut self, path: &Path, id: ExprOrPatId) -> Option { if path.type_anchor().is_some() { return None; } @@ -332,7 +331,7 @@ impl<'db> InferenceContext<'_, 'db> { } /// Changes `current_capture_span_stack` to contain the stack of spans for this expr. - fn place_of_expr_without_adjust(&mut self, tgt_expr: ExprId) -> Option> { + fn place_of_expr_without_adjust(&mut self, tgt_expr: ExprId) -> Option { self.current_capture_span_stack.clear(); match &self.body[tgt_expr] { Expr::Path(p) => { @@ -367,7 +366,7 @@ impl<'db> InferenceContext<'_, 'db> { None } - fn push_capture(&mut self, place: HirPlace<'db>, kind: CaptureKind) { + fn push_capture(&mut self, place: HirPlace, kind: CaptureKind) { self.current_captures.push(CapturedItemWithoutTy { place, kind, @@ -375,11 +374,7 @@ impl<'db> InferenceContext<'_, 'db> { }); } - fn truncate_capture_spans( - &self, - capture: &mut CapturedItemWithoutTy<'db>, - mut truncate_to: usize, - ) { + fn truncate_capture_spans(&self, capture: &mut CapturedItemWithoutTy, mut truncate_to: usize) { // The first span is the identifier, and it must always remain. 
truncate_to += 1; for span_stack in &mut capture.span_stacks { @@ -404,14 +399,14 @@ impl<'db> InferenceContext<'_, 'db> { } } - fn ref_expr(&mut self, expr: ExprId, place: Option>) { + fn ref_expr(&mut self, expr: ExprId, place: Option) { if let Some(place) = place { self.add_capture(place, CaptureKind::ByRef(BorrowKind::Shared)); } self.walk_expr(expr); } - fn add_capture(&mut self, place: HirPlace<'db>, kind: CaptureKind) { + fn add_capture(&mut self, place: HirPlace, kind: CaptureKind) { if self.is_upvar(&place) { self.push_capture(place, kind); } @@ -427,7 +422,7 @@ impl<'db> InferenceContext<'_, 'db> { } } - fn mutate_expr(&mut self, expr: ExprId, place: Option>) { + fn mutate_expr(&mut self, expr: ExprId, place: Option) { if let Some(place) = place { self.add_capture( place, @@ -444,7 +439,7 @@ impl<'db> InferenceContext<'_, 'db> { self.walk_expr(expr); } - fn consume_place(&mut self, place: HirPlace<'db>) { + fn consume_place(&mut self, place: HirPlace) { if self.is_upvar(&place) { let ty = place.ty(self); let kind = if self.is_ty_copy(ty) { @@ -456,7 +451,7 @@ impl<'db> InferenceContext<'_, 'db> { } } - fn walk_expr_with_adjust(&mut self, tgt_expr: ExprId, adjustment: &[Adjustment<'db>]) { + fn walk_expr_with_adjust(&mut self, tgt_expr: ExprId, adjustment: &[Adjustment]) { if let Some((last, rest)) = adjustment.split_last() { match &last.kind { Adjust::NeverToAny | Adjust::Deref(None) | Adjust::Pointer(_) => { @@ -477,12 +472,7 @@ impl<'db> InferenceContext<'_, 'db> { } } - fn ref_capture_with_adjusts( - &mut self, - m: Mutability, - tgt_expr: ExprId, - rest: &[Adjustment<'db>], - ) { + fn ref_capture_with_adjusts(&mut self, m: Mutability, tgt_expr: ExprId, rest: &[Adjustment]) { let capture_kind = match m { Mutability::Mut => CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }), Mutability::Not => CaptureKind::ByRef(BorrowKind::Shared), @@ -780,7 +770,7 @@ impl<'db> InferenceContext<'_, 'db> { } Pat::Bind { id, .. } => match self.result.binding_modes[p] { crate::BindingMode::Move => { - if self.is_ty_copy(self.result.type_of_binding[*id]) { + if self.is_ty_copy(self.result.binding_ty(*id)) { update_result(CaptureKind::ByRef(BorrowKind::Shared)); } else { update_result(CaptureKind::ByValue); @@ -798,7 +788,7 @@ impl<'db> InferenceContext<'_, 'db> { self.body.walk_pats_shallow(p, |p| self.walk_pat_inner(p, update_result, for_mut)); } - fn is_upvar(&self, place: &HirPlace<'db>) -> bool { + fn is_upvar(&self, place: &HirPlace) -> bool { if let Some(c) = self.current_closure { let InternedClosure(_, root) = self.db.lookup_intern_closure(c); return self.body.is_binding_upvar(place.local, root); @@ -830,7 +820,7 @@ impl<'db> InferenceContext<'_, 'db> { // FIXME: Borrow checker problems without this. 
let mut current_captures = std::mem::take(&mut self.current_captures); for capture in &mut current_captures { - let mut ty = self.table.resolve_completely(self.result[capture.place.local]); + let mut ty = self.table.resolve_completely(self.result.binding_ty(capture.place.local)); if ty.is_raw_ptr() || ty.is_union() { capture.kind = CaptureKind::ByRef(BorrowKind::Shared); self.truncate_capture_spans(capture, 0); @@ -875,7 +865,7 @@ impl<'db> InferenceContext<'_, 'db> { fn minimize_captures(&mut self) { self.current_captures.sort_unstable_by_key(|it| it.place.projections.len()); - let mut hash_map = FxHashMap::, usize>::default(); + let mut hash_map = FxHashMap::::default(); let result = mem::take(&mut self.current_captures); for mut item in result { let mut lookup_place = HirPlace { local: item.place.local, projections: vec![] }; @@ -910,7 +900,7 @@ impl<'db> InferenceContext<'_, 'db> { } } - fn consume_with_pat(&mut self, mut place: HirPlace<'db>, tgt_pat: PatId) { + fn consume_with_pat(&mut self, mut place: HirPlace, tgt_pat: PatId) { let adjustments_count = self.result.pat_adjustments.get(&tgt_pat).map(|it| it.len()).unwrap_or_default(); place.projections.extend((0..adjustments_count).map(|_| ProjectionElem::Deref)); @@ -921,7 +911,7 @@ impl<'db> InferenceContext<'_, 'db> { Pat::Missing | Pat::Wild => (), Pat::Tuple { args, ellipsis } => { let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize)); - let field_count = match self.result[tgt_pat].kind() { + let field_count = match self.result.pat_ty(tgt_pat).kind() { TyKind::Tuple(s) => s.len(), _ => break 'reset_span_stack, }; @@ -1221,11 +1211,11 @@ impl<'db> InferenceContext<'_, 'db> { } /// Call this only when the last span in the stack isn't a split. -fn apply_adjusts_to_place<'db>( +fn apply_adjusts_to_place( current_capture_span_stack: &mut Vec, - mut r: HirPlace<'db>, - adjustments: &[Adjustment<'db>], -) -> Option> { + mut r: HirPlace, + adjustments: &[Adjustment], +) -> Option { let span = *current_capture_span_stack.last().expect("empty capture span stack"); for adj in adjustments { match &adj.kind { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs index 77c7155550da2..bb9cb1c1ca098 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs @@ -104,7 +104,7 @@ struct Coerce { cause: ObligationCause, } -type CoerceResult<'db> = InferResult<'db, (Vec>, Ty<'db>)>; +type CoerceResult<'db> = InferResult<'db, (Vec, Ty<'db>)>; /// Coercing a mutable reference to an immutable works, while /// coercing `&T` to `&mut T` should be forbidden. @@ -114,7 +114,7 @@ fn coerce_mutbls<'db>(from_mutbl: Mutability, to_mutbl: Mutability) -> RelateRes /// This always returns `Ok(...)`. 
fn success<'db>( - adj: Vec>, + adj: Vec, target: Ty<'db>, obligations: PredicateObligations<'db>, ) -> CoerceResult<'db> { @@ -206,14 +206,17 @@ where &mut self, a: Ty<'db>, b: Ty<'db>, - adjustments: impl IntoIterator>, + adjustments: impl IntoIterator, final_adjustment: Adjust, ) -> CoerceResult<'db> { self.unify_raw(a, b).and_then(|InferOk { value: ty, obligations }| { success( adjustments .into_iter() - .chain(std::iter::once(Adjustment { target: ty, kind: final_adjustment })) + .chain(std::iter::once(Adjustment { + target: ty.store(), + kind: final_adjustment, + })) .collect(), ty, obligations, @@ -237,7 +240,7 @@ where if self.coerce_never { return success( - vec![Adjustment { kind: Adjust::NeverToAny, target: b }], + vec![Adjustment { kind: Adjust::NeverToAny, target: b.store() }], b, PredicateObligations::new(), ); @@ -532,7 +535,8 @@ where // Now apply the autoref. let mutbl = AutoBorrowMutability::new(mutbl_b, self.allow_two_phase); - adjustments.push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), target: ty }); + adjustments + .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), target: ty.store() }); debug!("coerce_borrowed_pointer: succeeded ty={:?} adjustments={:?}", ty, adjustments); @@ -635,10 +639,10 @@ where let mutbl = AutoBorrowMutability::new(mutbl_b, AllowTwoPhase::No); Some(( - Adjustment { kind: Adjust::Deref(None), target: ty_a }, + Adjustment { kind: Adjust::Deref(None), target: ty_a.store() }, Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), - target: Ty::new_ref(self.interner(), r_borrow, ty_a, mutbl_b), + target: Ty::new_ref(self.interner(), r_borrow, ty_a, mutbl_b).store(), }, )) } @@ -646,16 +650,16 @@ where coerce_mutbls(mt_a, mt_b)?; Some(( - Adjustment { kind: Adjust::Deref(None), target: ty_a }, + Adjustment { kind: Adjust::Deref(None), target: ty_a.store() }, Adjustment { kind: Adjust::Borrow(AutoBorrow::RawPtr(mt_b)), - target: Ty::new_ptr(self.interner(), ty_a, mt_b), + target: Ty::new_ptr(self.interner(), ty_a, mt_b).store(), }, )) } _ => None, }; - let coerce_source = reborrow.as_ref().map_or(source, |(_, r)| r.target); + let coerce_source = reborrow.as_ref().map_or(source, |(_, r)| r.target.as_ref()); // Setup either a subtyping or a LUB relationship between // the `CoerceUnsized` target type and the expected type. 
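For reference, this is the shape of the adjustment list the reborrow path above records when coercing `&mut T` to `&T`: a `Deref` back to `T` followed by a fresh shared borrow, each step carrying its target type (now in stored form). The sketch uses simplified stand-in types, not the hir-ty definitions:

    // Stand-in sketch of the recorded reborrow adjustments for `&mut T -> &T`.
    #[derive(Debug, PartialEq)]
    enum Adjust {
        Deref,
        BorrowShared,
    }

    #[derive(Debug, PartialEq)]
    struct Adjustment {
        kind: Adjust,
        target: &'static str, // stand-in for the stored target type
    }

    fn reborrow_shared(pointee: &'static str, reference: &'static str) -> Vec<Adjustment> {
        vec![
            Adjustment { kind: Adjust::Deref, target: pointee },
            Adjustment { kind: Adjust::BorrowShared, target: reference },
        ]
    }

    fn main() {
        // `&mut i32` coerced to `&i32`
        assert_eq!(
            reborrow_shared("i32", "&i32"),
            vec![
                Adjustment { kind: Adjust::Deref, target: "i32" },
                Adjustment { kind: Adjust::BorrowShared, target: "&i32" },
            ]
        );
    }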
@@ -726,7 +730,7 @@ where Ok(None) => { if trait_pred.def_id().0 == unsize_did { let self_ty = trait_pred.self_ty(); - let unsize_ty = trait_pred.trait_ref.args.inner()[1].expect_ty(); + let unsize_ty = trait_pred.trait_ref.args[1].expect_ty(); debug!("coerce_unsized: ambiguous unsize case for {:?}", trait_pred); match (self_ty.kind(), unsize_ty.kind()) { (TyKind::Infer(rustc_type_ir::TyVar(v)), TyKind::Dynamic(..)) @@ -815,7 +819,7 @@ where b, adjustment.map(|kind| Adjustment { kind, - target: Ty::new_fn_ptr(this.interner(), fn_ty_a), + target: Ty::new_fn_ptr(this.interner(), fn_ty_a).store(), }), Adjust::Pointer(PointerCast::UnsafeFnPointer), ) @@ -955,7 +959,7 @@ where self.unify_and( a_raw, b, - [Adjustment { kind: Adjust::Deref(None), target: mt_a.ty }], + [Adjustment { kind: Adjust::Deref(None), target: mt_a.ty.store() }], Adjust::Borrow(AutoBorrow::RawPtr(mutbl_b)), ) } else if mt_a.mutbl != mutbl_b { @@ -1170,12 +1174,15 @@ impl<'db> InferenceContext<'_, 'db> { for &expr in exprs { self.write_expr_adj( expr, - Box::new([Adjustment { kind: prev_adjustment.clone(), target: fn_ptr }]), + Box::new([Adjustment { + kind: prev_adjustment.clone(), + target: fn_ptr.store(), + }]), ); } self.write_expr_adj( new, - Box::new([Adjustment { kind: next_adjustment, target: fn_ptr }]), + Box::new([Adjustment { kind: next_adjustment, target: fn_ptr.store() }]), ); return Ok(fn_ptr); } @@ -1390,7 +1397,7 @@ impl<'db, 'exprs> CoerceMany<'db, 'exprs> { icx, cause, expr, - icx.types.unit, + icx.types.types.unit, true, label_unit_as_expected, expr_is_read, @@ -1505,14 +1512,14 @@ impl<'db, 'exprs> CoerceMany<'db, 'exprs> { // emit or provide suggestions on how to fix the initial error. icx.set_tainted_by_errors(); - self.final_ty = Some(icx.types.error); + self.final_ty = Some(icx.types.types.error); icx.result.type_mismatches.get_or_insert_default().insert( expression.into(), if label_expression_as_expected { - TypeMismatch { expected: found, actual: expected } + TypeMismatch { expected: found.store(), actual: expected.store() } } else { - TypeMismatch { expected, actual: found } + TypeMismatch { expected: expected.store(), actual: found.store() } }, ); } @@ -1528,7 +1535,7 @@ impl<'db, 'exprs> CoerceMany<'db, 'exprs> { // If we only had inputs that were of type `!` (or no // inputs at all), then the final type is `!`. assert_eq!(self.pushed, 0); - icx.types.never + icx.types.types.never } } } @@ -1570,7 +1577,7 @@ fn coerce<'db>( db: &'db dyn HirDatabase, env: ParamEnvAndCrate<'db>, tys: &Canonical<'db, (Ty<'db>, Ty<'db>)>, -) -> Result<(Vec>, Ty<'db>), TypeError>> { +) -> Result<(Vec, Ty<'db>), TypeError>> { let interner = DbInterner::new_with(db, env.krate); let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis); let ((ty1_with_vars, ty2_with_vars), vars) = infcx.instantiate_canonical(tys); @@ -1593,7 +1600,6 @@ fn coerce<'db>( let mut ocx = ObligationCtxt::new(&infcx); let (adjustments, ty) = ocx.register_infer_ok_obligations(infer_ok); _ = ocx.try_evaluate_obligations(); - let (adjustments, ty) = infcx.resolve_vars_if_possible((adjustments, ty)); // default any type vars that weren't unified back to their original bound vars // (kind of hacky) @@ -1701,10 +1707,18 @@ fn coerce<'db>( } // FIXME: We don't fallback correctly since this is done on `InferenceContext` and we only have `InferCtxt`. 
- let (adjustments, ty) = (adjustments, ty).fold_with(&mut Resolver { - interner, - debruijn: DebruijnIndex::ZERO, - var_values: vars.var_values, - }); + let mut resolver = + Resolver { interner, debruijn: DebruijnIndex::ZERO, var_values: vars.var_values }; + let ty = infcx.resolve_vars_if_possible(ty).fold_with(&mut resolver); + let adjustments = adjustments + .into_iter() + .map(|adjustment| Adjustment { + kind: adjustment.kind, + target: infcx + .resolve_vars_if_possible(adjustment.target.as_ref()) + .fold_with(&mut resolver) + .store(), + }) + .collect(); Ok((adjustments, ty)) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/diagnostics.rs index 0eb7a2f4740ff..2bdc6f9491dcb 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/diagnostics.rs @@ -25,10 +25,10 @@ use crate::{ // to our resolver and so we cannot have mutable reference, but we really want to have // ability to dispatch diagnostics during this work otherwise the code becomes a complete mess. #[derive(Debug, Default, Clone)] -pub(super) struct Diagnostics<'db>(RefCell>>); +pub(super) struct Diagnostics(RefCell>); -impl<'db> Diagnostics<'db> { - pub(super) fn push(&self, diagnostic: InferenceDiagnostic<'db>) { +impl Diagnostics { + pub(super) fn push(&self, diagnostic: InferenceDiagnostic) { self.0.borrow_mut().push(diagnostic); } @@ -42,19 +42,19 @@ impl<'db> Diagnostics<'db> { ); } - pub(super) fn finish(self) -> ThinVec> { + pub(super) fn finish(self) -> ThinVec { self.0.into_inner() } } -pub(crate) struct PathDiagnosticCallbackData<'a, 'db> { +pub(crate) struct PathDiagnosticCallbackData<'a> { node: ExprOrPatId, - diagnostics: &'a Diagnostics<'db>, + diagnostics: &'a Diagnostics, } pub(super) struct InferenceTyLoweringContext<'db, 'a> { ctx: TyLoweringContext<'db, 'a>, - diagnostics: &'a Diagnostics<'db>, + diagnostics: &'a Diagnostics, source: InferenceTyDiagnosticSource, } @@ -64,7 +64,7 @@ impl<'db, 'a> InferenceTyLoweringContext<'db, 'a> { db: &'db dyn HirDatabase, resolver: &'a Resolver<'db>, store: &'a ExpressionStore, - diagnostics: &'a Diagnostics<'db>, + diagnostics: &'a Diagnostics, source: InferenceTyDiagnosticSource, generic_def: GenericDefId, lifetime_elision: LifetimeElisionKind<'db>, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs index 4e1711e48ec43..226e9f5cd6674 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs @@ -17,7 +17,7 @@ use hir_expand::name::Name; use rustc_ast_ir::Mutability; use rustc_type_ir::{ CoroutineArgs, CoroutineArgsParts, InferTy, Interner, - inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike, Ty as _}, + inherent::{AdtDef, GenericArgs as _, IntoKind, Ty as _}, }; use syntax::ast::RangeOp; use tracing::debug; @@ -35,7 +35,7 @@ use crate::{ lower::{GenericPredicates, lower_mutability}, method_resolution::{self, CandidateId, MethodCallee, MethodError}, next_solver::{ - ErrorGuaranteed, FnSig, GenericArgs, TraitRef, Ty, TyKind, TypeError, + ErrorGuaranteed, FnSig, GenericArg, GenericArgs, TraitRef, Ty, TyKind, TypeError, infer::{ BoundRegionConversionTime, InferOk, traits::{Obligation, ObligationCause}, @@ -68,10 +68,10 @@ impl<'db> InferenceContext<'_, 'db> { if let Some(expected_ty) = expected.only_has_type(&mut self.table) { let could_unify = self.unify(ty, expected_ty); 
if !could_unify { - self.result - .type_mismatches - .get_or_insert_default() - .insert(tgt_expr.into(), TypeMismatch { expected: expected_ty, actual: ty }); + self.result.type_mismatches.get_or_insert_default().insert( + tgt_expr.into(), + TypeMismatch { expected: expected_ty.store(), actual: ty.store() }, + ); } } ty @@ -98,10 +98,10 @@ impl<'db> InferenceContext<'_, 'db> { match self.coerce(expr.into(), ty, target, AllowTwoPhase::No, is_read) { Ok(res) => res, Err(_) => { - self.result - .type_mismatches - .get_or_insert_default() - .insert(expr.into(), TypeMismatch { expected: target, actual: ty }); + self.result.type_mismatches.get_or_insert_default().insert( + expr.into(), + TypeMismatch { expected: target.store(), actual: ty.store() }, + ); target } } @@ -276,7 +276,7 @@ impl<'db> InferenceContext<'_, 'db> { if ty.is_never() { if let Some(adjustments) = self.result.expr_adjustments.get(&expr) { return if let [Adjustment { kind: Adjust::NeverToAny, target }] = &**adjustments { - *target + target.as_ref() } else { self.err_ty() }; @@ -292,10 +292,10 @@ impl<'db> InferenceContext<'_, 'db> { if let Some(expected_ty) = expected.only_has_type(&mut self.table) { let could_unify = self.unify(ty, expected_ty); if !could_unify { - self.result - .type_mismatches - .get_or_insert_default() - .insert(expr.into(), TypeMismatch { expected: expected_ty, actual: ty }); + self.result.type_mismatches.get_or_insert_default().insert( + expr.into(), + TypeMismatch { expected: expected_ty.store(), actual: ty.store() }, + ); } } ty @@ -319,7 +319,7 @@ impl<'db> InferenceContext<'_, 'db> { let expected = &expected.adjust_for_branches(&mut self.table); self.infer_expr_coerce_never( condition, - &Expectation::HasType(self.types.bool), + &Expectation::HasType(self.types.types.bool), ExprIsRead::Yes, ); @@ -375,7 +375,7 @@ impl<'db> InferenceContext<'_, 'db> { input_ty, Some(DeclContext { origin: DeclOrigin::LetExpr }), ); - self.types.bool + self.types.types.bool } Expr::Block { statements, tail, label, id: _ } => { self.infer_block(tgt_expr, statements, *tail, *label, expected) @@ -400,7 +400,7 @@ impl<'db> InferenceContext<'_, 'db> { self.with_breakable_ctx(BreakableKind::Loop, Some(ty), label, |this| { this.infer_expr( body, - &Expectation::HasType(this.types.unit), + &Expectation::HasType(this.types.types.unit), ExprIsRead::Yes, ); }); @@ -410,7 +410,7 @@ impl<'db> InferenceContext<'_, 'db> { self.diverges = Diverges::Maybe; breaks } - None => self.types.never, + None => self.types.types.never, } } Expr::Closure { body, args, ret_type, arg_types, closure_kind, capture_by: _ } => self @@ -451,7 +451,7 @@ impl<'db> InferenceContext<'_, 'db> { if arms.is_empty() { self.diverges = Diverges::Always; - self.types.never + self.types.types.never } else { let matchee_diverges = mem::replace(&mut self.diverges, Diverges::Maybe); let mut all_arms_diverge = Diverges::Always; @@ -463,7 +463,7 @@ impl<'db> InferenceContext<'_, 'db> { let result_ty = match &expected { // We don't coerce to `()` so that if the match expression is a // statement it's branches can have any consistent type. 
- Expectation::HasType(ty) if *ty != self.types.unit => *ty, + Expectation::HasType(ty) if *ty != self.types.types.unit => *ty, _ => self.table.next_ty_var(), }; let mut coerce = CoerceMany::new(result_ty); @@ -473,7 +473,7 @@ impl<'db> InferenceContext<'_, 'db> { self.diverges = Diverges::Maybe; self.infer_expr_coerce_never( guard_expr, - &Expectation::HasType(self.types.bool), + &Expectation::HasType(self.types.types.bool), ExprIsRead::Yes, ); } @@ -504,7 +504,7 @@ impl<'db> InferenceContext<'_, 'db> { bad_value_break: false, }); }; - self.types.never + self.types.types.never } &Expr::Break { expr, label } => { let val_ty = if let Some(expr) = expr { @@ -528,7 +528,7 @@ impl<'db> InferenceContext<'_, 'db> { ExprIsRead::Yes, ) } else { - self.types.unit + self.types.types.unit }; match find_breakable(&mut self.breakables, label) { @@ -558,7 +558,7 @@ impl<'db> InferenceContext<'_, 'db> { }); } } - self.types.never + self.types.types.never } &Expr::Return { expr } => self.infer_expr_return(tgt_expr, expr), &Expr::Become { expr } => self.infer_expr_become(expr), @@ -571,7 +571,7 @@ impl<'db> InferenceContext<'_, 'db> { ExprIsRead::Yes, ); } else { - let unit = self.types.unit; + let unit = self.types.types.unit; let _ = self.coerce( tgt_expr.into(), unit, @@ -583,14 +583,14 @@ impl<'db> InferenceContext<'_, 'db> { resume_ty } else { // FIXME: report error (yield expr in non-coroutine) - self.types.error + self.types.types.error } } Expr::Yeet { expr } => { if let &Some(expr) = expr { self.infer_expr_no_expect(expr, ExprIsRead::Yes); } - self.types.never + self.types.types.never } Expr::RecordLit { path, fields, spread, .. } => { let (ty, def_id) = self.resolve_variant(tgt_expr.into(), path.as_deref(), false); @@ -599,7 +599,7 @@ impl<'db> InferenceContext<'_, 'db> { self.unify(ty, t); } - let substs = ty.as_adt().map(|(_, s)| s).unwrap_or(self.types.empty_args); + let substs = ty.as_adt().map(|(_, s)| s).unwrap_or(self.types.empty.generic_args); if let Some(variant) = def_id { self.write_variant_resolution(tgt_expr.into(), variant); } @@ -637,7 +637,7 @@ impl<'db> InferenceContext<'_, 'db> { } }; let field_ty = field_def.map_or(self.err_ty(), |it| { - field_types[it].instantiate(self.interner(), &substs) + field_types[it].get().instantiate(self.interner(), &substs) }); // Field type might have some unknown types @@ -768,7 +768,7 @@ impl<'db> InferenceContext<'_, 'db> { // assignments into blocks. self.table.new_maybe_never_var() } else { - self.types.unit + self.types.types.unit } } Expr::Range { lhs, rhs, range_type } => { @@ -780,12 +780,14 @@ impl<'db> InferenceContext<'_, 'db> { Ty::new_adt( self.interner(), adt, - GenericArgs::new_from_iter(self.interner(), [ty.into()]), + GenericArgs::new_from_slice(&[GenericArg::from(ty)]), ) }; match (range_type, lhs_ty, rhs_ty) { (RangeOp::Exclusive, None, None) => match self.resolve_range_full() { - Some(adt) => Ty::new_adt(self.interner(), adt, self.types.empty_args), + Some(adt) => { + Ty::new_adt(self.interner(), adt, self.types.empty.generic_args) + } None => self.err_ty(), }, (RangeOp::Exclusive, None, Some(ty)) => match self.resolve_range_to() { @@ -834,7 +836,7 @@ impl<'db> InferenceContext<'_, 'db> { trait_element_ty } // FIXME: Report an error. - None => self.types.error, + None => self.types.types.error, } } Expr::Tuple { exprs, .. } => { @@ -859,10 +861,10 @@ impl<'db> InferenceContext<'_, 'db> { } Expr::Array(array) => self.infer_expr_array(array, expected), Expr::Literal(lit) => match lit { - Literal::Bool(..) 
=> self.types.bool, - Literal::String(..) => self.types.static_str_ref, + Literal::Bool(..) => self.types.types.bool, + Literal::String(..) => self.types.types.static_str_ref, Literal::ByteString(bs) => { - let byte_type = self.types.u8; + let byte_type = self.types.types.u8; let len = consteval::usize_const( self.db, @@ -871,35 +873,46 @@ impl<'db> InferenceContext<'_, 'db> { ); let array_type = Ty::new_array_with_const_len(self.interner(), byte_type, len); - Ty::new_ref(self.interner(), self.types.re_static, array_type, Mutability::Not) + Ty::new_ref( + self.interner(), + self.types.regions.statik, + array_type, + Mutability::Not, + ) } Literal::CString(..) => Ty::new_ref( self.interner(), - self.types.re_static, + self.types.regions.statik, self.lang_items.CStr.map_or_else( || self.err_ty(), - |strukt| Ty::new_adt(self.interner(), strukt.into(), self.types.empty_args), + |strukt| { + Ty::new_adt( + self.interner(), + strukt.into(), + self.types.empty.generic_args, + ) + }, ), Mutability::Not, ), - Literal::Char(..) => self.types.char, + Literal::Char(..) => self.types.types.char, Literal::Int(_v, ty) => match ty { Some(int_ty) => match int_ty { - hir_def::builtin_type::BuiltinInt::Isize => self.types.isize, - hir_def::builtin_type::BuiltinInt::I8 => self.types.i8, - hir_def::builtin_type::BuiltinInt::I16 => self.types.i16, - hir_def::builtin_type::BuiltinInt::I32 => self.types.i32, - hir_def::builtin_type::BuiltinInt::I64 => self.types.i64, - hir_def::builtin_type::BuiltinInt::I128 => self.types.i128, + hir_def::builtin_type::BuiltinInt::Isize => self.types.types.isize, + hir_def::builtin_type::BuiltinInt::I8 => self.types.types.i8, + hir_def::builtin_type::BuiltinInt::I16 => self.types.types.i16, + hir_def::builtin_type::BuiltinInt::I32 => self.types.types.i32, + hir_def::builtin_type::BuiltinInt::I64 => self.types.types.i64, + hir_def::builtin_type::BuiltinInt::I128 => self.types.types.i128, }, None => { let expected_ty = expected.to_option(&mut self.table); tracing::debug!(?expected_ty); let opt_ty = match expected_ty.as_ref().map(|it| it.kind()) { Some(TyKind::Int(_) | TyKind::Uint(_)) => expected_ty, - Some(TyKind::Char) => Some(self.types.u8), + Some(TyKind::Char) => Some(self.types.types.u8), Some(TyKind::RawPtr(..) | TyKind::FnDef(..) 
| TyKind::FnPtr(..)) => { - Some(self.types.usize) + Some(self.types.types.usize) } _ => None, }; @@ -908,20 +921,20 @@ impl<'db> InferenceContext<'_, 'db> { }, Literal::Uint(_v, ty) => match ty { Some(int_ty) => match int_ty { - hir_def::builtin_type::BuiltinUint::Usize => self.types.usize, - hir_def::builtin_type::BuiltinUint::U8 => self.types.u8, - hir_def::builtin_type::BuiltinUint::U16 => self.types.u16, - hir_def::builtin_type::BuiltinUint::U32 => self.types.u32, - hir_def::builtin_type::BuiltinUint::U64 => self.types.u64, - hir_def::builtin_type::BuiltinUint::U128 => self.types.u128, + hir_def::builtin_type::BuiltinUint::Usize => self.types.types.usize, + hir_def::builtin_type::BuiltinUint::U8 => self.types.types.u8, + hir_def::builtin_type::BuiltinUint::U16 => self.types.types.u16, + hir_def::builtin_type::BuiltinUint::U32 => self.types.types.u32, + hir_def::builtin_type::BuiltinUint::U64 => self.types.types.u64, + hir_def::builtin_type::BuiltinUint::U128 => self.types.types.u128, }, None => { let expected_ty = expected.to_option(&mut self.table); let opt_ty = match expected_ty.as_ref().map(|it| it.kind()) { Some(TyKind::Int(_) | TyKind::Uint(_)) => expected_ty, - Some(TyKind::Char) => Some(self.types.u8), + Some(TyKind::Char) => Some(self.types.types.u8), Some(TyKind::RawPtr(..) | TyKind::FnDef(..) | TyKind::FnPtr(..)) => { - Some(self.types.usize) + Some(self.types.types.usize) } _ => None, }; @@ -930,10 +943,10 @@ impl<'db> InferenceContext<'_, 'db> { }, Literal::Float(_v, ty) => match ty { Some(float_ty) => match float_ty { - hir_def::builtin_type::BuiltinFloat::F16 => self.types.f16, - hir_def::builtin_type::BuiltinFloat::F32 => self.types.f32, - hir_def::builtin_type::BuiltinFloat::F64 => self.types.f64, - hir_def::builtin_type::BuiltinFloat::F128 => self.types.f128, + hir_def::builtin_type::BuiltinFloat::F16 => self.types.types.f16, + hir_def::builtin_type::BuiltinFloat::F32 => self.types.types.f32, + hir_def::builtin_type::BuiltinFloat::F64 => self.types.types.f64, + hir_def::builtin_type::BuiltinFloat::F128 => self.types.types.f128, }, None => { let opt_ty = expected @@ -947,10 +960,13 @@ impl<'db> InferenceContext<'_, 'db> { // Underscore expression is an error, we render a specialized diagnostic // to let the user know what type is expected though. let expected = expected.to_option(&mut self.table).unwrap_or_else(|| self.err_ty()); - self.push_diagnostic(InferenceDiagnostic::TypedHole { expr: tgt_expr, expected }); + self.push_diagnostic(InferenceDiagnostic::TypedHole { + expr: tgt_expr, + expected: expected.store(), + }); expected } - Expr::OffsetOf(_) => self.types.usize, + Expr::OffsetOf(_) => self.types.types.usize, Expr::InlineAsm(asm) => { let check_expr_asm_operand = |this: &mut Self, expr, is_input: bool| { let ty = this.infer_expr_no_expect(expr, ExprIsRead::Yes); @@ -1011,7 +1027,7 @@ impl<'db> InferenceContext<'_, 'db> { AsmOperand::Label(expr) => { self.infer_expr( expr, - &Expectation::HasType(self.types.unit), + &Expectation::HasType(self.types.types.unit), ExprIsRead::No, ); } @@ -1021,7 +1037,7 @@ impl<'db> InferenceContext<'_, 'db> { // FIXME: `sym` should report for things that are not functions or statics. AsmOperand::Sym(_) => (), }); - if diverge { self.types.never } else { self.types.unit } + if diverge { self.types.types.never } else { self.types.types.unit } } }; // use a new type variable if we got unknown here @@ -1143,7 +1159,7 @@ impl<'db> InferenceContext<'_, 'db> { oprnd_t = ty; } else { // FIXME: Report an error. 
- oprnd_t = self.types.error; + oprnd_t = self.types.types.error; } } UnaryOp::Not => { @@ -1183,10 +1199,10 @@ impl<'db> InferenceContext<'_, 'db> { match this.coerce(tgt_expr.into(), ty, target, AllowTwoPhase::No, ExprIsRead::Yes) { Ok(res) => res, Err(_) => { - this.result - .type_mismatches - .get_or_insert_default() - .insert(tgt_expr.into(), TypeMismatch { expected: target, actual: ty }); + this.result.type_mismatches.get_or_insert_default().insert( + tgt_expr.into(), + TypeMismatch { expected: target.store(), actual: ty.store() }, + ); target } } @@ -1216,14 +1232,14 @@ impl<'db> InferenceContext<'_, 'db> { CoroutineArgs::new( self.interner(), CoroutineArgsParts { - parent_args, - kind_ty: self.types.unit, + parent_args: parent_args.as_slice(), + kind_ty: self.types.types.unit, // rustc uses a special lang item type for the resume ty. I don't believe this can cause us problems. - resume_ty: self.types.unit, - yield_ty: self.types.unit, + resume_ty: self.types.types.unit, + yield_ty: self.types.types.unit, return_ty: inner_ty, // FIXME: Infer upvars. - tupled_upvars_ty: self.types.unit, + tupled_upvars_ty: self.types.types.unit, }, ) .args, @@ -1234,7 +1250,7 @@ impl<'db> InferenceContext<'_, 'db> { &mut self, fn_x: FnTrait, derefed_callee: Ty<'db>, - adjustments: &mut Vec>, + adjustments: &mut Vec, callee_ty: Ty<'db>, params: &[Ty<'db>], tgt_expr: ExprId, @@ -1249,7 +1265,8 @@ impl<'db> InferenceContext<'_, 'db> { .unwrap_or(true) { // prefer reborrow to move - adjustments.push(Adjustment { kind: Adjust::Deref(None), target: inner }); + adjustments + .push(Adjustment { kind: Adjust::Deref(None), target: inner.store() }); adjustments.push(Adjustment::borrow( self.interner(), Mutability::Mut, @@ -1282,13 +1299,10 @@ impl<'db> InferenceContext<'_, 'db> { }; let trait_data = trait_.trait_items(self.db); if let Some(func) = trait_data.method_by_name(&fn_x.method_name()) { - let subst = GenericArgs::new_from_iter( - self.interner(), - [ - callee_ty.into(), - Ty::new_tup_from_iter(self.interner(), params.iter().copied()).into(), - ], - ); + let subst = GenericArgs::new_from_slice(&[ + callee_ty.into(), + Ty::new_tup(self.interner(), params).into(), + ]); self.write_method_resolution(tgt_expr, func, subst); } } @@ -1332,7 +1346,7 @@ impl<'db> InferenceContext<'_, 'db> { &Expectation::has_type(elem_ty), ExprIsRead::Yes, ); - let usize = self.types.usize; + let usize = self.types.types.usize; let len = match self.body[repeat] { Expr::Underscore => { self.write_expr_ty(repeat, usize); @@ -1389,7 +1403,7 @@ impl<'db> InferenceContext<'_, 'db> { } } } - self.types.never + self.types.types.never } fn infer_expr_become(&mut self, expr: ExprId) -> Ty<'db> { @@ -1410,7 +1424,7 @@ impl<'db> InferenceContext<'_, 'db> { } } - self.types.never + self.types.types.never } fn infer_expr_box(&mut self, inner_expr: ExprId, expected: &Expectation<'db>) -> Ty<'db> { @@ -1501,7 +1515,7 @@ impl<'db> InferenceContext<'_, 'db> { mem::replace(&mut this.diverges, Diverges::Maybe); this.infer_expr_coerce( *expr, - &Expectation::HasType(this.types.never), + &Expectation::HasType(this.types.types.never), ExprIsRead::Yes, ); this.diverges = previous_diverges; @@ -1513,7 +1527,7 @@ impl<'db> InferenceContext<'_, 'db> { } else { this.infer_expr_coerce( expr, - &Expectation::HasType(this.types.unit), + &Expectation::HasType(this.types.types.unit), ExprIsRead::Yes, ); } @@ -1540,7 +1554,7 @@ impl<'db> InferenceContext<'_, 'db> { if this .coerce( expr.into(), - this.types.unit, + this.types.types.unit, t, AllowTwoPhase::No, 
ExprIsRead::Yes, @@ -1549,12 +1563,15 @@ impl<'db> InferenceContext<'_, 'db> { { this.result.type_mismatches.get_or_insert_default().insert( expr.into(), - TypeMismatch { expected: t, actual: this.types.unit }, + TypeMismatch { + expected: t.store(), + actual: this.types.types.unit.store(), + }, ); } t } else { - this.types.unit + this.types.types.unit } } }); @@ -1567,7 +1584,7 @@ impl<'db> InferenceContext<'_, 'db> { &mut self, receiver_ty: Ty<'db>, name: &Name, - ) -> Option<(Ty<'db>, Either, Vec>, bool)> { + ) -> Option<(Ty<'db>, Either, Vec, bool)> { let interner = self.interner(); let mut autoderef = self.table.autoderef_with_tracking(receiver_ty); let mut private_field = None; @@ -1612,6 +1629,7 @@ impl<'db> InferenceContext<'_, 'db> { return None; } let ty = self.db.field_types(field_id.parent)[field_id.local_id] + .get() .instantiate(interner, parameters); Some((Either::Left(field_id), ty)) }); @@ -1629,6 +1647,7 @@ impl<'db> InferenceContext<'_, 'db> { let adjustments = self.table.register_infer_ok(autoderef.adjust_steps_as_infer_ok()); let ty = self.db.field_types(field_id.parent)[field_id.local_id] + .get() .instantiate(self.interner(), subst); let ty = self.process_remote_user_written_ty(ty); @@ -1679,7 +1698,7 @@ impl<'db> InferenceContext<'_, 'db> { ); self.push_diagnostic(InferenceDiagnostic::UnresolvedField { expr: tgt_expr, - receiver: receiver_ty, + receiver: receiver_ty.store(), name: name.clone(), method_with_same_name_exists: resolved.is_ok(), }); @@ -1755,7 +1774,7 @@ impl<'db> InferenceContext<'_, 'db> { None => { self.push_diagnostic(InferenceDiagnostic::ExpectedFunction { call_expr: tgt_expr, - found: callee_ty, + found: callee_ty.store(), }); (Vec::new(), Ty::new_error(interner, ErrorGuaranteed)) } @@ -1867,9 +1886,9 @@ impl<'db> InferenceContext<'_, 'db> { self.push_diagnostic(InferenceDiagnostic::UnresolvedMethodCall { expr: tgt_expr, - receiver: receiver_ty, + receiver: receiver_ty.store(), name: method_name.clone(), - field_with_same_name: field_with_same_name_exists, + field_with_same_name: field_with_same_name_exists.map(|it| it.store()), assoc_func_with_same_name: assoc_func_with_same_name.map(|it| it.def_id), }); @@ -1921,7 +1940,7 @@ impl<'db> InferenceContext<'_, 'db> { let (formal_receiver_ty, param_tys) = if !sig.inputs_and_output.inputs().is_empty() { (sig.inputs_and_output.as_slice()[0], &sig.inputs_and_output.inputs()[1..]) } else { - (self.types.error, &[] as _) + (self.types.types.error, &[] as _) }; let ret_ty = sig.output(); self.table.unify(formal_receiver_ty, receiver_ty); @@ -2115,10 +2134,10 @@ impl<'db> InferenceContext<'_, 'db> { && args_count_matches { // Don't report type mismatches if there is a mismatch in args count. 
- self.result - .type_mismatches - .get_or_insert_default() - .insert((*arg).into(), TypeMismatch { expected, actual: found }); + self.result.type_mismatches.get_or_insert_default().insert( + (*arg).into(), + TypeMismatch { expected: expected.store(), actual: found.store() }, + ); } } } @@ -2145,15 +2164,13 @@ impl<'db> InferenceContext<'_, 'db> { if let ItemContainerId::TraitId(trait_) = f.lookup(self.db).container { // construct a TraitRef let trait_params_len = generics(self.db, trait_.into()).len(); - let substs = GenericArgs::new_from_iter( - self.interner(), - parameters.as_slice()[..trait_params_len].iter().copied(), - ); + let substs = + GenericArgs::new_from_slice(¶meters.as_slice()[..trait_params_len]); self.table.register_predicate(Obligation::new( self.interner(), ObligationCause::new(), self.table.param_env, - TraitRef::new(self.interner(), trait_.into(), substs), + TraitRef::new_from_args(self.interner(), trait_.into(), substs), )); } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/fallback.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/fallback.rs index d0ce8cba7a882..c7669b346fe8d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/fallback.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/fallback.rs @@ -151,8 +151,8 @@ impl<'db> InferenceContext<'_, 'db> { // type, `?T` is not considered unsolved, but `?I` is. The // same is true for float variables.) let fallback = match ty.kind() { - TyKind::Infer(rustc_type_ir::IntVar(_)) => self.types.i32, - TyKind::Infer(rustc_type_ir::FloatVar(_)) => self.types.f64, + TyKind::Infer(rustc_type_ir::IntVar(_)) => self.types.types.i32, + TyKind::Infer(rustc_type_ir::FloatVar(_)) => self.types.types.f64, _ => match diverging_fallback.get(&ty) { Some(&fallback_ty) => fallback_ty, None => return false, @@ -337,7 +337,7 @@ impl<'db> InferenceContext<'_, 'db> { match behavior { DivergingFallbackBehavior::ToUnit => { debug!("fallback to () - legacy: {:?}", diverging_vid); - fallback_to(self.types.unit); + fallback_to(self.types.types.unit); } DivergingFallbackBehavior::ContextDependent => { // FIXME: rustc does the following, but given this is only relevant when the unstable @@ -368,14 +368,14 @@ impl<'db> InferenceContext<'_, 'db> { // // set, see the relationship finding module in // // compiler/rustc_trait_selection/src/traits/relationships.rs. // debug!("fallback to () - found trait and projection: {:?}", diverging_vid); - // fallback_to(self.types.unit); + // fallback_to(self.types.types.unit); // } if can_reach_non_diverging { debug!("fallback to () - reached non-diverging: {:?}", diverging_vid); - fallback_to(self.types.unit); + fallback_to(self.types.types.unit); } else { debug!("fallback to ! - all diverging: {:?}", diverging_vid); - fallback_to(self.types.never); + fallback_to(self.types.types.never); } } DivergingFallbackBehavior::ToNever => { @@ -383,7 +383,7 @@ impl<'db> InferenceContext<'_, 'db> { "fallback to ! 
- `rustc_never_type_mode = \"fallback_to_never\")`: {:?}", diverging_vid ); - fallback_to(self.types.never); + fallback_to(self.types.types.never); } } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs index 87dcaa8a4ea8c..729ed214daea8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs @@ -26,8 +26,8 @@ impl<'db> InferenceContext<'_, 'db> { Adjust::Deref(Some(d)) => { if mutability == Mutability::Mut { let source_ty = match adjustments.peek() { - Some(prev_adj) => prev_adj.target, - None => self.result.type_of_expr[tgt_expr], + Some(prev_adj) => prev_adj.target.as_ref(), + None => self.result.type_of_expr[tgt_expr].as_ref(), }; if let Some(infer_ok) = Self::try_mutable_overloaded_place_op( &self.table, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/op.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/op.rs index 8236de167f735..c79c828cd4420 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/op.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/op.rs @@ -39,7 +39,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> { && is_builtin_binop(lhs_ty, rhs_ty, category) { self.enforce_builtin_binop_types(lhs_ty, rhs_ty, category); - self.types.unit + self.types.types.unit } else { return_ty }; @@ -67,20 +67,20 @@ impl<'a, 'db> InferenceContext<'a, 'db> { // && and || are a simple case. self.infer_expr_coerce( lhs_expr, - &Expectation::HasType(self.types.bool), + &Expectation::HasType(self.types.types.bool), ExprIsRead::Yes, ); let lhs_diverges = self.diverges; self.infer_expr_coerce( rhs_expr, - &Expectation::HasType(self.types.bool), + &Expectation::HasType(self.types.types.bool), ExprIsRead::Yes, ); // Depending on the LHS' value, the RHS can never execute. self.diverges = lhs_diverges; - self.types.bool + self.types.types.bool } _ => { // Otherwise, we always treat operators as if they are @@ -131,9 +131,9 @@ impl<'a, 'db> InferenceContext<'a, 'db> { match category { BinOpCategory::Shortcircuit => { - self.demand_suptype(self.types.bool, lhs_ty); - self.demand_suptype(self.types.bool, rhs_ty); - self.types.bool + self.demand_suptype(self.types.types.bool, lhs_ty); + self.demand_suptype(self.types.types.bool, rhs_ty); + self.types.types.bool } BinOpCategory::Shift => { @@ -150,7 +150,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> { BinOpCategory::Comparison => { // both LHS and RHS and result will have the same type self.demand_suptype(lhs_ty, rhs_ty); - self.types.bool + self.types.types.bool } } } @@ -213,7 +213,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> { let mutbl = AutoBorrowMutability::new(mutbl, AllowTwoPhase::Yes); let autoref = Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), - target: method.sig.inputs_and_output.inputs()[0], + target: method.sig.inputs_and_output.inputs()[0].store(), }; self.write_expr_adj(lhs_expr, Box::new([autoref])); } @@ -227,7 +227,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> { let autoref = Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), - target: method.sig.inputs_and_output.inputs()[1], + target: method.sig.inputs_and_output.inputs()[1].store(), }; // HACK(eddyb) Bypass checks due to reborrows being in // some cases applied on the RHS, on top of which we need @@ -251,7 +251,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> { } Err(_errors) => { // FIXME: Report diagnostic. 
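
The other change threaded through these hunks is the split between borrowed, interner-backed values such as `Ty<'db>` and owned, lifetime-free counterparts such as `StoredTy`: values are converted with `.store()` before they are written into `InferenceResult`, diagnostics or salsa query results, and turned back into handles with `.as_ref()` or `.get()` when read (see `prev_adj.target.as_ref()` in the `mutability.rs` hunk above). The mock below only demonstrates that round trip under the assumption that the stored form owns the data; the real `next_solver` types and method signatures are more involved.

/// Owned data, as one might keep it in a query result (a stand-in, not the real `StoredTy`).
#[derive(Clone, Debug, PartialEq, Eq)]
struct StoredTy(TyData);

/// Cheap, copyable handle borrowed from interned data, like `Ty<'db>` in the diff.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct Ty<'db>(&'db TyData);

#[derive(Clone, Debug, PartialEq, Eq)]
enum TyData {
    Bool,
    Never,
    Ref(Box<TyData>),
}

impl<'db> Ty<'db> {
    /// Mirrors the `.store()` calls above: drop the `'db` borrow so the value can be persisted.
    fn store(self) -> StoredTy {
        StoredTy(self.0.clone())
    }
}

impl StoredTy {
    /// Mirrors `.as_ref()` / `.get()`: view the persisted data as a borrowed handle again.
    /// (In the mock the data is owned inline, so no interner argument is needed.)
    fn as_ref(&self) -> Ty<'_> {
        Ty(&self.0)
    }
}

fn main() {
    let interned = TyData::Ref(Box::new(TyData::Bool)); // imagine this living in the `'db` arena
    let ty = Ty(&interned);             // what inference code passes around
    let stored = ty.store();            // what gets written into `InferenceResult`/diagnostics
    let back: Ty<'_> = stored.as_ref(); // what a reader does before using the type again
    assert_eq!(ty.0, back.0);
}
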
- self.types.error + self.types.types.error } }; @@ -271,7 +271,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> { } Err(_errors) => { // FIXME: Report diagnostic. - self.types.error + self.types.types.error } } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/opaques.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/opaques.rs index ce4597f83d557..a39288721b3e1 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/opaques.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/opaques.rs @@ -112,12 +112,12 @@ impl<'db> InferenceContext<'_, 'db> { _ = self.demand_eqtype_fixme_no_diag(expected, hidden_type.ty); } - self.result.type_of_opaque.insert(def_id, ty.ty); + self.result.type_of_opaque.insert(def_id, ty.ty.store()); continue; } - self.result.type_of_opaque.insert(def_id, self.types.error); + self.result.type_of_opaque.insert(def_id, self.types.types.error.store()); } } @@ -139,9 +139,10 @@ impl<'db> InferenceContext<'_, 'db> { let at = self.table.infer_ctxt.at(&cause, self.table.param_env); let hidden_type = match at.deeply_normalize(hidden_type) { Ok(hidden_type) => hidden_type, - Err(_errors) => OpaqueHiddenType { ty: self.types.error }, + Err(_errors) => OpaqueHiddenType { ty: self.types.types.error }, }; - let hidden_type = fold_regions(self.interner(), hidden_type, |_, _| self.types.re_erased); + let hidden_type = + fold_regions(self.interner(), hidden_type, |_, _| self.types.regions.erased); UsageKind::HasDefiningUse(hidden_type) } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs index a02e280ac6379..1b8ce5ceaf860 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs @@ -9,7 +9,7 @@ use hir_def::{ }; use hir_expand::name::Name; use rustc_ast_ir::Mutability; -use rustc_type_ir::inherent::{GenericArg as _, GenericArgs as _, IntoKind, SliceLike, Ty as _}; +use rustc_type_ir::inherent::{GenericArg as _, GenericArgs as _, IntoKind, Ty as _}; use stdx::TupleExt; use crate::{ @@ -82,7 +82,7 @@ impl<'db> InferenceContext<'_, 'db> { { // FIXME(DIAGNOSE): private tuple field } - let f = field_types[local_id]; + let f = field_types[local_id].get(); let expected_ty = match substs { Some(substs) => f.instantiate(self.interner(), substs), None => f.instantiate(self.interner(), &[]), @@ -146,7 +146,7 @@ impl<'db> InferenceContext<'_, 'db> { variant: def, }); } - let f = field_types[local_id]; + let f = field_types[local_id].get(); let expected_ty = match substs { Some(substs) => f.instantiate(self.interner(), substs), None => f.instantiate(self.interner(), &[]), @@ -234,7 +234,7 @@ impl<'db> InferenceContext<'_, 'db> { } if let Some(uncovered) = elements.get(element_tys.len()..) 
{ for &elem in uncovered { - self.infer_pat(elem, self.types.error, default_bm, decl); + self.infer_pat(elem, self.types.types.error, default_bm, decl); } } pat_ty @@ -270,7 +270,7 @@ impl<'db> InferenceContext<'_, 'db> { } else if self.is_non_ref_pat(self.body, pat) { let mut pat_adjustments = Vec::new(); while let TyKind::Ref(_lifetime, inner, mutability) = expected.kind() { - pat_adjustments.push(expected); + pat_adjustments.push(expected.store()); expected = self.table.try_structurally_resolve_type(inner); default_bm = match default_bm { BindingMode::Move => BindingMode::Ref(mutability), @@ -333,7 +333,10 @@ impl<'db> InferenceContext<'_, 'db> { Err(_) => { self.result.type_mismatches.get_or_insert_default().insert( pat.into(), - TypeMismatch { expected, actual: ty_inserted_vars }, + TypeMismatch { + expected: expected.store(), + actual: ty_inserted_vars.store(), + }, ); self.write_pat_ty(pat, ty); // We return `expected` to prevent cascading errors. I guess an alternative is to @@ -372,7 +375,7 @@ impl<'db> InferenceContext<'_, 'db> { Some((adt, subst)) if adt == box_adt => { (subst.type_at(0), subst.as_slice().get(1).and_then(|a| a.as_type())) } - _ => (self.types.error, None), + _ => (self.types.types.error, None), }; let inner_ty = self.infer_pat(*inner, inner_ty, default_bm, decl); @@ -413,10 +416,10 @@ impl<'db> InferenceContext<'_, 'db> { ) { Ok(ty) => ty, Err(_) => { - self.result - .type_mismatches - .get_or_insert_default() - .insert(pat.into(), TypeMismatch { expected, actual: lhs_ty }); + self.result.type_mismatches.get_or_insert_default().insert( + pat.into(), + TypeMismatch { expected: expected.store(), actual: lhs_ty.store() }, + ); // `rhs_ty` is returned so no further type mismatches are // reported because of this mismatch. expected @@ -432,22 +435,22 @@ impl<'db> InferenceContext<'_, 'db> { let ty = self.insert_type_vars_shallow(ty); // FIXME: This never check is odd, but required with out we do inference right now if !expected.is_never() && !self.unify(ty, expected) { - self.result - .type_mismatches - .get_or_insert_default() - .insert(pat.into(), TypeMismatch { expected, actual: ty }); + self.result.type_mismatches.get_or_insert_default().insert( + pat.into(), + TypeMismatch { expected: expected.store(), actual: ty.store() }, + ); } self.write_pat_ty(pat, ty); self.pat_ty_after_adjustment(pat) } fn pat_ty_after_adjustment(&self, pat: PatId) -> Ty<'db> { - *self - .result + self.result .pat_adjustments .get(&pat) .and_then(|it| it.last()) - .unwrap_or(&self.result.type_of_pat[pat]) + .unwrap_or_else(|| &self.result.type_of_pat[pat]) + .as_ref() } fn infer_ref_pat( @@ -571,10 +574,14 @@ impl<'db> InferenceContext<'_, 'db> { { let inner = self.table.try_structurally_resolve_type(inner); if matches!(inner.kind(), TyKind::Slice(_)) { - let elem_ty = self.types.u8; + let elem_ty = self.types.types.u8; let slice_ty = Ty::new_slice(self.interner(), elem_ty); - let ty = - Ty::new_ref(self.interner(), self.types.re_static, slice_ty, Mutability::Not); + let ty = Ty::new_ref( + self.interner(), + self.types.regions.statik, + slice_ty, + Mutability::Not, + ); self.write_expr_ty(expr, ty); return ty; } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs index 301cbf462ce06..b11650bbcd9a8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs @@ -64,7 +64,7 @@ impl<'db> InferenceContext<'_, 'db> { } ValueNs::LocalBinding(pat) => { 
return match self.result.type_of_binding.get(pat) { - Some(ty) => Some(ValuePathResolution::NonGeneric(*ty)), + Some(ty) => Some(ValuePathResolution::NonGeneric(ty.as_ref())), None => { never!("uninferred pattern?"); None @@ -102,7 +102,7 @@ impl<'db> InferenceContext<'_, 'db> { // This is something like `TypeAlias::::EnumVariant`. Do not call `substs_from_path()`, // as it'll try to re-lower the previous segment assuming it refers to the enum, but it refers // to the type alias and they may have different generics. - self.types.empty_args + self.types.empty.generic_args } else { self.with_body_ty_lowering(|ctx| { let mut path_ctx = ctx.at_path(path, id); @@ -240,11 +240,8 @@ impl<'db> InferenceContext<'_, 'db> { if let ItemContainerId::TraitId(trait_) = container { let parent_len = generics(self.db, def).parent_generics().map_or(0, |g| g.len_self()); - let parent_subst = GenericArgs::new_from_iter( - interner, - subst.as_slice()[..parent_len].iter().copied(), - ); - let trait_ref = TraitRef::new(interner, trait_.into(), parent_subst); + let parent_subst = GenericArgs::new_from_slice(&subst.as_slice()[..parent_len]); + let trait_ref = TraitRef::new_from_args(interner, trait_.into(), parent_subst); self.table.register_predicate(Obligation::new( interner, ObligationCause::new(), @@ -339,7 +336,7 @@ impl<'db> InferenceContext<'_, 'db> { [ty.into()], |_, id, _| self.table.next_var_for_param(id), ); - let trait_ref = TraitRef::new(self.interner(), trait_.into(), args); + let trait_ref = TraitRef::new_from_args(self.interner(), trait_.into(), args); self.table.register_predicate(Obligation::new( self.interner(), ObligationCause::new(), diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/place_op.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/place_op.rs index 3ef5e5870a582..1298b38097034 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/place_op.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/place_op.rs @@ -65,7 +65,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> { oprnd_expr, Box::new([Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(AutoBorrowMutability::Not)), - target: method.sig.inputs_and_output.inputs()[0], + target: method.sig.inputs_and_output.inputs()[0].store(), }]), ); } else { @@ -125,7 +125,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> { ctx.interner(), ObligationCause::new(), ctx.table.param_env, - ClauseKind::ConstArgHasType(ct, ctx.types.usize), + ClauseKind::ConstArgHasType(ct, ctx.types.types.usize), )); self_ty = Ty::new_slice(ctx.interner(), element_ty); } else { @@ -151,7 +151,8 @@ impl<'a, 'db> InferenceContext<'a, 'db> { { adjustments.push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(AutoBorrowMutability::Not)), - target: Ty::new_imm_ref(autoderef.ctx().interner(), region, adjusted_ty), + target: Ty::new_imm_ref(autoderef.ctx().interner(), region, adjusted_ty) + .store(), }); } else { panic!("input to index is not a ref?"); @@ -159,7 +160,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> { if unsize { adjustments.push(Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), - target: method.sig.inputs_and_output.inputs()[0], + target: method.sig.inputs_and_output.inputs()[0].store(), }); } autoderef.ctx().write_expr_adj(base_expr, adjustments.into_boxed_slice()); @@ -283,7 +284,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> { None => return, }; debug!("convert_place_op_to_mutable: method={:?}", method); - self.result.method_resolutions.insert(expr, (method.def_id, method.args)); + self.result.method_resolutions.insert(expr, (method.def_id, 
method.args.store())); let TyKind::Ref(region, _, Mutability::Mut) = method.sig.inputs_and_output.inputs()[0].kind() @@ -308,9 +309,9 @@ impl<'a, 'db> InferenceContext<'a, 'db> { allow_two_phase_borrow: AllowTwoPhase::No, }; adjustment.kind = Adjust::Borrow(AutoBorrow::Ref(mutbl)); - adjustment.target = Ty::new_ref(interner, region, source, mutbl.into()); + adjustment.target = Ty::new_ref(interner, region, source, mutbl.into()).store(); } - source = adjustment.target; + source = adjustment.target.as_ref(); } // If we have an autoref followed by unsizing at the end, fix the unsize target. @@ -320,7 +321,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> { Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), ref mut target }, ] = adjustments[..] { - *target = method.sig.inputs_and_output.inputs()[0]; + *target = method.sig.inputs_and_output.inputs()[0].store(); } } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs index a5060416a16e1..d55fc0ab0da66 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs @@ -9,7 +9,7 @@ use intern::sym; use rustc_hash::FxHashSet; use rustc_type_ir::{ TyVid, TypeFoldable, TypeVisitableExt, UpcastFrom, - inherent::{Const as _, GenericArg as _, IntoKind, SliceLike, Ty as _}, + inherent::{Const as _, GenericArg as _, IntoKind, Ty as _}, solve::Certainty, }; use smallvec::SmallVec; @@ -542,16 +542,14 @@ impl<'db> InferenceTable<'db> { }) .take(num_args), ); - let args = [ty, arg_ty]; - let trait_ref = TraitRef::new(self.interner(), fn_trait.into(), args); + let args = GenericArgs::new_from_slice(&[ty.into(), arg_ty.into()]); + let trait_ref = TraitRef::new_from_args(self.interner(), fn_trait.into(), args); - let proj_args = self - .infer_ctxt - .fill_rest_fresh_args(output_assoc_type.into(), args.into_iter().map(Into::into)); + let proj_args = self.infer_ctxt.fill_rest_fresh_args(output_assoc_type.into(), args); let projection = Ty::new_alias( self.interner(), rustc_type_ir::AliasTyKind::Projection, - AliasTy::new(self.interner(), output_assoc_type.into(), proj_args), + AliasTy::new_from_args(self.interner(), output_assoc_type.into(), proj_args), ); let pred = Predicate::upcast_from(trait_ref, self.interner()); @@ -560,7 +558,8 @@ impl<'db> InferenceTable<'db> { let return_ty = self.normalize_alias_ty(projection); for &fn_x in subtraits { let fn_x_trait = fn_x.get_id(lang_items)?; - let trait_ref = TraitRef::new(self.interner(), fn_x_trait.into(), args); + let trait_ref = + TraitRef::new_from_args(self.interner(), fn_x_trait.into(), args); let pred = Predicate::upcast_from(trait_ref, self.interner()); if !self.try_obligation(pred).no_solution() { return Some((fn_x, arg_tys, return_ty)); @@ -640,6 +639,7 @@ impl<'db> InferenceTable<'db> { let struct_data = id.fields(self.db); if let Some((last_field, _)) = struct_data.fields().iter().next_back() { let last_field_ty = self.db.field_types(id.into())[last_field] + .get() .instantiate(self.interner(), subst); if structs.contains(&ty) { // A struct recursively contains itself as a tail field somewhere. 
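
The `unify.rs` hunk above also shows the third recurring API change: argument lists are built up front with `GenericArgs::new_from_slice(&[...])` and passed whole to `TraitRef::new_from_args` or `AliasTy::new_from_args`, where the old code handed iterators to `GenericArgs::new_from_iter` and `TraitRef::new`. A rough imitation of that shape, with mock types standing in for the real ones:

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct GenericArg(&'static str);

/// Interned argument list; the diff builds these with `new_from_slice(&[...])`.
#[derive(Clone, Debug, PartialEq, Eq)]
struct GenericArgs(Vec<GenericArg>);

impl GenericArgs {
    fn new_from_slice(args: &[GenericArg]) -> Self {
        GenericArgs(args.to_vec())
    }
}

#[derive(Debug, PartialEq, Eq)]
struct TraitRef {
    def_id: &'static str,
    args: GenericArgs,
}

impl TraitRef {
    /// Old shape: accept any iterator of arguments.
    fn new(def_id: &'static str, args: impl IntoIterator<Item = GenericArg>) -> Self {
        Self::new_from_args(def_id, GenericArgs(args.into_iter().collect()))
    }
    /// New shape used in the diff: take a prebuilt `GenericArgs` value.
    fn new_from_args(def_id: &'static str, args: GenericArgs) -> Self {
        TraitRef { def_id, args }
    }
}

fn main() {
    let before = TraitRef::new("FnOnce", [GenericArg("Self"), GenericArg("(Args,)")]);
    let after = TraitRef::new_from_args(
        "FnOnce",
        GenericArgs::new_from_slice(&[GenericArg("Self"), GenericArg("(Args,)")]),
    );
    assert_eq!(before, after);
}
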
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs b/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs index 075a7066db789..402e9ce969713 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs @@ -154,7 +154,7 @@ impl<'a, 'db> UninhabitedFrom<'a, 'db> { let field_vis = if is_enum { None } else { Some(self.db().field_visibilities(variant)) }; for (fid, _) in fields.iter() { - self.visit_field(field_vis.as_ref().map(|it| it[fid]), &field_tys[fid], subst)?; + self.visit_field(field_vis.as_ref().map(|it| it[fid]), &field_tys[fid].get(), subst)?; } CONTINUE_OPAQUELY_INHABITED } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs index 4b20d6eb32208..b6ad3624ae28b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs @@ -14,10 +14,7 @@ use rustc_abi::{ TargetDataLayout, WrappingRange, }; use rustc_index::IndexVec; -use rustc_type_ir::{ - FloatTy, IntTy, UintTy, - inherent::{IntoKind, SliceLike}, -}; +use rustc_type_ir::{FloatTy, IntTy, UintTy, inherent::IntoKind}; use triomphe::Arc; use crate::{ @@ -25,9 +22,10 @@ use crate::{ consteval::try_const_usize, db::HirDatabase, next_solver::{ - DbInterner, GenericArgs, Ty, TyKind, TypingMode, + DbInterner, GenericArgs, StoredTy, Ty, TyKind, TypingMode, infer::{DbInternerInferExt, traits::ObligationCause}, }, + traits::StoredParamEnvAndCrate, }; pub(crate) use self::adt::layout_of_adt_cycle_result; @@ -144,22 +142,22 @@ fn layout_of_simd_ty<'db>( let Some(TyKind::Array(e_ty, e_len)) = fields .next() .filter(|_| fields.next().is_none()) - .map(|f| (*f.1).instantiate(DbInterner::new_no_crate(db), args).kind()) + .map(|f| (*f.1).get().instantiate(DbInterner::new_no_crate(db), args).kind()) else { return Err(LayoutError::InvalidSimdType); }; let e_len = try_const_usize(db, e_len).ok_or(LayoutError::HasErrorConst)? 
as u64;
-    let e_ly = db.layout_of_ty(e_ty, env)?;
+    let e_ly = db.layout_of_ty(e_ty.store(), env.store())?;
     let cx = LayoutCx::new(dl);
     Ok(Arc::new(cx.calc.simd_type(e_ly, e_len, repr_packed)?))
 }
 
-pub fn layout_of_ty_query<'db>(
-    db: &'db dyn HirDatabase,
-    ty: Ty<'db>,
-    trait_env: ParamEnvAndCrate<'db>,
+pub fn layout_of_ty_query(
+    db: &dyn HirDatabase,
+    ty: StoredTy,
+    trait_env: StoredParamEnvAndCrate,
 ) -> Result<Arc<Layout>, LayoutError> {
     let krate = trait_env.krate;
     let interner = DbInterner::new_with(db, krate);
@@ -170,19 +168,29 @@ pub fn layout_of_ty_query<'db>(
     let cx = LayoutCx::new(dl);
     let infer_ctxt = interner.infer_ctxt().build(TypingMode::PostAnalysis);
     let cause = ObligationCause::dummy();
-    let ty = infer_ctxt.at(&cause, trait_env.param_env).deeply_normalize(ty).unwrap_or(ty);
+    let ty = infer_ctxt
+        .at(&cause, trait_env.param_env())
+        .deeply_normalize(ty.as_ref())
+        .unwrap_or(ty.as_ref());
     let result = match ty.kind() {
         TyKind::Adt(def, args) => {
             match def.inner().id {
                 hir_def::AdtId::StructId(s) => {
                     let repr = AttrFlags::repr(db, s.into()).unwrap_or_default();
                     if repr.simd() {
-                        return layout_of_simd_ty(db, s, repr.packed(), &args, trait_env, &target);
+                        return layout_of_simd_ty(
+                            db,
+                            s,
+                            repr.packed(),
+                            &args,
+                            trait_env.as_ref(),
+                            &target,
+                        );
                     }
                 }
                 _ => {}
             }
-            return db.layout_of_adt(def.inner().id, args, trait_env);
+            return db.layout_of_adt(def.inner().id, args.store(), trait_env);
         }
         TyKind::Bool => Layout::scalar(
             dl,
@@ -246,21 +254,23 @@ pub fn layout_of_ty_query<'db>(
         ),
         TyKind::Tuple(tys) => {
             let kind =
-                if tys.len() == 0 { StructKind::AlwaysSized } else { StructKind::MaybeUnsized };
+                if tys.is_empty() { StructKind::AlwaysSized } else { StructKind::MaybeUnsized };
-            let fields =
-                tys.iter().map(|k| db.layout_of_ty(k, trait_env)).collect::<Result<Vec<_>, _>>()?;
+            let fields = tys
+                .iter()
+                .map(|k| db.layout_of_ty(k.store(), trait_env.clone()))
+                .collect::<Result<Vec<_>, _>>()?;
             let fields = fields.iter().map(|it| &**it).collect::<Vec<_>>();
             let fields = fields.iter().collect::<Vec<_>>();
             cx.calc.univariant(&fields, &ReprOptions::default(), kind)?
         }
         TyKind::Array(element, count) => {
             let count = try_const_usize(db, count).ok_or(LayoutError::HasErrorConst)? as u64;
-            let element = db.layout_of_ty(element, trait_env)?;
+            let element = db.layout_of_ty(element.store(), trait_env)?;
             cx.calc.array_like::<_, _, ()>(&element, Some(count))?
         }
         TyKind::Slice(element) => {
-            let element = db.layout_of_ty(element, trait_env)?;
+            let element = db.layout_of_ty(element.store(), trait_env)?;
             cx.calc.array_like::<_, _, ()>(&element, None)?
} TyKind::Str => { @@ -325,9 +335,11 @@ pub fn layout_of_ty_query<'db>( let fields = captures .iter() .map(|it| { - let ty = - it.ty.instantiate(interner, args.split_closure_args_untupled().parent_args); - db.layout_of_ty(ty, trait_env) + let ty = it + .ty + .get() + .instantiate(interner, args.split_closure_args_untupled().parent_args); + db.layout_of_ty(ty.store(), trait_env.clone()) }) .collect::, _>>()?; let fields = fields.iter().map(|it| &**it).collect::>(); @@ -357,10 +369,11 @@ pub fn layout_of_ty_query<'db>( Ok(Arc::new(result)) } -pub(crate) fn layout_of_ty_cycle_result<'db>( +pub(crate) fn layout_of_ty_cycle_result( _: &dyn HirDatabase, - _: Ty<'db>, - _: ParamEnvAndCrate<'db>, + _: salsa::Id, + _: StoredTy, + _: StoredParamEnvAndCrate, ) -> Result, LayoutError> { Err(LayoutError::RecursiveTypeWithoutIndirection) } @@ -376,7 +389,7 @@ fn struct_tail_erasing_lifetimes<'a>(db: &'a dyn HirDatabase, pointee: Ty<'a>) - let mut it = data.fields().iter().rev(); match it.next() { Some((f, _)) => { - let last_field_ty = field_ty(db, struct_id.into(), f, &args); + let last_field_ty = field_ty(db, struct_id.into(), f, args); struct_tail_erasing_lifetimes(db, last_field_ty) } None => pointee, @@ -397,9 +410,9 @@ fn field_ty<'a>( db: &'a dyn HirDatabase, def: hir_def::VariantId, fd: LocalFieldId, - args: &GenericArgs<'a>, + args: GenericArgs<'a>, ) -> Ty<'a> { - db.field_types(def)[fd].instantiate(DbInterner::new_no_crate(db), args) + db.field_types(def)[fd].get().instantiate(DbInterner::new_no_crate(db), args) } fn scalar_unit(dl: &TargetDataLayout, value: Primitive) -> Scalar { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs index cf2d0989fd9fb..d2495917187e4 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs @@ -13,17 +13,17 @@ use smallvec::SmallVec; use triomphe::Arc; use crate::{ - ParamEnvAndCrate, db::HirDatabase, layout::{Layout, LayoutCx, LayoutError, field_ty}, - next_solver::GenericArgs, + next_solver::StoredGenericArgs, + traits::StoredParamEnvAndCrate, }; -pub fn layout_of_adt_query<'db>( - db: &'db dyn HirDatabase, +pub fn layout_of_adt_query( + db: &dyn HirDatabase, def: AdtId, - args: GenericArgs<'db>, - trait_env: ParamEnvAndCrate<'db>, + args: StoredGenericArgs, + trait_env: StoredParamEnvAndCrate, ) -> Result, LayoutError> { let krate = trait_env.krate; let Ok(target) = db.target_data_layout(krate) else { @@ -34,7 +34,9 @@ pub fn layout_of_adt_query<'db>( let handle_variant = |def: VariantId, var: &VariantFields| { var.fields() .iter() - .map(|(fd, _)| db.layout_of_ty(field_ty(db, def, fd, &args), trait_env)) + .map(|(fd, _)| { + db.layout_of_ty(field_ty(db, def, fd, args.as_ref()).store(), trait_env.clone()) + }) .collect::, _>>() }; let (variants, repr, is_special_no_niche) = match def { @@ -95,11 +97,12 @@ pub fn layout_of_adt_query<'db>( Ok(Arc::new(result)) } -pub(crate) fn layout_of_adt_cycle_result<'db>( - _: &'db dyn HirDatabase, +pub(crate) fn layout_of_adt_cycle_result( + _: &dyn HirDatabase, + _: salsa::Id, _def: AdtId, - _args: GenericArgs<'db>, - _trait_env: ParamEnvAndCrate<'db>, + _args: StoredGenericArgs, + _trait_env: StoredParamEnvAndCrate, ) -> Result, LayoutError> { Err(LayoutError::RecursiveTypeWithoutIndirection) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs index be6a76478a116..8c91be1d78110 100644 --- 
a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs @@ -98,7 +98,7 @@ fn eval_goal( Either::Left(it) => it.krate(&db), Either::Right(it) => it.krate(&db), }; - db.layout_of_ty(goal_ty, ParamEnvAndCrate { param_env, krate }) + db.layout_of_ty(goal_ty.store(), ParamEnvAndCrate { param_env, krate }.store()) }) } @@ -140,10 +140,10 @@ fn eval_expr( .unwrap() .0; let infer = InferenceResult::for_body(&db, function_id.into()); - let goal_ty = infer.type_of_binding[b]; + let goal_ty = infer.type_of_binding[b].clone(); let param_env = db.trait_environment(function_id.into()); let krate = function_id.krate(&db); - db.layout_of_ty(goal_ty, ParamEnvAndCrate { param_env, krate }) + db.layout_of_ty(goal_ty, ParamEnvAndCrate { param_env, krate }.store()) }) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs index 5ebe87c5d5d67..1674771413f25 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs @@ -61,11 +61,12 @@ use hir_def::{CallableDefId, TypeOrConstParamId, type_ref::Rawness}; use hir_expand::name::Name; use indexmap::{IndexMap, map::Entry}; use intern::{Symbol, sym}; +use macros::GenericTypeVisitable; use mir::{MirEvalError, VTableMap}; use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet}; use rustc_type_ir::{ BoundVarIndexKind, TypeSuperVisitable, TypeVisitableExt, UpcastFrom, - inherent::{IntoKind, SliceLike, Ty as _}, + inherent::{IntoKind, Ty as _}, }; use syntax::ast::{ConstArg, make}; use traits::FnTrait; @@ -76,8 +77,8 @@ use crate::{ infer::unify::InferenceTable, next_solver::{ AliasTy, Binder, BoundConst, BoundRegion, BoundRegionKind, BoundTy, BoundTyKind, Canonical, - CanonicalVarKind, CanonicalVars, Const, ConstKind, DbInterner, FnSig, PolyFnSig, Predicate, - Region, RegionKind, TraitRef, Ty, TyKind, Tys, abi, + CanonicalVarKind, CanonicalVars, Const, ConstKind, DbInterner, FnSig, GenericArgs, + PolyFnSig, Predicate, Region, RegionKind, TraitRef, Ty, TyKind, Tys, abi, }, }; @@ -87,7 +88,7 @@ pub use infer::{ InferenceTyDiagnosticSource, OverloadedDeref, PointerCast, cast::CastError, closure::analysis::{CaptureKind, CapturedItem}, - could_coerce, could_unify, could_unify_deeply, + could_coerce, could_unify, could_unify_deeply, infer_query_with_inspect, }; pub use lower::{ GenericPredicates, ImplTraits, LifetimeElisionKind, TyDefId, TyLoweringContext, ValueTyDefId, @@ -104,7 +105,7 @@ pub use utils::{ /// A constant can have reference to other things. Memory map job is holding /// the necessary bits of memory of the const eval session to keep the constant /// meaningful. 
-#[derive(Debug, Default, Clone, PartialEq, Eq)] +#[derive(Debug, Default, Clone, PartialEq, Eq, GenericTypeVisitable)] pub enum MemoryMap<'db> { #[default] Empty, @@ -112,7 +113,7 @@ pub enum MemoryMap<'db> { Complex(Box>), } -#[derive(Debug, Default, Clone, PartialEq, Eq)] +#[derive(Debug, Default, Clone, PartialEq, Eq, GenericTypeVisitable)] pub struct ComplexMemoryMap<'db> { memory: IndexMap, FxBuildHasher>, vtable: VTableMap<'db>, @@ -134,7 +135,7 @@ impl ComplexMemoryMap<'_> { } impl<'db> MemoryMap<'db> { - pub fn vtable_ty(&self, id: usize) -> Result, MirEvalError<'db>> { + pub fn vtable_ty(&self, id: usize) -> Result, MirEvalError> { match self { MemoryMap::Empty | MemoryMap::Simple(_) => Err(MirEvalError::InvalidVTableId(id)), MemoryMap::Complex(cm) => cm.vtable.ty(id), @@ -150,8 +151,8 @@ impl<'db> MemoryMap<'db> { /// allocator function as `f` and it will return a mapping of old addresses to new addresses. fn transform_addresses( &self, - mut f: impl FnMut(&[u8], usize) -> Result>, - ) -> Result, MirEvalError<'db>> { + mut f: impl FnMut(&[u8], usize) -> Result, + ) -> Result, MirEvalError> { let mut transform = |(addr, val): (&usize, &[u8])| { let addr = *addr; let align = if addr == 0 { 64 } else { (addr - (addr & (addr - 1))).min(64) }; @@ -333,9 +334,9 @@ impl FnAbi { } #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] -pub enum ImplTraitId<'db> { - ReturnTypeImplTrait(hir_def::FunctionId, next_solver::ImplTraitIdx<'db>), - TypeAliasImplTrait(hir_def::TypeAliasId, next_solver::ImplTraitIdx<'db>), +pub enum ImplTraitId { + ReturnTypeImplTrait(hir_def::FunctionId, next_solver::ImplTraitIdx), + TypeAliasImplTrait(hir_def::TypeAliasId, next_solver::ImplTraitIdx), } /// 'Canonicalizes' the `t` by replacing any errors with new variables. Also @@ -468,7 +469,7 @@ where Canonical { value, max_universe: rustc_type_ir::UniverseIndex::ZERO, - variables: CanonicalVars::new_from_iter(interner, error_replacer.vars), + variables: CanonicalVars::new_from_slice(&error_replacer.vars), } } @@ -490,12 +491,12 @@ pub fn callable_sig_from_fn_trait<'db>( // - Self: FnOnce // - >::Output == ?ret_ty let args_ty = table.next_ty_var(); - let args = [self_ty, args_ty]; - let trait_ref = TraitRef::new(table.interner(), fn_once_trait.into(), args); + let args = GenericArgs::new_from_slice(&[self_ty.into(), args_ty.into()]); + let trait_ref = TraitRef::new_from_args(table.interner(), fn_once_trait.into(), args); let projection = Ty::new_alias( table.interner(), rustc_type_ir::AliasTyKind::Projection, - AliasTy::new(table.interner(), output_assoc_type.into(), args), + AliasTy::new_from_args(table.interner(), output_assoc_type.into(), args), ); let pred = Predicate::upcast_from(trait_ref, table.interner()); @@ -504,7 +505,7 @@ pub fn callable_sig_from_fn_trait<'db>( let return_ty = table.normalize_alias_ty(projection); for fn_x in [FnTrait::Fn, FnTrait::FnMut, FnTrait::FnOnce] { let fn_x_trait = fn_x.get_id(lang_items)?; - let trait_ref = TraitRef::new(table.interner(), fn_x_trait.into(), args); + let trait_ref = TraitRef::new_from_args(table.interner(), fn_x_trait.into(), args); if !table .try_obligation(Predicate::upcast_from(trait_ref, table.interner())) .no_solution() diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs index dd34bbe2fd02b..62a5837f349df 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs @@ -15,8 +15,8 @@ use either::Either; use hir_def::{ AdtId, 
AssocItemId, CallableDefId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, GeneralConstId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, - LifetimeParamId, LocalFieldId, Lookup, StaticId, StructId, TypeAliasId, TypeOrConstParamId, - TypeParamId, UnionId, VariantId, + LifetimeParamId, LocalFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, + TypeOrConstParamId, TypeParamId, UnionId, VariantId, builtin_type::BuiltinType, expr_store::{ExpressionStore, HygieneId, path::Path}, hir::generics::{ @@ -61,36 +61,38 @@ use crate::{ AliasTy, Binder, BoundExistentialPredicates, Clause, ClauseKind, Clauses, Const, DbInterner, EarlyBinder, EarlyParamRegion, ErrorGuaranteed, FxIndexMap, GenericArg, GenericArgs, ParamConst, ParamEnv, PolyFnSig, Predicate, Region, SolverDefId, - TraitPredicate, TraitRef, Ty, Tys, UnevaluatedConst, abi::Safety, util::BottomUpFolder, + StoredClauses, StoredEarlyBinder, StoredGenericArg, StoredGenericArgs, StoredPolyFnSig, + StoredTy, TraitPredicate, TraitRef, Ty, Tys, UnevaluatedConst, abi::Safety, + util::BottomUpFolder, }, }; pub(crate) struct PathDiagnosticCallbackData(pub(crate) TypeRefId); #[derive(PartialEq, Eq, Debug, Hash)] -pub struct ImplTraits<'db> { - pub(crate) impl_traits: Arena>, +pub struct ImplTraits { + pub(crate) impl_traits: Arena, } #[derive(PartialEq, Eq, Debug, Hash)] -pub struct ImplTrait<'db> { - pub(crate) predicates: Box<[Clause<'db>]>, +pub struct ImplTrait { + pub(crate) predicates: StoredClauses, } -pub type ImplTraitIdx<'db> = Idx>; +pub type ImplTraitIdx = Idx; #[derive(Debug, Default)] -struct ImplTraitLoweringState<'db> { +struct ImplTraitLoweringState { /// When turning `impl Trait` into opaque types, we have to collect the /// bounds at the same time to get the IDs correct (without becoming too /// complicated). mode: ImplTraitLoweringMode, // This is structured as a struct with fields and not as an enum because it helps with the borrow checker. - opaque_type_data: Arena>, + opaque_type_data: Arena, } -impl<'db> ImplTraitLoweringState<'db> { - fn new(mode: ImplTraitLoweringMode) -> ImplTraitLoweringState<'db> { +impl ImplTraitLoweringState { + fn new(mode: ImplTraitLoweringMode) -> ImplTraitLoweringState { Self { mode, opaque_type_data: Arena::new() } } } @@ -168,13 +170,14 @@ impl<'db> LifetimeElisionKind<'db> { pub struct TyLoweringContext<'db, 'a> { pub db: &'db dyn HirDatabase, interner: DbInterner<'db>, + types: &'db crate::next_solver::DefaultAny<'db>, lang_items: &'db LangItems, resolver: &'a Resolver<'db>, store: &'a ExpressionStore, def: GenericDefId, generics: OnceCell, in_binders: DebruijnIndex, - impl_trait_mode: ImplTraitLoweringState<'db>, + impl_trait_mode: ImplTraitLoweringState, /// Tracks types with explicit `?Sized` bounds. pub(crate) unsized_types: FxHashSet>, pub(crate) diagnostics: Vec, @@ -199,6 +202,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { db, // Can provide no block since we don't use it for trait solving. 
interner, + types: crate::next_solver::default_types(db), lang_items: interner.lang_items(), resolver, def, @@ -337,7 +341,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { } } Some(ValueNs::ConstId(c)) => { - let args = GenericArgs::new_from_iter(self.interner, []); + let args = GenericArgs::empty(self.interner); Some(Const::new( self.interner, rustc_type_ir::ConstKind::Unevaluated(UnevaluatedConst::new( @@ -397,7 +401,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { let type_ref = &self.store[type_ref_id]; tracing::debug!(?type_ref); let ty = match type_ref { - TypeRef::Never => Ty::new(interner, TyKind::Never), + TypeRef::Never => self.types.types.never, TypeRef::Tuple(inner) => { let inner_tys = inner.iter().map(|&tr| self.lower_ty(tr)); Ty::new_tup_from_iter(interner, inner_tys) @@ -476,7 +480,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { let idx = self .impl_trait_mode .opaque_type_data - .alloc(ImplTrait { predicates: Box::default() }); + .alloc(ImplTrait { predicates: Clauses::empty(interner).store() }); let impl_trait_id = origin.either( |f| ImplTraitId::ReturnTypeImplTrait(f, idx), @@ -989,7 +993,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { } } - fn lower_impl_trait(&mut self, def_id: SolverDefId, bounds: &[TypeBound]) -> ImplTrait<'db> { + fn lower_impl_trait(&mut self, def_id: SolverDefId, bounds: &[TypeBound]) -> ImplTrait { let interner = self.interner; cov_mark::hit!(lower_rpit); let args = GenericArgs::identity_for_item(interner, def_id); @@ -1010,7 +1014,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { let trait_ref = TraitRef::new_from_args( interner, trait_id.into(), - GenericArgs::new_from_iter(interner, [self_ty.into()]), + GenericArgs::new_from_slice(&[self_ty.into()]), ); Clause(Predicate::new( interner, @@ -1024,9 +1028,9 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { }); predicates.extend(sized_clause); } - predicates.into_boxed_slice() + predicates }); - ImplTrait { predicates } + ImplTrait { predicates: Clauses::new_from_slice(&predicates).store() } } pub(crate) fn lower_lifetime(&mut self, lifetime: LifetimeRefId) -> Region<'db> { @@ -1090,28 +1094,50 @@ pub(crate) fn impl_trait_query<'db>( db.impl_trait_with_diagnostics(impl_id).map(|it| it.0) } -pub(crate) fn impl_trait_with_diagnostics_query<'db>( +pub(crate) fn impl_trait_with_diagnostics<'db>( db: &'db dyn HirDatabase, impl_id: ImplId, ) -> Option<(EarlyBinder<'db, TraitRef<'db>>, Diagnostics)> { - let impl_data = db.impl_signature(impl_id); - let resolver = impl_id.resolver(db); - let mut ctx = TyLoweringContext::new( - db, - &resolver, - &impl_data.store, - impl_id.into(), - LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true }, - ); - let self_ty = db.impl_self_ty(impl_id).skip_binder(); - let target_trait = impl_data.target_trait.as_ref()?; - let trait_ref = EarlyBinder::bind(ctx.lower_trait_ref(target_trait, self_ty)?); - Some((trait_ref, create_diagnostics(ctx.diagnostics))) + return impl_trait_with_diagnostics_query(db, impl_id).as_ref().map(|(binder, diags)| { + ( + binder.get_with(|(trait_id, args)| { + TraitRef::new_from_args( + DbInterner::new_no_crate(db), + (*trait_id).into(), + args.as_ref(), + ) + }), + diags.clone(), + ) + }); + + #[salsa::tracked(returns(ref))] + pub(crate) fn impl_trait_with_diagnostics_query<'db>( + db: &'db dyn HirDatabase, + impl_id: ImplId, + ) -> Option<(StoredEarlyBinder<(TraitId, StoredGenericArgs)>, Diagnostics)> { + let impl_data = db.impl_signature(impl_id); + let resolver = impl_id.resolver(db); + let mut ctx = TyLoweringContext::new( + db, + 
&resolver, + &impl_data.store, + impl_id.into(), + LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true }, + ); + let self_ty = db.impl_self_ty(impl_id).skip_binder(); + let target_trait = impl_data.target_trait.as_ref()?; + let trait_ref = ctx.lower_trait_ref(target_trait, self_ty)?; + Some(( + StoredEarlyBinder::bind((trait_ref.def_id.0, trait_ref.args.store())), + create_diagnostics(ctx.diagnostics), + )) + } } -impl<'db> ImplTraitId<'db> { +impl ImplTraitId { #[inline] - pub fn predicates(self, db: &'db dyn HirDatabase) -> EarlyBinder<'db, &'db [Clause<'db>]> { + pub fn predicates<'db>(self, db: &'db dyn HirDatabase) -> EarlyBinder<'db, &'db [Clause<'db>]> { let (impl_traits, idx) = match self { ImplTraitId::ReturnTypeImplTrait(owner, idx) => { (ImplTraits::return_type_impl_traits(db, owner), idx) @@ -1123,8 +1149,7 @@ impl<'db> ImplTraitId<'db> { impl_traits .as_deref() .expect("owner should have opaque type") - .as_ref() - .map_bound(|it| &*it.impl_traits[idx].predicates) + .get_with(|it| it.impl_traits[idx].predicates.as_ref().as_slice()) } } @@ -1136,12 +1161,12 @@ impl InternedOpaqueTyId { } #[salsa::tracked] -impl<'db> ImplTraits<'db> { - #[salsa::tracked(returns(ref), unsafe(non_update_return_type))] +impl ImplTraits { + #[salsa::tracked(returns(ref))] pub(crate) fn return_type_impl_traits( - db: &'db dyn HirDatabase, + db: &dyn HirDatabase, def: hir_def::FunctionId, - ) -> Option>>> { + ) -> Option>> { // FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe let data = db.function_signature(def); let resolver = def.resolver(db); @@ -1162,15 +1187,15 @@ impl<'db> ImplTraits<'db> { None } else { return_type_impl_traits.impl_traits.shrink_to_fit(); - Some(Box::new(EarlyBinder::bind(return_type_impl_traits))) + Some(Box::new(StoredEarlyBinder::bind(return_type_impl_traits))) } } - #[salsa::tracked(returns(ref), unsafe(non_update_return_type))] + #[salsa::tracked(returns(ref))] pub(crate) fn type_alias_impl_traits( - db: &'db dyn HirDatabase, + db: &dyn HirDatabase, def: hir_def::TypeAliasId, - ) -> Option>>> { + ) -> Option>> { let data = db.type_alias_signature(def); let resolver = def.resolver(db); let mut ctx = TyLoweringContext::new( @@ -1190,7 +1215,7 @@ impl<'db> ImplTraits<'db> { None } else { type_alias_impl_traits.impl_traits.shrink_to_fit(); - Some(Box::new(EarlyBinder::bind(type_alias_impl_traits))) + Some(Box::new(StoredEarlyBinder::bind(type_alias_impl_traits))) } } } @@ -1246,17 +1271,20 @@ pub(crate) fn ty_query<'db>(db: &'db dyn HirDatabase, def: TyDefId) -> EarlyBind /// Build the declared type of a function. This should not need to look at the /// function body. -fn type_for_fn<'db>(db: &'db dyn HirDatabase, def: FunctionId) -> EarlyBinder<'db, Ty<'db>> { +fn type_for_fn(db: &dyn HirDatabase, def: FunctionId) -> StoredEarlyBinder { let interner = DbInterner::new_no_crate(db); - EarlyBinder::bind(Ty::new_fn_def( - interner, - CallableDefId::FunctionId(def).into(), - GenericArgs::identity_for_item(interner, def.into()), - )) + StoredEarlyBinder::bind( + Ty::new_fn_def( + interner, + CallableDefId::FunctionId(def).into(), + GenericArgs::identity_for_item(interner, def.into()), + ) + .store(), + ) } /// Build the declared type of a const. 
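
From this point on, `lower.rs` keeps applying the same query refactoring: the `#[salsa::tracked]` query returns owned `Stored*` data (for example `StoredEarlyBinder`), and a thin public wrapper rehydrates it into the borrowed `EarlyBinder<'db, ...>` view with `.get()` before handing it to callers. The sketch below imitates that split with a plain `HashMap` standing in for salsa, since the real `#[salsa::tracked(returns(ref))]` plumbing cannot be reproduced here; apart from `bind`, `get` and the `Stored*`/`EarlyBinder` names taken from the diff, everything is invented.

use std::collections::HashMap;

/// Owned payload a cached query can return without borrowing the interner (cf. `StoredTy`).
#[derive(Clone, Debug, PartialEq, Eq)]
struct StoredTy(String);

/// Borrowed view handed to callers (cf. `Ty<'db>`).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct Ty<'db>(&'db str);

#[derive(Clone, Debug, PartialEq, Eq)]
struct StoredEarlyBinder<T>(T);

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct EarlyBinder<T>(T);

impl StoredEarlyBinder<StoredTy> {
    fn bind(value: StoredTy) -> Self {
        StoredEarlyBinder(value)
    }
    /// Rehydrate into the borrowed view, like the `.get()` calls in the query wrappers.
    fn get(&self) -> EarlyBinder<Ty<'_>> {
        EarlyBinder(Ty(self.0.0.as_str()))
    }
}

/// Stand-in for the salsa database.
#[derive(Default)]
struct Db {
    cache: HashMap<u32, StoredEarlyBinder<StoredTy>>,
}

impl Db {
    /// The "tracked query": computes once and caches the owned, stored form.
    fn type_for_const_query(&mut self, const_id: u32) -> &StoredEarlyBinder<StoredTy> {
        self.cache
            .entry(const_id)
            .or_insert_with(|| StoredEarlyBinder::bind(StoredTy(format!("type_of_const({const_id})"))))
    }

    /// The thin public wrapper: rehydrate the cached value into the borrowed view.
    fn type_for_const(&mut self, const_id: u32) -> EarlyBinder<Ty<'_>> {
        self.type_for_const_query(const_id).get()
    }
}

fn main() {
    let mut db = Db::default();
    println!("{:?}", db.type_for_const(0));
}
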
-fn type_for_const<'db>(db: &'db dyn HirDatabase, def: ConstId) -> EarlyBinder<'db, Ty<'db>> { +fn type_for_const(db: &dyn HirDatabase, def: ConstId) -> StoredEarlyBinder { let resolver = def.resolver(db); let data = db.const_signature(def); let parent = def.loc(db).container; @@ -1268,11 +1296,11 @@ fn type_for_const<'db>(db: &'db dyn HirDatabase, def: ConstId) -> EarlyBinder<'d LifetimeElisionKind::AnonymousReportError, ); ctx.set_lifetime_elision(LifetimeElisionKind::for_const(ctx.interner, parent)); - EarlyBinder::bind(ctx.lower_ty(data.type_ref)) + StoredEarlyBinder::bind(ctx.lower_ty(data.type_ref).store()) } /// Build the declared type of a static. -fn type_for_static<'db>(db: &'db dyn HirDatabase, def: StaticId) -> EarlyBinder<'db, Ty<'db>> { +fn type_for_static(db: &dyn HirDatabase, def: StaticId) -> StoredEarlyBinder { let resolver = def.resolver(db); let data = db.static_signature(def); let mut ctx = TyLoweringContext::new( @@ -1283,99 +1311,129 @@ fn type_for_static<'db>(db: &'db dyn HirDatabase, def: StaticId) -> EarlyBinder< LifetimeElisionKind::AnonymousReportError, ); ctx.set_lifetime_elision(LifetimeElisionKind::Elided(Region::new_static(ctx.interner))); - EarlyBinder::bind(ctx.lower_ty(data.type_ref)) + StoredEarlyBinder::bind(ctx.lower_ty(data.type_ref).store()) } /// Build the type of a tuple struct constructor. -fn type_for_struct_constructor<'db>( - db: &'db dyn HirDatabase, +fn type_for_struct_constructor( + db: &dyn HirDatabase, def: StructId, -) -> Option>> { +) -> Option> { let struct_data = def.fields(db); match struct_data.shape { FieldsShape::Record => None, FieldsShape::Unit => Some(type_for_adt(db, def.into())), FieldsShape::Tuple => { let interner = DbInterner::new_no_crate(db); - Some(EarlyBinder::bind(Ty::new_fn_def( - interner, - CallableDefId::StructId(def).into(), - GenericArgs::identity_for_item(interner, def.into()), - ))) + Some(StoredEarlyBinder::bind( + Ty::new_fn_def( + interner, + CallableDefId::StructId(def).into(), + GenericArgs::identity_for_item(interner, def.into()), + ) + .store(), + )) } } } /// Build the type of a tuple enum variant constructor. 
-fn type_for_enum_variant_constructor<'db>( - db: &'db dyn HirDatabase, +fn type_for_enum_variant_constructor( + db: &dyn HirDatabase, def: EnumVariantId, -) -> Option>> { +) -> Option> { let struct_data = def.fields(db); match struct_data.shape { FieldsShape::Record => None, FieldsShape::Unit => Some(type_for_adt(db, def.loc(db).parent.into())), FieldsShape::Tuple => { let interner = DbInterner::new_no_crate(db); - Some(EarlyBinder::bind(Ty::new_fn_def( - interner, - CallableDefId::EnumVariantId(def).into(), - GenericArgs::identity_for_item(interner, def.loc(db).parent.into()), - ))) + Some(StoredEarlyBinder::bind( + Ty::new_fn_def( + interner, + CallableDefId::EnumVariantId(def).into(), + GenericArgs::identity_for_item(interner, def.loc(db).parent.into()), + ) + .store(), + )) } } } -pub(crate) fn value_ty_query<'db>( +pub(crate) fn value_ty<'db>( db: &'db dyn HirDatabase, def: ValueTyDefId, ) -> Option>> { - match def { - ValueTyDefId::FunctionId(it) => Some(type_for_fn(db, it)), - ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it), - ValueTyDefId::UnionId(it) => Some(type_for_adt(db, it.into())), - ValueTyDefId::EnumVariantId(it) => type_for_enum_variant_constructor(db, it), - ValueTyDefId::ConstId(it) => Some(type_for_const(db, it)), - ValueTyDefId::StaticId(it) => Some(type_for_static(db, it)), + return value_ty_query(db, def).as_ref().map(|it| it.get()); + + #[salsa::tracked(returns(ref))] + pub(crate) fn value_ty_query<'db>( + db: &'db dyn HirDatabase, + def: ValueTyDefId, + ) -> Option> { + match def { + ValueTyDefId::FunctionId(it) => Some(type_for_fn(db, it)), + ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it), + ValueTyDefId::UnionId(it) => Some(type_for_adt(db, it.into())), + ValueTyDefId::EnumVariantId(it) => type_for_enum_variant_constructor(db, it), + ValueTyDefId::ConstId(it) => Some(type_for_const(db, it)), + ValueTyDefId::StaticId(it) => Some(type_for_static(db, it)), + } } } -pub(crate) fn type_for_type_alias_with_diagnostics_query<'db>( +pub(crate) fn type_for_type_alias_with_diagnostics<'db>( db: &'db dyn HirDatabase, t: TypeAliasId, ) -> (EarlyBinder<'db, Ty<'db>>, Diagnostics) { - let type_alias_data = db.type_alias_signature(t); - let mut diags = None; - let resolver = t.resolver(db); - let interner = DbInterner::new_no_crate(db); - let inner = if type_alias_data.flags.contains(TypeAliasFlags::IS_EXTERN) { - EarlyBinder::bind(Ty::new_foreign(interner, t.into())) - } else { - let mut ctx = TyLoweringContext::new( - db, - &resolver, - &type_alias_data.store, - t.into(), - LifetimeElisionKind::AnonymousReportError, - ) - .with_impl_trait_mode(ImplTraitLoweringMode::Opaque); - let res = EarlyBinder::bind( - type_alias_data - .ty - .map(|type_ref| ctx.lower_ty(type_ref)) - .unwrap_or_else(|| Ty::new_error(interner, ErrorGuaranteed)), - ); - diags = create_diagnostics(ctx.diagnostics); - res - }; - (inner, diags) -} + let (ty, diags) = type_for_type_alias_with_diagnostics_query(db, t); + return (ty.get(), diags.clone()); -pub(crate) fn type_for_type_alias_with_diagnostics_cycle_result<'db>( - db: &'db dyn HirDatabase, - _adt: TypeAliasId, -) -> (EarlyBinder<'db, Ty<'db>>, Diagnostics) { - (EarlyBinder::bind(Ty::new_error(DbInterner::new_no_crate(db), ErrorGuaranteed)), None) + #[salsa::tracked(returns(ref), cycle_result = type_for_type_alias_with_diagnostics_cycle_result)] + pub(crate) fn type_for_type_alias_with_diagnostics_query<'db>( + db: &'db dyn HirDatabase, + t: TypeAliasId, + ) -> (StoredEarlyBinder, Diagnostics) { + let 
type_alias_data = db.type_alias_signature(t); + let mut diags = None; + let resolver = t.resolver(db); + let interner = DbInterner::new_no_crate(db); + let inner = if type_alias_data.flags.contains(TypeAliasFlags::IS_EXTERN) { + StoredEarlyBinder::bind(Ty::new_foreign(interner, t.into()).store()) + } else { + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &type_alias_data.store, + t.into(), + LifetimeElisionKind::AnonymousReportError, + ) + .with_impl_trait_mode(ImplTraitLoweringMode::Opaque); + let res = StoredEarlyBinder::bind( + type_alias_data + .ty + .map(|type_ref| ctx.lower_ty(type_ref)) + .unwrap_or_else(|| Ty::new_error(interner, ErrorGuaranteed)) + .store(), + ); + diags = create_diagnostics(ctx.diagnostics); + res + }; + (inner, diags) + } + + pub(crate) fn type_for_type_alias_with_diagnostics_cycle_result( + db: &dyn HirDatabase, + _: salsa::Id, + _adt: TypeAliasId, + ) -> (StoredEarlyBinder, Diagnostics) { + ( + StoredEarlyBinder::bind( + Ty::new_error(DbInterner::new_no_crate(db), ErrorGuaranteed).store(), + ), + None, + ) + } } pub(crate) fn impl_self_ty_query<'db>( @@ -1385,30 +1443,45 @@ pub(crate) fn impl_self_ty_query<'db>( db.impl_self_ty_with_diagnostics(impl_id).0 } -pub(crate) fn impl_self_ty_with_diagnostics_query<'db>( +pub(crate) fn impl_self_ty_with_diagnostics<'db>( db: &'db dyn HirDatabase, impl_id: ImplId, ) -> (EarlyBinder<'db, Ty<'db>>, Diagnostics) { - let resolver = impl_id.resolver(db); + let (ty, diags) = impl_self_ty_with_diagnostics_query(db, impl_id); + return (ty.get(), diags.clone()); - let impl_data = db.impl_signature(impl_id); - let mut ctx = TyLoweringContext::new( - db, - &resolver, - &impl_data.store, - impl_id.into(), - LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true }, - ); - let ty = ctx.lower_ty(impl_data.self_ty); - assert!(!ty.has_escaping_bound_vars()); - (EarlyBinder::bind(ty), create_diagnostics(ctx.diagnostics)) -} + #[salsa::tracked(returns(ref), cycle_result = impl_self_ty_with_diagnostics_cycle_result)] + pub(crate) fn impl_self_ty_with_diagnostics_query<'db>( + db: &'db dyn HirDatabase, + impl_id: ImplId, + ) -> (StoredEarlyBinder, Diagnostics) { + let resolver = impl_id.resolver(db); -pub(crate) fn impl_self_ty_with_diagnostics_cycle_result( - db: &dyn HirDatabase, - _impl_id: ImplId, -) -> (EarlyBinder<'_, Ty<'_>>, Diagnostics) { - (EarlyBinder::bind(Ty::new_error(DbInterner::new_no_crate(db), ErrorGuaranteed)), None) + let impl_data = db.impl_signature(impl_id); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &impl_data.store, + impl_id.into(), + LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true }, + ); + let ty = ctx.lower_ty(impl_data.self_ty); + assert!(!ty.has_escaping_bound_vars()); + (StoredEarlyBinder::bind(ty.store()), create_diagnostics(ctx.diagnostics)) + } + + pub(crate) fn impl_self_ty_with_diagnostics_cycle_result( + db: &dyn HirDatabase, + _: salsa::Id, + _impl_id: ImplId, + ) -> (StoredEarlyBinder, Diagnostics) { + ( + StoredEarlyBinder::bind( + Ty::new_error(DbInterner::new_no_crate(db), ErrorGuaranteed).store(), + ), + None, + ) + } } pub(crate) fn const_param_ty_query<'db>(db: &'db dyn HirDatabase, def: ConstParamId) -> Ty<'db> { @@ -1416,56 +1489,69 @@ pub(crate) fn const_param_ty_query<'db>(db: &'db dyn HirDatabase, def: ConstPara } // returns None if def is a type arg -pub(crate) fn const_param_ty_with_diagnostics_query<'db>( +pub(crate) fn const_param_ty_with_diagnostics<'db>( db: &'db dyn HirDatabase, def: ConstParamId, ) -> (Ty<'db>, 
Diagnostics) { - let (parent_data, store) = db.generic_params_and_store(def.parent()); - let data = &parent_data[def.local_id()]; - let resolver = def.parent().resolver(db); - let interner = DbInterner::new_no_crate(db); - let mut ctx = TyLoweringContext::new( - db, - &resolver, - &store, - def.parent(), - LifetimeElisionKind::AnonymousReportError, - ); - let ty = match data { - TypeOrConstParamData::TypeParamData(_) => { - never!(); - Ty::new_error(interner, ErrorGuaranteed) - } - TypeOrConstParamData::ConstParamData(d) => ctx.lower_ty(d.ty), - }; - (ty, create_diagnostics(ctx.diagnostics)) -} + let (ty, diags) = const_param_ty_with_diagnostics_query(db, (), def); + return (ty.as_ref(), diags.clone()); -pub(crate) fn const_param_ty_with_diagnostics_cycle_result<'db>( - db: &'db dyn HirDatabase, - _: crate::db::HirDatabaseData, - _def: ConstParamId, -) -> (Ty<'db>, Diagnostics) { - let interner = DbInterner::new_no_crate(db); - (Ty::new_error(interner, ErrorGuaranteed), None) + // FIXME: Make this query non-interned. + #[salsa::tracked(returns(ref), cycle_result = const_param_ty_with_diagnostics_cycle_result)] + pub(crate) fn const_param_ty_with_diagnostics_query<'db>( + db: &'db dyn HirDatabase, + _: (), + def: ConstParamId, + ) -> (StoredTy, Diagnostics) { + let (parent_data, store) = db.generic_params_and_store(def.parent()); + let data = &parent_data[def.local_id()]; + let resolver = def.parent().resolver(db); + let interner = DbInterner::new_no_crate(db); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &store, + def.parent(), + LifetimeElisionKind::AnonymousReportError, + ); + let ty = match data { + TypeOrConstParamData::TypeParamData(_) => { + never!(); + Ty::new_error(interner, ErrorGuaranteed) + } + TypeOrConstParamData::ConstParamData(d) => ctx.lower_ty(d.ty), + }; + (ty.store(), create_diagnostics(ctx.diagnostics)) + } + + pub(crate) fn const_param_ty_with_diagnostics_cycle_result( + db: &dyn HirDatabase, + _: salsa::Id, + _: (), + _def: ConstParamId, + ) -> (StoredTy, Diagnostics) { + let interner = DbInterner::new_no_crate(db); + (Ty::new_error(interner, ErrorGuaranteed).store(), None) + } } -pub(crate) fn field_types_query<'db>( - db: &'db dyn HirDatabase, +pub(crate) fn field_types_query( + db: &dyn HirDatabase, variant_id: VariantId, -) -> Arc>>> { - db.field_types_with_diagnostics(variant_id).0 +) -> &ArenaMap> { + &db.field_types_with_diagnostics(variant_id).0 } /// Build the type of all specific fields of a struct or enum variant. 
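// A toy model of the `store()` / `get()` round-trip that the queries above rely on.
// Illustrative only and not part of this patch: the names `Stored` / `View` are made up,
// while the real lifetime-free types (`StoredTy`, `StoredEarlyBinder`, ...) are imported
// from `crate::next_solver` elsewhere in this diff and wrap interned values, not strings.
struct Stored(String); // lifetime-free form kept inside salsa storage

struct View<'db>(&'db str); // `'db`-bound form handed out to callers

impl Stored {
    // Rebind the value to the lifetime of the storage it is read from.
    fn get<'db>(&'db self) -> View<'db> {
        View(&self.0)
    }
}

impl<'db> View<'db> {
    // Erase the lifetime so the value can be returned from a tracked query.
    fn store(self) -> Stored {
        Stored(self.0.to_owned())
    }
}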
+#[salsa::tracked(returns(ref))] pub(crate) fn field_types_with_diagnostics_query<'db>( db: &'db dyn HirDatabase, variant_id: VariantId, -) -> (Arc>>>, Diagnostics) { +) -> (ArenaMap>, Diagnostics) { let var_data = variant_id.fields(db); let fields = var_data.fields(); if fields.is_empty() { - return (Arc::new(ArenaMap::default()), None); + return (ArenaMap::default(), None); } let (resolver, def): (_, GenericDefId) = match variant_id { @@ -1482,9 +1568,9 @@ pub(crate) fn field_types_with_diagnostics_query<'db>( LifetimeElisionKind::AnonymousReportError, ); for (field_id, field_data) in var_data.fields().iter() { - res.insert(field_id, EarlyBinder::bind(ctx.lower_ty(field_data.type_ref))); + res.insert(field_id, StoredEarlyBinder::bind(ctx.lower_ty(field_data.type_ref).store())); } - (Arc::new(res), create_diagnostics(ctx.diagnostics)) + (res, create_diagnostics(ctx.diagnostics)) } /// This query exists only to be used when resolving short-hand associated types @@ -1496,13 +1582,13 @@ pub(crate) fn field_types_with_diagnostics_query<'db>( /// following bounds are disallowed: `T: Foo, U: Foo`, but /// these are fine: `T: Foo, U: Foo<()>`. #[tracing::instrument(skip(db), ret)] -#[salsa::tracked(returns(ref), unsafe(non_update_return_type), cycle_result = generic_predicates_for_param_cycle_result)] +#[salsa::tracked(returns(ref), cycle_result = generic_predicates_for_param_cycle_result)] pub(crate) fn generic_predicates_for_param<'db>( db: &'db dyn HirDatabase, def: GenericDefId, param_id: TypeOrConstParamId, assoc_name: Option, -) -> EarlyBinder<'db, Box<[Clause<'db>]>> { +) -> StoredEarlyBinder { let generics = generics(db, def); let interner = DbInterner::new_no_crate(db); let resolver = def.resolver(db); @@ -1604,16 +1690,17 @@ pub(crate) fn generic_predicates_for_param<'db>( predicates.extend(implicitly_sized_predicates); }; } - EarlyBinder::bind(predicates.into_boxed_slice()) + StoredEarlyBinder::bind(Clauses::new_from_slice(&predicates).store()) } -pub(crate) fn generic_predicates_for_param_cycle_result<'db>( - _db: &'db dyn HirDatabase, +pub(crate) fn generic_predicates_for_param_cycle_result( + db: &dyn HirDatabase, + _: salsa::Id, _def: GenericDefId, _param_id: TypeOrConstParamId, _assoc_name: Option, -) -> EarlyBinder<'db, Box<[Clause<'db>]>> { - EarlyBinder::bind(Box::new([])) +) -> StoredEarlyBinder { + StoredEarlyBinder::bind(Clauses::empty(DbInterner::new_no_crate(db)).store()) } #[inline] @@ -1621,84 +1708,95 @@ pub(crate) fn type_alias_bounds<'db>( db: &'db dyn HirDatabase, type_alias: TypeAliasId, ) -> EarlyBinder<'db, &'db [Clause<'db>]> { - type_alias_bounds_with_diagnostics(db, type_alias).0.as_ref().map_bound(|it| &**it) + type_alias_bounds_with_diagnostics(db, type_alias).0.map_bound(|it| it.as_slice()) } -#[salsa::tracked(returns(ref), unsafe(non_update_return_type))] -pub fn type_alias_bounds_with_diagnostics<'db>( +pub(crate) fn type_alias_bounds_with_diagnostics<'db>( db: &'db dyn HirDatabase, type_alias: TypeAliasId, -) -> (EarlyBinder<'db, Box<[Clause<'db>]>>, Diagnostics) { - let type_alias_data = db.type_alias_signature(type_alias); - let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db); - let mut ctx = TyLoweringContext::new( - db, - &resolver, - &type_alias_data.store, - type_alias.into(), - LifetimeElisionKind::AnonymousReportError, - ); - let interner = ctx.interner; - let def_id = type_alias.into(); +) -> (EarlyBinder<'db, Clauses<'db>>, Diagnostics) { + let (bounds, diags) = type_alias_bounds_with_diagnostics_query(db, type_alias); + 
return (bounds.get(), diags.clone()); - let item_args = GenericArgs::identity_for_item(interner, def_id); - let interner_ty = Ty::new_projection_from_args(interner, def_id, item_args); + #[salsa::tracked(returns(ref))] + pub fn type_alias_bounds_with_diagnostics_query<'db>( + db: &'db dyn HirDatabase, + type_alias: TypeAliasId, + ) -> (StoredEarlyBinder, Diagnostics) { + let type_alias_data = db.type_alias_signature(type_alias); + let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &type_alias_data.store, + type_alias.into(), + LifetimeElisionKind::AnonymousReportError, + ); + let interner = ctx.interner; + let def_id = type_alias.into(); - let mut bounds = Vec::new(); - for bound in &type_alias_data.bounds { - ctx.lower_type_bound(bound, interner_ty, false).for_each(|pred| { - bounds.push(pred); - }); - } + let item_args = GenericArgs::identity_for_item(interner, def_id); + let interner_ty = Ty::new_projection_from_args(interner, def_id, item_args); - if !ctx.unsized_types.contains(&interner_ty) { - let sized_trait = ctx.lang_items.Sized; - if let Some(sized_trait) = sized_trait { - let trait_ref = TraitRef::new_from_args( - interner, - sized_trait.into(), - GenericArgs::new_from_iter(interner, [interner_ty.into()]), - ); - bounds.push(trait_ref.upcast(interner)); - }; - } + let mut bounds = Vec::new(); + for bound in &type_alias_data.bounds { + ctx.lower_type_bound(bound, interner_ty, false).for_each(|pred| { + bounds.push(pred); + }); + } - (EarlyBinder::bind(bounds.into_boxed_slice()), create_diagnostics(ctx.diagnostics)) + if !ctx.unsized_types.contains(&interner_ty) { + let sized_trait = ctx.lang_items.Sized; + if let Some(sized_trait) = sized_trait { + let trait_ref = TraitRef::new_from_args( + interner, + sized_trait.into(), + GenericArgs::new_from_slice(&[interner_ty.into()]), + ); + bounds.push(trait_ref.upcast(interner)); + }; + } + + ( + StoredEarlyBinder::bind(Clauses::new_from_slice(&bounds).store()), + create_diagnostics(ctx.diagnostics), + ) + } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct GenericPredicates<'db> { +pub struct GenericPredicates { // The order is the following: first, if `parent_is_trait == true`, comes the implicit trait predicate for the // parent. Then come the explicit predicates for the parent, then the explicit trait predicate for the child, // then the implicit trait predicate for the child, if `is_trait` is `true`. - predicates: EarlyBinder<'db, Box<[Clause<'db>]>>, + predicates: StoredEarlyBinder, own_predicates_start: u32, is_trait: bool, parent_is_trait: bool, } #[salsa::tracked] -impl<'db> GenericPredicates<'db> { +impl<'db> GenericPredicates { /// Resolve the where clause(s) of an item with generics. /// /// Diagnostics are computed only for this item's predicates, not for parents. 
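// Note on the cycle handlers rewritten in the hunks above: once a query is declared with
// `#[salsa::tracked(..., cycle_result = ...)]`, its recovery function takes an extra
// `salsa::Id` argument between the database and the query's remaining inputs, and it
// returns the stored, lifetime-free form of the result. Schematically (`example_query`,
// `ExampleId`, and `example_cycle_result` are placeholder names, not part of this patch):
//
//     #[salsa::tracked(returns(ref), cycle_result = example_cycle_result)]
//     fn example_query(db: &dyn HirDatabase, def: ExampleId) -> StoredTy {
//         /* lower `def`, then `.store()` the result */
//     }
//
//     fn example_cycle_result(db: &dyn HirDatabase, _: salsa::Id, _def: ExampleId) -> StoredTy {
//         // Fall back to an error type when the query participates in a cycle.
//         Ty::new_error(DbInterner::new_no_crate(db), ErrorGuaranteed).store()
//     }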
- #[salsa::tracked(returns(ref), unsafe(non_update_return_type))] + #[salsa::tracked(returns(ref))] pub fn query_with_diagnostics( db: &'db dyn HirDatabase, def: GenericDefId, - ) -> (GenericPredicates<'db>, Diagnostics) { + ) -> (GenericPredicates, Diagnostics) { generic_predicates_filtered_by(db, def, PredicateFilter::All, |_| true) } } -impl<'db> GenericPredicates<'db> { +impl GenericPredicates { #[inline] - pub fn query(db: &'db dyn HirDatabase, def: GenericDefId) -> &'db GenericPredicates<'db> { + pub fn query(db: &dyn HirDatabase, def: GenericDefId) -> &GenericPredicates { &Self::query_with_diagnostics(db, def).0 } #[inline] - pub fn query_all( + pub fn query_all<'db>( db: &'db dyn HirDatabase, def: GenericDefId, ) -> EarlyBinder<'db, &'db [Clause<'db>]> { @@ -1706,7 +1804,7 @@ impl<'db> GenericPredicates<'db> { } #[inline] - pub fn query_own( + pub fn query_own<'db>( db: &'db dyn HirDatabase, def: GenericDefId, ) -> EarlyBinder<'db, &'db [Clause<'db>]> { @@ -1714,7 +1812,7 @@ impl<'db> GenericPredicates<'db> { } #[inline] - pub fn query_explicit( + pub fn query_explicit<'db>( db: &'db dyn HirDatabase, def: GenericDefId, ) -> EarlyBinder<'db, &'db [Clause<'db>]> { @@ -1722,20 +1820,20 @@ impl<'db> GenericPredicates<'db> { } #[inline] - pub fn all_predicates(&self) -> EarlyBinder<'db, &[Clause<'db>]> { - self.predicates.as_ref().map_bound(|it| &**it) + pub fn all_predicates(&self) -> EarlyBinder<'_, &[Clause<'_>]> { + self.predicates.get().map_bound(|it| it.as_slice()) } #[inline] - pub fn own_predicates(&self) -> EarlyBinder<'db, &[Clause<'db>]> { - self.predicates.as_ref().map_bound(|it| &it[self.own_predicates_start as usize..]) + pub fn own_predicates(&self) -> EarlyBinder<'_, &[Clause<'_>]> { + self.predicates.get().map_bound(|it| &it.as_slice()[self.own_predicates_start as usize..]) } /// Returns the predicates, minus the implicit `Self: Trait` predicate for a trait. 
#[inline] - pub fn explicit_predicates(&self) -> EarlyBinder<'db, &[Clause<'db>]> { - self.predicates.as_ref().map_bound(|it| { - &it[usize::from(self.parent_is_trait)..it.len() - usize::from(self.is_trait)] + pub fn explicit_predicates(&self) -> EarlyBinder<'_, &[Clause<'_>]> { + self.predicates.get().map_bound(|it| { + &it.as_slice()[usize::from(self.parent_is_trait)..it.len() - usize::from(self.is_trait)] }) } } @@ -1750,18 +1848,24 @@ pub(crate) fn trait_environment_for_body_query( db.trait_environment(def) } -pub(crate) fn trait_environment_query<'db>( - db: &'db dyn HirDatabase, - def: GenericDefId, -) -> ParamEnv<'db> { - let module = def.module(db); - let interner = DbInterner::new_with(db, module.krate(db)); - let predicates = GenericPredicates::query_all(db, def); - let clauses = rustc_type_ir::elaborate::elaborate(interner, predicates.iter_identity_copied()); - let clauses = Clauses::new_from_iter(interner, clauses); +pub(crate) fn trait_environment<'db>(db: &'db dyn HirDatabase, def: GenericDefId) -> ParamEnv<'db> { + return ParamEnv { clauses: trait_environment_query(db, def).as_ref() }; + + #[salsa::tracked(returns(ref))] + pub(crate) fn trait_environment_query<'db>( + db: &'db dyn HirDatabase, + def: GenericDefId, + ) -> StoredClauses { + let module = def.module(db); + let interner = DbInterner::new_with(db, module.krate(db)); + let predicates = GenericPredicates::query_all(db, def); + let clauses = + rustc_type_ir::elaborate::elaborate(interner, predicates.iter_identity_copied()); + let clauses = Clauses::new_from_iter(interner, clauses); - // FIXME: We should normalize projections here, like rustc does. - ParamEnv { clauses } + // FIXME: We should normalize projections here, like rustc does. + clauses.store() + } } #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -1773,12 +1877,12 @@ pub(crate) enum PredicateFilter { /// Resolve the where clause(s) of an item with generics, /// with a given filter #[tracing::instrument(skip(db, filter), ret)] -pub(crate) fn generic_predicates_filtered_by<'db, F>( - db: &'db dyn HirDatabase, +pub(crate) fn generic_predicates_filtered_by( + db: &dyn HirDatabase, def: GenericDefId, predicate_filter: PredicateFilter, filter: F, -) -> (GenericPredicates<'db>, Diagnostics) +) -> (GenericPredicates, Diagnostics) where F: Fn(GenericDefId) -> bool, { @@ -1852,7 +1956,7 @@ where let trait_ref = TraitRef::new_from_args( interner, sized_trait.into(), - GenericArgs::new_from_iter(interner, [param_ty.into()]), + GenericArgs::new_from_slice(&[param_ty.into()]), ); let clause = Clause(Predicate::new( interner, @@ -1895,7 +1999,7 @@ where own_predicates_start, is_trait, parent_is_trait, - predicates: EarlyBinder::bind(predicates.into_boxed_slice()), + predicates: StoredEarlyBinder::bind(Clauses::new_from_slice(&predicates).store()), }; return (predicates, diagnostics); @@ -1984,7 +2088,7 @@ fn implicitly_sized_clauses<'a, 'subst, 'db>( let trait_ref = TraitRef::new_from_args( interner, sized_trait.into(), - GenericArgs::new_from_iter(interner, [self_ty.into()]), + GenericArgs::new_from_slice(&[self_ty.into()]), ); Clause(Predicate::new( interner, @@ -2000,19 +2104,16 @@ fn implicitly_sized_clauses<'a, 'subst, 'db>( } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct GenericDefaults<'db>(Option>>]>>); +pub struct GenericDefaults(Option>]>>); -impl<'db> GenericDefaults<'db> { +impl GenericDefaults { #[inline] - pub fn get(&self, idx: usize) -> Option>> { - self.0.as_ref()?[idx] + pub fn get<'db>(&self, idx: usize) -> Option>> { + 
Some(self.0.as_ref()?[idx].as_ref()?.get_with(|it| it.as_ref())) } } -pub(crate) fn generic_defaults_query( - db: &dyn HirDatabase, - def: GenericDefId, -) -> GenericDefaults<'_> { +pub(crate) fn generic_defaults_query(db: &dyn HirDatabase, def: GenericDefId) -> GenericDefaults { db.generic_defaults_with_diagnostics(def).0 } @@ -2022,7 +2123,7 @@ pub(crate) fn generic_defaults_query( pub(crate) fn generic_defaults_with_diagnostics_query( db: &dyn HirDatabase, def: GenericDefId, -) -> (GenericDefaults<'_>, Diagnostics) { +) -> (GenericDefaults, Diagnostics) { let generic_params = generics(db, def); if generic_params.is_empty() { return (GenericDefaults(None), None); @@ -2068,20 +2169,23 @@ pub(crate) fn generic_defaults_with_diagnostics_query( ctx: &mut TyLoweringContext<'db, '_>, idx: usize, p: GenericParamDataRef<'_>, - ) -> (Option>>, bool) { + ) -> (Option>, bool) { ctx.lowering_param_default(idx as u32); match p { GenericParamDataRef::TypeParamData(p) => { let ty = p.default.map(|ty| ctx.lower_ty(ty)); - (ty.map(|ty| EarlyBinder::bind(ty.into())), p.default.is_some()) + ( + ty.map(|ty| StoredEarlyBinder::bind(GenericArg::from(ty).store())), + p.default.is_some(), + ) } GenericParamDataRef::ConstParamData(p) => { let val = p.default.map(|c| { let param_ty = ctx.lower_ty(p.ty); let c = ctx.lower_const(c, param_ty); - c.into() + GenericArg::from(c).store() }); - (val.map(EarlyBinder::bind), p.default.is_some()) + (val.map(StoredEarlyBinder::bind), p.default.is_some()) } GenericParamDataRef::LifetimeParamData(_) => (None, false), } @@ -2090,27 +2194,33 @@ pub(crate) fn generic_defaults_with_diagnostics_query( pub(crate) fn generic_defaults_with_diagnostics_cycle_result( _db: &dyn HirDatabase, + _: salsa::Id, _def: GenericDefId, -) -> (GenericDefaults<'_>, Diagnostics) { +) -> (GenericDefaults, Diagnostics) { (GenericDefaults(None), None) } /// Build the signature of a callable item (function, struct or enum variant). 
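// The next hunk converts `callable_item_signature` to the same wrapper shape used
// throughout this file: a thin public function keeps the `'db` lifetime and delegates to
// a nested `#[salsa::tracked(returns(ref))]` query that returns the stored form, which is
// then rebound via `.get()` / `.as_ref()` as appropriate. Schematically, with `item_sig`,
// `ItemId`, `Sig`, and `StoredSig` as placeholder names:
//
//     pub(crate) fn item_sig<'db>(db: &'db dyn HirDatabase, def: ItemId) -> Sig<'db> {
//         // Read the cached stored value by reference and rebind its lifetime.
//         return item_sig_query(db, def).get();
//
//         #[salsa::tracked(returns(ref))]
//         fn item_sig_query(db: &dyn HirDatabase, def: ItemId) -> StoredSig {
//             /* compute the signature, then `.store()` it */
//         }
//     }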
-pub(crate) fn callable_item_signature_query<'db>( +pub(crate) fn callable_item_signature<'db>( db: &'db dyn HirDatabase, def: CallableDefId, ) -> EarlyBinder<'db, PolyFnSig<'db>> { - match def { - CallableDefId::FunctionId(f) => fn_sig_for_fn(db, f), - CallableDefId::StructId(s) => fn_sig_for_struct_constructor(db, s), - CallableDefId::EnumVariantId(e) => fn_sig_for_enum_variant_constructor(db, e), + return callable_item_signature_query(db, def).get_with(|sig| sig.get()); + + #[salsa::tracked(returns(ref))] + pub(crate) fn callable_item_signature_query<'db>( + db: &'db dyn HirDatabase, + def: CallableDefId, + ) -> StoredEarlyBinder { + match def { + CallableDefId::FunctionId(f) => fn_sig_for_fn(db, f), + CallableDefId::StructId(s) => fn_sig_for_struct_constructor(db, s), + CallableDefId::EnumVariantId(e) => fn_sig_for_enum_variant_constructor(db, e), + } } } -fn fn_sig_for_fn<'db>( - db: &'db dyn HirDatabase, - def: FunctionId, -) -> EarlyBinder<'db, PolyFnSig<'db>> { +fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> StoredEarlyBinder { let data = db.function_signature(def); let resolver = def.resolver(db); let interner = DbInterner::new_no_crate(db); @@ -2140,56 +2250,56 @@ fn fn_sig_for_fn<'db>( let inputs_and_output = Tys::new_from_iter(interner, params.chain(Some(ret))); // If/when we track late bound vars, we need to switch this to not be `dummy` - EarlyBinder::bind(rustc_type_ir::Binder::dummy(FnSig { + StoredEarlyBinder::bind(StoredPolyFnSig::new(Binder::dummy(FnSig { abi: data.abi.as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol), c_variadic: data.is_varargs(), safety: if data.is_unsafe() { Safety::Unsafe } else { Safety::Safe }, inputs_and_output, - })) + }))) } -fn type_for_adt<'db>(db: &'db dyn HirDatabase, adt: AdtId) -> EarlyBinder<'db, Ty<'db>> { +fn type_for_adt(db: &dyn HirDatabase, adt: AdtId) -> StoredEarlyBinder { let interner = DbInterner::new_no_crate(db); let args = GenericArgs::identity_for_item(interner, adt.into()); let ty = Ty::new_adt(interner, adt, args); - EarlyBinder::bind(ty) + StoredEarlyBinder::bind(ty.store()) } -fn fn_sig_for_struct_constructor<'db>( - db: &'db dyn HirDatabase, +fn fn_sig_for_struct_constructor( + db: &dyn HirDatabase, def: StructId, -) -> EarlyBinder<'db, PolyFnSig<'db>> { +) -> StoredEarlyBinder { let field_tys = db.field_types(def.into()); - let params = field_tys.iter().map(|(_, ty)| ty.skip_binder()); + let params = field_tys.iter().map(|(_, ty)| ty.get().skip_binder()); let ret = type_for_adt(db, def.into()).skip_binder(); let inputs_and_output = - Tys::new_from_iter(DbInterner::new_no_crate(db), params.chain(Some(ret))); - EarlyBinder::bind(Binder::dummy(FnSig { + Tys::new_from_iter(DbInterner::new_no_crate(db), params.chain(Some(ret.as_ref()))); + StoredEarlyBinder::bind(StoredPolyFnSig::new(Binder::dummy(FnSig { abi: FnAbi::RustCall, c_variadic: false, safety: Safety::Safe, inputs_and_output, - })) + }))) } -fn fn_sig_for_enum_variant_constructor<'db>( - db: &'db dyn HirDatabase, +fn fn_sig_for_enum_variant_constructor( + db: &dyn HirDatabase, def: EnumVariantId, -) -> EarlyBinder<'db, PolyFnSig<'db>> { +) -> StoredEarlyBinder { let field_tys = db.field_types(def.into()); - let params = field_tys.iter().map(|(_, ty)| ty.skip_binder()); + let params = field_tys.iter().map(|(_, ty)| ty.get().skip_binder()); let parent = def.lookup(db).parent; let ret = type_for_adt(db, parent.into()).skip_binder(); let inputs_and_output = - Tys::new_from_iter(DbInterner::new_no_crate(db), params.chain(Some(ret))); - 
EarlyBinder::bind(Binder::dummy(FnSig { + Tys::new_from_iter(DbInterner::new_no_crate(db), params.chain(Some(ret.as_ref()))); + StoredEarlyBinder::bind(StoredPolyFnSig::new(Binder::dummy(FnSig { abi: FnAbi::RustCall, c_variadic: false, safety: Safety::Safe, inputs_and_output, - })) + }))) } // FIXME(next-solver): should merge this with `explicit_item_bounds` in some way @@ -2226,10 +2336,7 @@ pub(crate) fn associated_ty_item_bounds<'db>( Some(ExistentialPredicate::Trait(ExistentialTraitRef::new_from_args( interner, t.def_id(), - GenericArgs::new_from_iter( - interner, - t.trait_ref.args.iter().skip(1), - ), + GenericArgs::new_from_slice(&t.trait_ref.args[1..]), ))) } } @@ -2237,10 +2344,7 @@ pub(crate) fn associated_ty_item_bounds<'db>( ExistentialPredicate::Projection(ExistentialProjection::new_from_args( interner, p.def_id(), - GenericArgs::new_from_iter( - interner, - p.projection_term.args.iter().skip(1), - ), + GenericArgs::new_from_slice(&p.projection_term.args[1..]), p.term, )), ), @@ -2270,7 +2374,7 @@ pub(crate) fn associated_ty_item_bounds<'db>( bounds.push(sized_clause); } - EarlyBinder::bind(BoundExistentialPredicates::new_from_iter(interner, bounds)) + EarlyBinder::bind(BoundExistentialPredicates::new_from_slice(&bounds)) } pub(crate) fn associated_type_by_name_including_super_traits<'db>( @@ -2336,7 +2440,7 @@ fn named_associated_type_shorthand_candidates<'db, R>( if let Some(alias) = check_trait(trait_ref) { return Some(alias); } - for pred in generic_predicates_filtered_by( + let predicates = generic_predicates_filtered_by( db, GenericDefId::TraitId(trait_ref.def_id.0), PredicateFilter::SelfTrait, @@ -2347,9 +2451,8 @@ fn named_associated_type_shorthand_candidates<'db, R>( |pred| pred != def && pred == GenericDefId::TraitId(trait_ref.def_id.0), ) .0 - .predicates - .instantiate_identity() - { + .predicates; + for pred in predicates.get().instantiate_identity() { tracing::debug!(?pred); let sup_trait_ref = match pred.kind().skip_binder() { rustc_type_ir::ClauseKind::Trait(pred) => pred.trait_ref, @@ -2396,8 +2499,8 @@ fn named_associated_type_shorthand_candidates<'db, R>( let predicates = generic_predicates_for_param(db, def, param_id.into(), assoc_name.clone()); predicates - .as_ref() - .iter_identity_copied() + .get() + .iter_identity() .find_map(|pred| match pred.kind().skip_binder() { rustc_type_ir::ClauseKind::Trait(trait_predicate) => Some(trait_predicate), _ => None, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs index fe96b6832e08d..a79f547c2a44f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs @@ -20,7 +20,7 @@ use hir_def::{ use hir_expand::name::Name; use rustc_type_ir::{ AliasTerm, AliasTy, AliasTyKind, - inherent::{GenericArgs as _, Region as _, SliceLike, Ty as _}, + inherent::{GenericArgs as _, Region as _, Ty as _}, }; use smallvec::SmallVec; use stdx::never; @@ -45,17 +45,15 @@ use super::{ const_param_ty_query, ty_query, }; -type CallbackData<'a, 'db> = Either< - PathDiagnosticCallbackData, - crate::infer::diagnostics::PathDiagnosticCallbackData<'a, 'db>, ->; +type CallbackData<'a> = + Either>; // We cannot use `&mut dyn FnMut()` because of lifetime issues, and we don't want to use `Box` // because of the allocation, so we create a lifetime-less callback, tailored for our needs. 
pub(crate) struct PathDiagnosticCallback<'a, 'db> { - pub(crate) data: CallbackData<'a, 'db>, + pub(crate) data: CallbackData<'a>, pub(crate) callback: - fn(&CallbackData<'_, 'db>, &mut TyLoweringContext<'db, '_>, PathLoweringDiagnostic), + fn(&CallbackData<'_>, &mut TyLoweringContext<'db, '_>, PathLoweringDiagnostic), } pub(crate) struct PathLoweringContext<'a, 'b, 'db> { @@ -508,7 +506,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> { Some(Ty::new_alias( interner, AliasTyKind::Projection, - AliasTy::new(interner, associated_ty.into(), substs), + AliasTy::new_from_args(interner, associated_ty.into(), substs), )) }; named_associated_type_shorthand_candidates( @@ -555,7 +553,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> { ValueTyDefId::UnionId(it) => it.into(), ValueTyDefId::ConstId(it) => it.into(), ValueTyDefId::StaticId(_) => { - return GenericArgs::new_from_iter(interner, []); + return GenericArgs::empty(interner); } ValueTyDefId::EnumVariantId(var) => { // the generic args for an enum variant may be either specified @@ -1285,7 +1283,7 @@ pub(crate) fn substs_from_args_and_bindings<'db>( } } - GenericArgs::new_from_iter(interner, substs) + GenericArgs::new_from_slice(&substs) } fn type_looks_like_const( diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs index 868ae00329b3b..c370330a87172 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs @@ -26,7 +26,7 @@ use rustc_hash::{FxHashMap, FxHashSet}; use rustc_type_ir::{ TypeVisitableExt, fast_reject::{TreatParams, simplify_type}, - inherent::{BoundExistentialPredicates, IntoKind, SliceLike}, + inherent::{BoundExistentialPredicates, IntoKind}, }; use stdx::impl_from; use triomphe::Arc; @@ -362,7 +362,7 @@ pub fn lookup_impl_const<'db>( ItemContainerId::TraitId(id) => id, _ => return (const_id, subs), }; - let trait_ref = TraitRef::new(interner, trait_id.into(), subs); + let trait_ref = TraitRef::new_from_args(interner, trait_id.into(), subs); let const_signature = db.const_signature(const_id); let name = match const_signature.name.as_ref() { @@ -392,10 +392,10 @@ pub fn is_dyn_method<'db>( }; let trait_params = db.generic_params(trait_id.into()).len(); let fn_params = fn_subst.len() - trait_params; - let trait_ref = TraitRef::new( + let trait_ref = TraitRef::new_from_args( interner, trait_id.into(), - GenericArgs::new_from_iter(interner, fn_subst.iter().take(trait_params)), + GenericArgs::new_from_slice(&fn_subst[..trait_params]), ); let self_ty = trait_ref.self_ty(); if let TyKind::Dynamic(d, _) = self_ty.kind() { @@ -427,10 +427,10 @@ pub(crate) fn lookup_impl_method_query<'db>( return (func, fn_subst); }; let trait_params = db.generic_params(trait_id.into()).len(); - let trait_ref = TraitRef::new( + let trait_ref = TraitRef::new_from_args( interner, trait_id.into(), - GenericArgs::new_from_iter(interner, fn_subst.iter().take(trait_params)), + GenericArgs::new_from_slice(&fn_subst[..trait_params]), ); let name = &db.function_signature(func).name; @@ -505,13 +505,19 @@ pub(crate) fn find_matching_impl<'db>( } #[salsa::tracked(returns(ref))] -fn crates_containing_incoherent_inherent_impls(db: &dyn HirDatabase) -> Box<[Crate]> { +fn crates_containing_incoherent_inherent_impls(db: &dyn HirDatabase, krate: Crate) -> Box<[Crate]> { + let _p = tracing::info_span!("crates_containing_incoherent_inherent_impls").entered(); // We assume that only 
sysroot crates contain `#[rustc_has_incoherent_inherent_impls]` // impls, since this is an internal feature and only std uses it. - db.all_crates().iter().copied().filter(|krate| krate.data(db).origin.is_lang()).collect() + krate.transitive_deps(db).into_iter().filter(|krate| krate.data(db).origin.is_lang()).collect() } -pub fn incoherent_inherent_impls(db: &dyn HirDatabase, self_ty: SimplifiedType) -> &[ImplId] { +pub fn with_incoherent_inherent_impls( + db: &dyn HirDatabase, + krate: Crate, + self_ty: &SimplifiedType, + mut callback: impl FnMut(&[ImplId]), +) { let has_incoherent_impls = match self_ty.def() { Some(def_id) => match def_id.try_into() { Ok(def_id) => AttrFlags::query(db, def_id) @@ -520,26 +526,14 @@ pub fn incoherent_inherent_impls(db: &dyn HirDatabase, self_ty: SimplifiedType) }, _ => true, }; - return if !has_incoherent_impls { - &[] - } else { - incoherent_inherent_impls_query(db, (), self_ty) - }; - - #[salsa::tracked(returns(ref))] - fn incoherent_inherent_impls_query( - db: &dyn HirDatabase, - _force_query_input_to_be_interned: (), - self_ty: SimplifiedType, - ) -> Box<[ImplId]> { - let _p = tracing::info_span!("incoherent_inherent_impl_crates").entered(); - - let mut result = Vec::new(); - for &krate in crates_containing_incoherent_inherent_impls(db) { - let impls = InherentImpls::for_crate(db, krate); - result.extend_from_slice(impls.for_self_ty(&self_ty)); - } - result.into_boxed_slice() + if !has_incoherent_impls { + return; + } + let _p = tracing::info_span!("incoherent_inherent_impls").entered(); + let crates = crates_containing_incoherent_inherent_impls(db, krate); + for &krate in crates { + let impls = InherentImpls::for_crate(db, krate); + callback(impls.for_self_ty(self_ty)); } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution/confirm.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution/confirm.rs index 6d6515a457825..0024ca16a5942 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution/confirm.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution/confirm.rs @@ -9,7 +9,7 @@ use hir_def::{ use rustc_type_ir::{ TypeFoldable, elaborate::elaborate, - inherent::{BoundExistentialPredicates, IntoKind, SliceLike, Ty as _}, + inherent::{BoundExistentialPredicates, IntoKind, Ty as _}, }; use tracing::debug; @@ -45,7 +45,7 @@ struct ConfirmContext<'a, 'b, 'db> { pub(crate) struct ConfirmResult<'db> { pub(crate) callee: MethodCallee<'db>, pub(crate) illegal_sized_bound: bool, - pub(crate) adjustments: Box<[Adjustment<'db>]>, + pub(crate) adjustments: Box<[Adjustment]>, } impl<'a, 'db> InferenceContext<'a, 'db> { @@ -145,7 +145,7 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> { // traits, no trait system method can be called before this point because they // could alter our Self-type, except for normalizing the receiver from the // signature (which is also done during probing). - let method_sig_rcvr = method_sig.inputs().as_slice()[0]; + let method_sig_rcvr = method_sig.inputs()[0]; debug!( "confirm: self_ty={:?} method_sig_rcvr={:?} method_sig={:?}", self_ty, method_sig_rcvr, method_sig @@ -177,7 +177,7 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> { &mut self, unadjusted_self_ty: Ty<'db>, pick: &probe::Pick<'db>, - ) -> (Ty<'db>, Box<[Adjustment<'db>]>) { + ) -> (Ty<'db>, Box<[Adjustment]>) { // Commit the autoderefs by calling `autoderef` again, but this // time writing the results into the various typeck results. 
let mut autoderef = self.ctx.table.autoderef_with_tracking(unadjusted_self_ty); @@ -200,8 +200,10 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> { // for two-phase borrows. let mutbl = AutoBorrowMutability::new(mutbl, AllowTwoPhase::Yes); - adjustments - .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), target }); + adjustments.push(Adjustment { + kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), + target: target.store(), + }); if unsize { let unsized_ty = if let TyKind::Array(elem_ty, _) = base_ty.kind() { @@ -213,8 +215,10 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> { ) }; target = Ty::new_ref(self.interner(), region, unsized_ty, mutbl.into()); - adjustments - .push(Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), target }); + adjustments.push(Adjustment { + kind: Adjust::Pointer(PointerCast::Unsize), + target: target.store(), + }); } } Some(probe::AutorefOrPtrAdjustment::ToConstPtr) => { @@ -228,7 +232,7 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> { adjustments.push(Adjustment { kind: Adjust::Pointer(PointerCast::MutToConstPointer), - target, + target: target.store(), }); } None => {} @@ -482,7 +486,7 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> { if self.ctx.unstable_features.arbitrary_self_types { self.ctx.result.type_mismatches.get_or_insert_default().insert( self.expr.into(), - TypeMismatch { expected: method_self_ty, actual: self_ty }, + TypeMismatch { expected: method_self_ty.store(), actual: self_ty.store() }, ); } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution/probe.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution/probe.rs index 6af47ab68bfc5..cb9b810686e34 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution/probe.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution/probe.rs @@ -14,7 +14,7 @@ use rustc_type_ir::{ InferTy, TypeVisitableExt, Upcast, Variance, elaborate::{self, supertrait_def_ids}, fast_reject::{DeepRejectCtxt, TreatParams, simplify_type}, - inherent::{AdtDef as _, BoundExistentialPredicates as _, IntoKind, SliceLike, Ty as _}, + inherent::{AdtDef as _, BoundExistentialPredicates as _, IntoKind, Ty as _}, }; use smallvec::{SmallVec, smallvec}; use tracing::{debug, instrument}; @@ -27,7 +27,7 @@ use crate::{ lower::GenericPredicates, method_resolution::{ CandidateId, CandidateSource, InherentImpls, MethodError, MethodResolutionContext, - incoherent_inherent_impls, simplified_type_module, + simplified_type_module, with_incoherent_inherent_impls, }, next_solver::{ Binder, Canonical, ClauseKind, DbInterner, FnSig, GenericArg, GenericArgs, Goal, ParamEnv, @@ -965,9 +965,11 @@ impl<'a, 'db, Choice: ProbeChoice<'db>> ProbeContext<'a, 'db, Choice> { else { panic!("unexpected incoherent type: {:?}", self_ty) }; - for &impl_def_id in incoherent_inherent_impls(self.db(), simp) { - self.assemble_inherent_impl_probe(impl_def_id, receiver_steps); - } + with_incoherent_inherent_impls(self.db(), self.ctx.resolver.krate(), &simp, |impls| { + for &impl_def_id in impls { + self.assemble_inherent_impl_probe(impl_def_id, receiver_steps); + } + }); } fn assemble_inherent_impl_candidates_for_type( @@ -1975,7 +1977,7 @@ impl<'a, 'db, Choice: ProbeChoice<'db>> ProbeContext<'a, 'db, Choice> { && self.mode == Mode::MethodCall { let sig = self.xform_method_sig(item, args); - (sig.inputs().as_slice()[0], Some(sig.output())) + (sig.inputs()[0], Some(sig.output())) } else { (impl_ty, None) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs 
b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs index 836c20a433485..6642386011089 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs @@ -12,7 +12,7 @@ use hir_def::{ use la_arena::{Arena, ArenaMap, Idx, RawIdx}; use rustc_ast_ir::Mutability; use rustc_hash::FxHashMap; -use rustc_type_ir::inherent::{GenericArgs as _, IntoKind, SliceLike, Ty as _}; +use rustc_type_ir::inherent::{GenericArgs as _, IntoKind, Ty as _}; use smallvec::{SmallVec, smallvec}; use stdx::{impl_from, never}; @@ -23,7 +23,8 @@ use crate::{ display::{DisplayTarget, HirDisplay}, infer::PointerCast, next_solver::{ - Const, DbInterner, ErrorGuaranteed, GenericArgs, ParamEnv, Ty, TyKind, + Const, DbInterner, ErrorGuaranteed, GenericArgs, ParamEnv, StoredConst, StoredGenericArgs, + StoredTy, Ty, TyKind, infer::{InferCtxt, traits::ObligationCause}, obligation_ctxt::ObligationCtxt, }, @@ -49,16 +50,16 @@ pub(crate) use monomorphization::monomorphized_mir_body_cycle_result; use super::consteval::try_const_usize; -pub type BasicBlockId<'db> = Idx>; -pub type LocalId<'db> = Idx>; +pub type BasicBlockId = Idx; +pub type LocalId = Idx; -fn return_slot<'db>() -> LocalId<'db> { +fn return_slot() -> LocalId { LocalId::from_raw(RawIdx::from(0)) } #[derive(Debug, Clone, PartialEq, Eq)] -pub struct Local<'db> { - pub ty: Ty<'db>, +pub struct Local { + pub ty: StoredTy, } /// An operand in MIR represents a "value" in Rust, the definition of which is undecided and part of @@ -80,19 +81,19 @@ pub struct Local<'db> { /// currently implements it, but it seems like this may be something to check against in the /// validator. #[derive(Debug, PartialEq, Eq, Clone)] -pub struct Operand<'db> { - kind: OperandKind<'db>, +pub struct Operand { + kind: OperandKind, // FIXME : This should actually just be of type `MirSpan`. span: Option, } #[derive(Debug, PartialEq, Eq, Clone)] -pub enum OperandKind<'db> { +pub enum OperandKind { /// Creates a value by loading the given place. /// /// Before drop elaboration, the type of the place must be `Copy`. After drop elaboration there /// is no such requirement. - Copy(Place<'db>), + Copy(Place), /// Creates a value by performing loading the place, just like the `Copy` operand. /// @@ -101,21 +102,21 @@ pub enum OperandKind<'db> { /// place without first re-initializing it. /// /// [UCG#188]: https://github.com/rust-lang/unsafe-code-guidelines/issues/188 - Move(Place<'db>), + Move(Place), /// Constants are already semantically values, and remain unchanged. - Constant { konst: Const<'db>, ty: Ty<'db> }, + Constant { konst: StoredConst, ty: StoredTy }, /// NON STANDARD: This kind of operand returns an immutable reference to that static memory. Rustc /// handles it with the `Constant` variant somehow. 
Static(StaticId), } -impl<'db> Operand<'db> { +impl<'db> Operand { fn from_concrete_const(data: Box<[u8]>, memory_map: MemoryMap<'db>, ty: Ty<'db>) -> Self { let interner = DbInterner::conjure(); Operand { kind: OperandKind::Constant { - konst: Const::new_valtree(interner, ty, data, memory_map), - ty, + konst: Const::new_valtree(interner, ty, data, memory_map).store(), + ty: ty.store(), }, span: None, } @@ -125,7 +126,7 @@ impl<'db> Operand<'db> { Operand::from_concrete_const(data, MemoryMap::default(), ty) } - fn const_zst(ty: Ty<'db>) -> Operand<'db> { + fn const_zst(ty: Ty<'db>) -> Operand { Self::from_bytes(Box::default(), ty) } @@ -133,28 +134,28 @@ impl<'db> Operand<'db> { db: &'db dyn HirDatabase, func_id: hir_def::FunctionId, generic_args: GenericArgs<'db>, - ) -> Operand<'db> { + ) -> Operand { let interner = DbInterner::new_no_crate(db); let ty = Ty::new_fn_def(interner, CallableDefId::FunctionId(func_id).into(), generic_args); Operand::from_bytes(Box::default(), ty) } } -#[derive(Debug, Clone, PartialEq, Eq, Hash, salsa::Update)] -pub enum ProjectionElem<'db, V: PartialEq> { +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum ProjectionElem { Deref, Field(Either), // FIXME: get rid of this, and use FieldId for tuples and closures ClosureField(usize), - Index(#[update(unsafe(with(crate::utils::unsafe_update_eq)))] V), + Index(V), ConstantIndex { offset: u64, from_end: bool }, Subslice { from: u64, to: u64 }, //Downcast(Option, VariantIdx), - OpaqueCast(Ty<'db>), + OpaqueCast(StoredTy), } -impl<'db, V: PartialEq> ProjectionElem<'db, V> { - pub fn projected_ty( +impl ProjectionElem { + pub fn projected_ty<'db>( &self, infcx: &InferCtxt<'db>, env: ParamEnv<'db>, @@ -194,7 +195,7 @@ impl<'db, V: PartialEq> ProjectionElem<'db, V> { }, ProjectionElem::Field(Either::Left(f)) => match base.kind() { TyKind::Adt(_, subst) => { - db.field_types(f.parent)[f.local_id].instantiate(interner, subst) + db.field_types(f.parent)[f.local_id].get().instantiate(interner, subst) } ty => { never!("Only adt has field, found {:?}", ty); @@ -253,18 +254,18 @@ impl<'db, V: PartialEq> ProjectionElem<'db, V> { } } -type PlaceElem<'db> = ProjectionElem<'db, LocalId<'db>>; +type PlaceElem = ProjectionElem; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct ProjectionId(u32); #[derive(Debug, Clone, PartialEq, Eq)] -pub struct ProjectionStore<'db> { - id_to_proj: FxHashMap]>>, - proj_to_id: FxHashMap]>, ProjectionId>, +pub struct ProjectionStore { + id_to_proj: FxHashMap>, + proj_to_id: FxHashMap, ProjectionId>, } -impl Default for ProjectionStore<'_> { +impl Default for ProjectionStore { fn default() -> Self { let mut this = Self { id_to_proj: Default::default(), proj_to_id: Default::default() }; // Ensure that [] will get the id 0 which is used in `ProjectionId::Empty` @@ -273,17 +274,17 @@ impl Default for ProjectionStore<'_> { } } -impl<'db> ProjectionStore<'db> { +impl ProjectionStore { pub fn shrink_to_fit(&mut self) { self.id_to_proj.shrink_to_fit(); self.proj_to_id.shrink_to_fit(); } - pub fn intern_if_exist(&self, projection: &[PlaceElem<'db>]) -> Option { + pub fn intern_if_exist(&self, projection: &[PlaceElem]) -> Option { self.proj_to_id.get(projection).copied() } - pub fn intern(&mut self, projection: Box<[PlaceElem<'db>]>) -> ProjectionId { + pub fn intern(&mut self, projection: Box<[PlaceElem]>) -> ProjectionId { let new_id = ProjectionId(self.proj_to_id.len() as u32); match self.proj_to_id.entry(projection) { Entry::Occupied(id) => *id.get(), @@ -304,15 +305,11 @@ impl ProjectionId 
{ self == ProjectionId::EMPTY } - pub fn lookup<'a, 'db>(self, store: &'a ProjectionStore<'db>) -> &'a [PlaceElem<'db>] { + pub fn lookup(self, store: &ProjectionStore) -> &[PlaceElem] { store.id_to_proj.get(&self).unwrap() } - pub fn project<'db>( - self, - projection: PlaceElem<'db>, - store: &mut ProjectionStore<'db>, - ) -> ProjectionId { + pub fn project(self, projection: PlaceElem, store: &mut ProjectionStore) -> ProjectionId { let mut current = self.lookup(store).to_vec(); current.push(projection); store.intern(current.into()) @@ -320,13 +317,13 @@ impl ProjectionId { } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct Place<'db> { - pub local: LocalId<'db>, +pub struct Place { + pub local: LocalId, pub projection: ProjectionId, } -impl<'db> Place<'db> { - fn is_parent(&self, child: &Place<'db>, store: &ProjectionStore<'db>) -> bool { +impl Place { + fn is_parent(&self, child: &Place, store: &ProjectionStore) -> bool { self.local == child.local && child.projection.lookup(store).starts_with(self.projection.lookup(store)) } @@ -334,39 +331,39 @@ impl<'db> Place<'db> { /// The place itself is not included fn iterate_over_parents<'a>( &'a self, - store: &'a ProjectionStore<'db>, - ) -> impl Iterator> + 'a { + store: &'a ProjectionStore, + ) -> impl Iterator + 'a { let projection = self.projection.lookup(store); (0..projection.len()).map(|x| &projection[0..x]).filter_map(move |x| { Some(Place { local: self.local, projection: store.intern_if_exist(x)? }) }) } - fn project(&self, projection: PlaceElem<'db>, store: &mut ProjectionStore<'db>) -> Place<'db> { + fn project(&self, projection: PlaceElem, store: &mut ProjectionStore) -> Place { Place { local: self.local, projection: self.projection.project(projection, store) } } } -impl<'db> From> for Place<'db> { - fn from(local: LocalId<'db>) -> Self { +impl From for Place { + fn from(local: LocalId) -> Self { Self { local, projection: ProjectionId::EMPTY } } } #[derive(Debug, PartialEq, Eq, Clone)] -pub enum AggregateKind<'db> { +pub enum AggregateKind { /// The type is of the element - Array(Ty<'db>), + Array(StoredTy), /// The type is of the tuple - Tuple(Ty<'db>), - Adt(VariantId, GenericArgs<'db>), + Tuple(StoredTy), + Adt(VariantId, StoredGenericArgs), Union(UnionId, FieldId), - Closure(Ty<'db>), + Closure(StoredTy), //Coroutine(LocalDefId, SubstsRef, Movability), } #[derive(Debug, Clone, Hash, PartialEq, Eq)] -pub struct SwitchTargets<'db> { +pub struct SwitchTargets { /// Possible values. The locations to branch to in each case /// are found in the corresponding indices from the `targets` vector. values: SmallVec<[u128; 1]>, @@ -383,17 +380,17 @@ pub struct SwitchTargets<'db> { // // However we’ve decided to keep this as-is until we figure a case // where some other approach seems to be strictly better than other. - targets: SmallVec<[BasicBlockId<'db>; 2]>, + targets: SmallVec<[BasicBlockId; 2]>, } -impl<'db> SwitchTargets<'db> { +impl SwitchTargets { /// Creates switch targets from an iterator of values and target blocks. /// /// The iterator may be empty, in which case the `SwitchInt` instruction is equivalent to /// `goto otherwise;`. 
pub fn new( - targets: impl Iterator)>, - otherwise: BasicBlockId<'db>, + targets: impl Iterator, + otherwise: BasicBlockId, ) -> Self { let (values, mut targets): (SmallVec<_>, SmallVec<_>) = targets.unzip(); targets.push(otherwise); @@ -402,12 +399,12 @@ impl<'db> SwitchTargets<'db> { /// Builds a switch targets definition that jumps to `then` if the tested value equals `value`, /// and to `else_` if not. - pub fn static_if(value: u128, then: BasicBlockId<'db>, else_: BasicBlockId<'db>) -> Self { + pub fn static_if(value: u128, then: BasicBlockId, else_: BasicBlockId) -> Self { Self { values: smallvec![value], targets: smallvec![then, else_] } } /// Returns the fallback target that is jumped to when none of the values match the operand. - pub fn otherwise(&self) -> BasicBlockId<'db> { + pub fn otherwise(&self) -> BasicBlockId { *self.targets.last().unwrap() } @@ -417,33 +414,33 @@ impl<'db> SwitchTargets<'db> { /// including the `otherwise` fallback target. /// /// Note that this may yield 0 elements. Only the `otherwise` branch is mandatory. - pub fn iter(&self) -> impl Iterator)> + '_ { + pub fn iter(&self) -> impl Iterator + '_ { iter::zip(&self.values, &self.targets).map(|(x, y)| (*x, *y)) } /// Returns a slice with all possible jump targets (including the fallback target). - pub fn all_targets(&self) -> &[BasicBlockId<'db>] { + pub fn all_targets(&self) -> &[BasicBlockId] { &self.targets } /// Finds the `BasicBlock` to which this `SwitchInt` will branch given the /// specific value. This cannot fail, as it'll return the `otherwise` /// branch if there's not a specific match for the value. - pub fn target_for_value(&self, value: u128) -> BasicBlockId<'db> { + pub fn target_for_value(&self, value: u128) -> BasicBlockId { self.iter().find_map(|(v, t)| (v == value).then_some(t)).unwrap_or_else(|| self.otherwise()) } } #[derive(Debug, PartialEq, Eq, Clone)] -pub struct Terminator<'db> { +pub struct Terminator { pub span: MirSpan, - pub kind: TerminatorKind<'db>, + pub kind: TerminatorKind, } #[derive(Debug, PartialEq, Eq, Clone)] -pub enum TerminatorKind<'db> { +pub enum TerminatorKind { /// Block has one successor; we continue execution there. - Goto { target: BasicBlockId<'db> }, + Goto { target: BasicBlockId }, /// Switches based on the computed value. /// @@ -455,9 +452,9 @@ pub enum TerminatorKind<'db> { /// Target values may not appear more than once. SwitchInt { /// The discriminant value being tested. - discr: Operand<'db>, + discr: Operand, - targets: SwitchTargets<'db>, + targets: SwitchTargets, }, /// Indicates that the landing pad is finished and that the process should continue unwinding. @@ -508,7 +505,7 @@ pub enum TerminatorKind<'db> { /// > The drop glue is executed if, among all statements executed within this `Body`, an assignment to /// > the place or one of its "parents" occurred more recently than a move out of it. This does not /// > consider indirect assignments. - Drop { place: Place<'db>, target: BasicBlockId<'db>, unwind: Option> }, + Drop { place: Place, target: BasicBlockId, unwind: Option }, /// Drops the place and assigns a new value to it. /// @@ -541,10 +538,10 @@ pub enum TerminatorKind<'db> { /// /// Disallowed after drop elaboration. 
DropAndReplace { - place: Place<'db>, - value: Operand<'db>, - target: BasicBlockId<'db>, - unwind: Option>, + place: Place, + value: Operand, + target: BasicBlockId, + unwind: Option, }, /// Roughly speaking, evaluates the `func` operand and the arguments, and starts execution of @@ -559,18 +556,18 @@ pub enum TerminatorKind<'db> { /// [#71117]: https://github.com/rust-lang/rust/issues/71117 Call { /// The function that’s being called. - func: Operand<'db>, + func: Operand, /// Arguments the function is called with. /// These are owned by the callee, which is free to modify them. /// This allows the memory occupied by "by-value" arguments to be /// reused across function calls without duplicating the contents. - args: Box<[Operand<'db>]>, + args: Box<[Operand]>, /// Where the returned value will be written - destination: Place<'db>, + destination: Place, /// Where to go after this call returns. If none, the call necessarily diverges. - target: Option>, + target: Option, /// Cleanups to be done if the call unwinds. - cleanup: Option>, + cleanup: Option, /// `true` if this is from a call in HIR rather than from an overloaded /// operator. True for overloaded function call. from_hir_call: bool, @@ -586,11 +583,11 @@ pub enum TerminatorKind<'db> { /// necessarily executed even in the case of a panic, for example in `-C panic=abort`. If the /// assertion does not fail, execution continues at the specified basic block. Assert { - cond: Operand<'db>, + cond: Operand, expected: bool, //msg: AssertMessage, - target: BasicBlockId<'db>, - cleanup: Option>, + target: BasicBlockId, + cleanup: Option, }, /// Marks a suspend point. @@ -607,13 +604,13 @@ pub enum TerminatorKind<'db> { /// **Needs clarification**: What about the evaluation order of the `resume_arg` and `value`? Yield { /// The value to return. - value: Operand<'db>, + value: Operand, /// Where to resume to. - resume: BasicBlockId<'db>, + resume: BasicBlockId, /// The place to store the resume argument in. - resume_arg: Place<'db>, + resume_arg: Place, /// Cleanup to be done if the coroutine is dropped at this suspend point. - drop: Option>, + drop: Option, }, /// Indicates the end of dropping a coroutine. @@ -636,10 +633,10 @@ pub enum TerminatorKind<'db> { /// Disallowed after drop elaboration. FalseEdge { /// The target normal control flow will take. - real_target: BasicBlockId<'db>, + real_target: BasicBlockId, /// A block control flow could conceptually jump to, but won't in /// practice. - imaginary_target: BasicBlockId<'db>, + imaginary_target: BasicBlockId, }, /// A terminator for blocks that only take one path in reality, but where we reserve the right @@ -651,14 +648,14 @@ pub enum TerminatorKind<'db> { /// Disallowed after drop elaboration. FalseUnwind { /// The target normal control flow will take. - real_target: BasicBlockId<'db>, + real_target: BasicBlockId, /// The imaginary cleanup block link. This particular path will never be taken /// in practice, but in order to avoid fragility we want to always /// consider it in borrowck. We don't want to accept programs which /// pass borrowck only when `panic=abort` or some assertions are disabled /// due to release vs. debug mode builds. This needs to be an `Option` because /// of the `remove_noop_landing_pads` and `abort_unwinding_calls` passes. 
- unwind: Option>, + unwind: Option, }, } @@ -845,8 +842,8 @@ impl From for BinOp { } } -impl<'db> From> for Rvalue<'db> { - fn from(x: Operand<'db>) -> Self { +impl From for Rvalue { + fn from(x: Operand) -> Self { Self::Use(x) } } @@ -875,14 +872,14 @@ pub enum CastKind { } #[derive(Debug, PartialEq, Eq, Clone)] -pub enum Rvalue<'db> { +pub enum Rvalue { /// Yields the operand unchanged - Use(Operand<'db>), + Use(Operand), /// Creates an array where each element is the value of the operand. /// /// Corresponds to source code like `[x; 32]`. - Repeat(Operand<'db>, Const<'db>), + Repeat(Operand, StoredConst), /// Creates a reference of the indicated kind to the place. /// @@ -891,7 +888,7 @@ pub enum Rvalue<'db> { /// exactly what the behavior of this operation should be. /// /// `Shallow` borrows are disallowed after drop lowering. - Ref(BorrowKind, Place<'db>), + Ref(BorrowKind, Place), /// Creates a pointer/reference to the given thread local. /// @@ -922,7 +919,7 @@ pub enum Rvalue<'db> { /// If the type of the place is an array, this is the array length. For slices (`[T]`, not /// `&[T]`) this accesses the place's metadata to determine the length. This rvalue is /// ill-formed for places of other types. - Len(Place<'db>), + Len(Place), /// Performs essentially all of the casts that can be performed via `as`. /// @@ -930,7 +927,7 @@ pub enum Rvalue<'db> { /// /// **FIXME**: Document exactly which `CastKind`s allow which types of casts. Figure out why /// `ArrayToPointer` and `MutToConstPointer` are special. - Cast(CastKind, Operand<'db>, Ty<'db>), + Cast(CastKind, Operand, StoredTy), // FIXME link to `pointer::offset` when it hits stable. /// * `Offset` has the same semantics as `pointer::offset`, except that the second @@ -962,7 +959,7 @@ pub enum Rvalue<'db> { /// when the value of right-hand side is negative. /// /// Other combinations of types and operators are unsupported. - CheckedBinaryOp(BinOp, Operand<'db>, Operand<'db>), + CheckedBinaryOp(BinOp, Operand, Operand), /// Computes a value as described by the operation. //NullaryOp(NullOp, Ty), @@ -973,7 +970,7 @@ pub enum Rvalue<'db> { /// Also does two's-complement arithmetic. Negation requires a signed integer or a float; /// bitwise not requires a signed integer, unsigned integer, or bool. Both operation kinds /// return a value with the same type as their operand. - UnaryOp(UnOp, Operand<'db>), + UnaryOp(UnOp, Operand), /// Computes the discriminant of the place, returning it as an integer of type /// `discriminant_ty`. Returns zero for types without discriminant. @@ -983,7 +980,7 @@ pub enum Rvalue<'db> { /// variant index; use `discriminant_for_variant` to convert. /// /// [#91095]: https://github.com/rust-lang/rust/issues/91095 - Discriminant(Place<'db>), + Discriminant(Place), /// Creates an aggregate value, like a tuple or struct. /// @@ -993,17 +990,17 @@ pub enum Rvalue<'db> { /// /// Disallowed after deaggregation for all aggregate kinds except `Array` and `Coroutine`. After /// coroutine lowering, `Coroutine` aggregate kinds are disallowed too. - Aggregate(AggregateKind<'db>, Box<[Operand<'db>]>), + Aggregate(AggregateKind, Box<[Operand]>), /// Transmutes a `*mut u8` into shallow-initialized `Box`. /// /// This is different from a normal transmute because dataflow analysis will treat the box as /// initialized but its content as uninitialized. Like other pointer casts, this in general /// affects alias analysis. 
- ShallowInitBox(Operand<'db>, Ty<'db>), + ShallowInitBox(Operand, StoredTy), /// NON STANDARD: allocates memory with the type's layout, and shallow init the box with the resulting pointer. - ShallowInitBoxWithAlloc(Ty<'db>), + ShallowInitBoxWithAlloc(StoredTy), /// A CopyForDeref is equivalent to a read from a place at the /// codegen level, but is treated specially by drop elaboration. When such a read happens, it @@ -1013,41 +1010,41 @@ pub enum Rvalue<'db> { /// read never happened and just projects further. This allows simplifying various MIR /// optimizations and codegen backends that previously had to handle deref operations anywhere /// in a place. - CopyForDeref(Place<'db>), + CopyForDeref(Place), } #[derive(Debug, PartialEq, Eq, Clone)] -pub enum StatementKind<'db> { - Assign(Place<'db>, Rvalue<'db>), - FakeRead(Place<'db>), +pub enum StatementKind { + Assign(Place, Rvalue), + FakeRead(Place), //SetDiscriminant { // place: Box, // variant_index: VariantIdx, //}, - Deinit(Place<'db>), - StorageLive(LocalId<'db>), - StorageDead(LocalId<'db>), + Deinit(Place), + StorageLive(LocalId), + StorageDead(LocalId), //Retag(RetagKind, Box), //AscribeUserType(Place, UserTypeProjection, Variance), //Intrinsic(Box), Nop, } -impl<'db> StatementKind<'db> { - fn with_span(self, span: MirSpan) -> Statement<'db> { +impl StatementKind { + fn with_span(self, span: MirSpan) -> Statement { Statement { kind: self, span } } } #[derive(Debug, PartialEq, Eq, Clone)] -pub struct Statement<'db> { - pub kind: StatementKind<'db>, +pub struct Statement { + pub kind: StatementKind, pub span: MirSpan, } #[derive(Debug, Default, Clone, PartialEq, Eq)] -pub struct BasicBlock<'db> { +pub struct BasicBlock { /// List of statements in this block. - pub statements: Vec>, + pub statements: Vec, /// Terminator for this block. /// @@ -1057,7 +1054,7 @@ pub struct BasicBlock<'db> { /// exception is that certain passes, such as `simplify_cfg`, swap /// out the terminator temporarily with `None` while they continue /// to recurse over the set of basic blocks. - pub terminator: Option>, + pub terminator: Option, /// If true, this block lies on an unwind path. This is used /// during codegen where distinct kinds of basic blocks may be @@ -1067,29 +1064,29 @@ pub struct BasicBlock<'db> { } #[derive(Debug, Clone, PartialEq, Eq)] -pub struct MirBody<'db> { - pub projection_store: ProjectionStore<'db>, - pub basic_blocks: Arena>, - pub locals: Arena>, - pub start_block: BasicBlockId<'db>, +pub struct MirBody { + pub projection_store: ProjectionStore, + pub basic_blocks: Arena, + pub locals: Arena, + pub start_block: BasicBlockId, pub owner: DefWithBodyId, - pub binding_locals: ArenaMap>, - pub param_locals: Vec>, + pub binding_locals: ArenaMap, + pub param_locals: Vec, /// This field stores the closures directly owned by this body. It is used /// in traversing every mir body. 
pub closures: Vec, } -impl<'db> MirBody<'db> { - pub fn local_to_binding_map(&self) -> ArenaMap, BindingId> { +impl MirBody { + pub fn local_to_binding_map(&self) -> ArenaMap { self.binding_locals.iter().map(|(it, y)| (*y, it)).collect() } - fn walk_places(&mut self, mut f: impl FnMut(&mut Place<'db>, &mut ProjectionStore<'db>)) { - fn for_operand<'db>( - op: &mut Operand<'db>, - f: &mut impl FnMut(&mut Place<'db>, &mut ProjectionStore<'db>), - store: &mut ProjectionStore<'db>, + fn walk_places(&mut self, mut f: impl FnMut(&mut Place, &mut ProjectionStore)) { + fn for_operand( + op: &mut Operand, + f: &mut impl FnMut(&mut Place, &mut ProjectionStore), + store: &mut ProjectionStore, ) { match &mut op.kind { OperandKind::Copy(p) | OperandKind::Move(p) => { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs index b39c9bc06559e..941b6c75bfe74 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs @@ -17,7 +17,7 @@ use crate::{ display::DisplayTarget, mir::OperandKind, next_solver::{ - DbInterner, GenericArgs, ParamEnv, Ty, TypingMode, + DbInterner, GenericArgs, ParamEnv, StoredTy, Ty, TypingMode, infer::{DbInternerInferExt, InferCtxt}, }, }; @@ -36,44 +36,44 @@ pub enum MutabilityReason { } #[derive(Debug, Clone, PartialEq, Eq)] -pub struct MovedOutOfRef<'db> { - pub ty: Ty<'db>, +pub struct MovedOutOfRef { + pub ty: StoredTy, pub span: MirSpan, } #[derive(Debug, Clone, PartialEq, Eq)] -pub struct PartiallyMoved<'db> { - pub ty: Ty<'db>, +pub struct PartiallyMoved { + pub ty: StoredTy, pub span: MirSpan, - pub local: LocalId<'db>, + pub local: LocalId, } #[derive(Debug, Clone, PartialEq, Eq)] -pub struct BorrowRegion<'db> { - pub local: LocalId<'db>, +pub struct BorrowRegion { + pub local: LocalId, pub kind: BorrowKind, pub places: Vec, } #[derive(Debug, Clone, PartialEq, Eq)] -pub struct BorrowckResult<'db> { - pub mir_body: Arc>, - pub mutability_of_locals: ArenaMap, MutabilityReason>, - pub moved_out_of_ref: Vec>, - pub partially_moved: Vec>, - pub borrow_regions: Vec>, +pub struct BorrowckResult { + pub mir_body: Arc, + pub mutability_of_locals: ArenaMap, + pub moved_out_of_ref: Vec, + pub partially_moved: Vec, + pub borrow_regions: Vec, } -fn all_mir_bodies<'db>( - db: &'db dyn HirDatabase, +fn all_mir_bodies( + db: &dyn HirDatabase, def: DefWithBodyId, - mut cb: impl FnMut(Arc>), -) -> Result<(), MirLowerError<'db>> { - fn for_closure<'db>( - db: &'db dyn HirDatabase, + mut cb: impl FnMut(Arc), +) -> Result<(), MirLowerError> { + fn for_closure( + db: &dyn HirDatabase, c: InternedClosureId, - cb: &mut impl FnMut(Arc>), - ) -> Result<(), MirLowerError<'db>> { + cb: &mut impl FnMut(Arc), + ) -> Result<(), MirLowerError> { match db.mir_body_for_closure(c) { Ok(body) => { cb(body.clone()); @@ -91,10 +91,10 @@ fn all_mir_bodies<'db>( } } -pub fn borrowck_query<'db>( - db: &'db dyn HirDatabase, +pub fn borrowck_query( + db: &dyn HirDatabase, def: DefWithBodyId, -) -> Result]>, MirLowerError<'db>> { +) -> Result, MirLowerError> { let _p = tracing::info_span!("borrowck_query").entered(); let module = def.module(db); let interner = DbInterner::new_with(db, module.krate(db)); @@ -125,20 +125,20 @@ fn make_fetch_closure_field<'db>( let (captures, _) = infer.closure_info(c); let parent_subst = subst.split_closure_args_untupled().parent_args; let interner = DbInterner::new_no_crate(db); - captures.get(f).expect("broken closure 
field").ty.instantiate(interner, parent_subst) + captures.get(f).expect("broken closure field").ty.get().instantiate(interner, parent_subst) } } fn moved_out_of_ref<'db>( infcx: &InferCtxt<'db>, env: ParamEnv<'db>, - body: &MirBody<'db>, -) -> Vec> { + body: &MirBody, +) -> Vec { let db = infcx.interner.db; let mut result = vec![]; - let mut for_operand = |op: &Operand<'db>, span: MirSpan| match op.kind { + let mut for_operand = |op: &Operand, span: MirSpan| match op.kind { OperandKind::Copy(p) | OperandKind::Move(p) => { - let mut ty: Ty<'db> = body.locals[p.local].ty; + let mut ty: Ty<'db> = body.locals[p.local].ty.as_ref(); let mut is_dereference_of_ref = false; for proj in p.projection.lookup(&body.projection_store) { if *proj == ProjectionElem::Deref && ty.as_reference().is_some() { @@ -156,7 +156,7 @@ fn moved_out_of_ref<'db>( && !infcx.type_is_copy_modulo_regions(env, ty) && !ty.references_non_lt_error() { - result.push(MovedOutOfRef { span: op.span.unwrap_or(span), ty }); + result.push(MovedOutOfRef { span: op.span.unwrap_or(span), ty: ty.store() }); } } OperandKind::Constant { .. } | OperandKind::Static(_) => (), @@ -233,13 +233,13 @@ fn moved_out_of_ref<'db>( fn partially_moved<'db>( infcx: &InferCtxt<'db>, env: ParamEnv<'db>, - body: &MirBody<'db>, -) -> Vec> { + body: &MirBody, +) -> Vec { let db = infcx.interner.db; let mut result = vec![]; - let mut for_operand = |op: &Operand<'db>, span: MirSpan| match op.kind { + let mut for_operand = |op: &Operand, span: MirSpan| match op.kind { OperandKind::Copy(p) | OperandKind::Move(p) => { - let mut ty: Ty<'db> = body.locals[p.local].ty; + let mut ty: Ty<'db> = body.locals[p.local].ty.as_ref(); for proj in p.projection.lookup(&body.projection_store) { ty = proj.projected_ty( infcx, @@ -250,7 +250,7 @@ fn partially_moved<'db>( ); } if !infcx.type_is_copy_modulo_regions(env, ty) && !ty.references_non_lt_error() { - result.push(PartiallyMoved { span, ty, local: p.local }); + result.push(PartiallyMoved { span, ty: ty.store(), local: p.local }); } } OperandKind::Constant { .. } | OperandKind::Static(_) => (), @@ -324,7 +324,7 @@ fn partially_moved<'db>( result } -fn borrow_regions<'db>(db: &'db dyn HirDatabase, body: &MirBody<'db>) -> Vec> { +fn borrow_regions(db: &dyn HirDatabase, body: &MirBody) -> Vec { let mut borrows = FxHashMap::default(); for (_, block) in body.basic_blocks.iter() { db.unwind_if_revision_cancelled(); @@ -332,7 +332,7 @@ fn borrow_regions<'db>(db: &'db dyn HirDatabase, body: &MirBody<'db>) -> Vec| { + .and_modify(|it: &mut BorrowRegion| { it.places.push(statement.span); }) .or_insert_with(|| BorrowRegion { @@ -377,12 +377,12 @@ enum ProjectionCase { fn place_case<'db>( infcx: &InferCtxt<'db>, env: ParamEnv<'db>, - body: &MirBody<'db>, - lvalue: &Place<'db>, + body: &MirBody, + lvalue: &Place, ) -> ProjectionCase { let db = infcx.interner.db; let mut is_part_of = false; - let mut ty = body.locals[lvalue.local].ty; + let mut ty = body.locals[lvalue.local].ty.as_ref(); for proj in lvalue.projection.lookup(&body.projection_store).iter() { match proj { ProjectionElem::Deref if ty.as_adt().is_none() => return ProjectionCase::Indirect, // It's indirect in case of reference and raw @@ -410,18 +410,18 @@ fn place_case<'db>( /// Returns a map from basic blocks to the set of locals that might be ever initialized before /// the start of the block. Only `StorageDead` can remove something from this map, and we ignore /// `Uninit` and `drop` and similar after initialization. 
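The comment above describes a may-initialized analysis, and the function that follows implements it over the real MIR arenas. As a rough illustration of the idea only (toy CFG and hypothetical types, not the actual query), a path-sensitive walk where an assignment sets the flag and only `StorageDead` clears it again:

```rust
use std::collections::HashSet;

#[derive(Clone, Copy)]
enum Stmt {
    Assign(usize),      // local index
    StorageDead(usize), // local index
}

struct Block {
    stmts: Vec<Stmt>,
    succs: Vec<usize>,
}

// For one local, compute whether it *might* be initialized at each block's entry.
fn ever_initialized(blocks: &[Block], local: usize) -> Vec<bool> {
    let mut at_entry = vec![false; blocks.len()];
    let mut visited = HashSet::new();
    let mut stack = vec![(0usize, false)]; // (block, initialized on this path?)
    while let Some((b, mut init)) = stack.pop() {
        if !visited.insert((b, init)) {
            continue; // already explored this block with this path state
        }
        at_entry[b] |= init;
        for stmt in &blocks[b].stmts {
            match *stmt {
                Stmt::Assign(l) if l == local => init = true,
                Stmt::StorageDead(l) if l == local => init = false,
                _ => {}
            }
        }
        for &succ in &blocks[b].succs {
            stack.push((succ, init));
        }
    }
    at_entry
}

fn main() {
    // bb0: _1 = ...; branches to bb1 and bb2.  bb1: StorageDead(_1); goto bb2.
    let blocks = vec![
        Block { stmts: vec![Stmt::Assign(1)], succs: vec![1, 2] },
        Block { stmts: vec![Stmt::StorageDead(1)], succs: vec![2] },
        Block { stmts: vec![], succs: vec![] },
    ];
    assert_eq!(ever_initialized(&blocks, 1), vec![false, true, true]);
}
```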
-fn ever_initialized_map<'db>( - db: &'db dyn HirDatabase, - body: &MirBody<'db>, -) -> ArenaMap, ArenaMap, bool>> { - let mut result: ArenaMap, ArenaMap, bool>> = +fn ever_initialized_map( + db: &dyn HirDatabase, + body: &MirBody, +) -> ArenaMap> { + let mut result: ArenaMap> = body.basic_blocks.iter().map(|it| (it.0, ArenaMap::default())).collect(); - fn dfs<'db>( - db: &'db dyn HirDatabase, - body: &MirBody<'db>, - l: LocalId<'db>, - stack: &mut Vec>, - result: &mut ArenaMap, ArenaMap, bool>>, + fn dfs( + db: &dyn HirDatabase, + body: &MirBody, + l: LocalId, + stack: &mut Vec, + result: &mut ArenaMap>, ) { while let Some(b) = stack.pop() { let mut is_ever_initialized = result[b][l]; // It must be filled, as we use it as mark for dfs @@ -509,11 +509,7 @@ fn ever_initialized_map<'db>( result } -fn push_mut_span<'db>( - local: LocalId<'db>, - span: MirSpan, - result: &mut ArenaMap, MutabilityReason>, -) { +fn push_mut_span(local: LocalId, span: MirSpan, result: &mut ArenaMap) { match &mut result[local] { MutabilityReason::Mut { spans } => spans.push(span), it @ (MutabilityReason::Not | MutabilityReason::Unused) => { @@ -522,16 +518,13 @@ fn push_mut_span<'db>( }; } -fn record_usage<'db>(local: LocalId<'db>, result: &mut ArenaMap, MutabilityReason>) { +fn record_usage(local: LocalId, result: &mut ArenaMap) { if let it @ MutabilityReason::Unused = &mut result[local] { *it = MutabilityReason::Not; }; } -fn record_usage_for_operand<'db>( - arg: &Operand<'db>, - result: &mut ArenaMap, MutabilityReason>, -) { +fn record_usage_for_operand(arg: &Operand, result: &mut ArenaMap) { if let OperandKind::Copy(p) | OperandKind::Move(p) = arg.kind { record_usage(p.local, result); } @@ -540,10 +533,10 @@ fn record_usage_for_operand<'db>( fn mutability_of_locals<'db>( infcx: &InferCtxt<'db>, env: ParamEnv<'db>, - body: &MirBody<'db>, -) -> ArenaMap, MutabilityReason> { + body: &MirBody, +) -> ArenaMap { let db = infcx.interner.db; - let mut result: ArenaMap, MutabilityReason> = + let mut result: ArenaMap = body.locals.iter().map(|it| (it.0, MutabilityReason::Unused)).collect(); let ever_init_maps = ever_initialized_map(db, body); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs index 3b4913cae3fb7..c7156bb11ed63 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs @@ -17,6 +17,7 @@ use hir_def::{ use hir_expand::{InFile, mod_path::path, name::Name}; use intern::sym; use la_arena::ArenaMap; +use macros::GenericTypeVisitable; use rustc_abi::TargetDataLayout; use rustc_apfloat::{ Float, @@ -42,8 +43,8 @@ use crate::{ layout::{Layout, LayoutError, RustcEnumVariantIdx}, method_resolution::{is_dyn_method, lookup_impl_const}, next_solver::{ - Const, ConstBytes, ConstKind, DbInterner, ErrorGuaranteed, GenericArgs, Region, Ty, TyKind, - TypingMode, UnevaluatedConst, ValueConst, + Const, ConstBytes, ConstKind, DbInterner, ErrorGuaranteed, GenericArgs, Region, + StoredConst, StoredTy, Ty, TyKind, TypingMode, UnevaluatedConst, ValueConst, infer::{DbInternerInferExt, InferCtxt, traits::ObligationCause}, obligation_ctxt::ObligationCtxt, }, @@ -83,7 +84,7 @@ macro_rules! 
not_supported { }; } -#[derive(Debug, Default, Clone, PartialEq, Eq)] +#[derive(Debug, Default, Clone, PartialEq, Eq, GenericTypeVisitable)] pub struct VTableMap<'db> { ty_to_id: FxHashMap, usize>, id_to_ty: Vec>, @@ -150,16 +151,16 @@ impl TlsData { } } -struct StackFrame<'db> { - locals: Locals<'db>, - destination: Option>, +struct StackFrame { + locals: Locals, + destination: Option, prev_stack_ptr: usize, span: (MirSpan, DefWithBodyId), } #[derive(Clone)] -enum MirOrDynIndex<'db> { - Mir(Arc>), +enum MirOrDynIndex { + Mir(Arc), Dyn(usize), } @@ -169,7 +170,7 @@ pub struct Evaluator<'db> { target_data_layout: Arc, stack: Vec, heap: Vec, - code_stack: Vec>, + code_stack: Vec, /// Stores the global location of the statics. We const evaluate every static first time we need it /// and see it's missing, then we add it to this to reuse. static_locations: FxHashMap, @@ -182,13 +183,13 @@ pub struct Evaluator<'db> { stdout: Vec, stderr: Vec, layout_cache: RefCell, Arc>>, - projected_ty_cache: RefCell, PlaceElem<'db>), Ty<'db>>>, + projected_ty_cache: RefCell, PlaceElem), Ty<'db>>>, not_special_fn_cache: RefCell>, - mir_or_dyn_index_cache: RefCell), MirOrDynIndex<'db>>>, - /// Constantly dropping and creating `Locals<'db>` is very costly. We store + mir_or_dyn_index_cache: RefCell), MirOrDynIndex>>, + /// Constantly dropping and creating `Locals` is very costly. We store /// old locals that we normally want to drop here, to reuse their allocations /// later. - unused_locals_store: RefCell>>>, + unused_locals_store: RefCell>>, cached_ptr_size: usize, cached_fn_trait_func: Option, cached_fn_mut_trait_func: Option, @@ -261,7 +262,7 @@ impl<'db> IntervalAndTy<'db> { addr: Address, ty: Ty<'db>, evaluator: &Evaluator<'db>, - locals: &Locals<'db>, + locals: &Locals, ) -> Result<'db, IntervalAndTy<'db>> { let size = evaluator.size_of_sized(ty, locals, "type of interval")?; Ok(IntervalAndTy { interval: Interval { addr, size }, ty }) @@ -340,22 +341,22 @@ impl Address { } #[derive(Clone, PartialEq, Eq)] -pub enum MirEvalError<'db> { - ConstEvalError(String, Box>), - LayoutError(LayoutError, Ty<'db>), +pub enum MirEvalError { + ConstEvalError(String, Box), + LayoutError(LayoutError, StoredTy), TargetDataLayoutNotAvailable(TargetLoadError), /// Means that code had undefined behavior. We don't try to actively detect UB, but if it was detected /// then use this type of error. UndefinedBehavior(String), Panic(String), // FIXME: This should be folded into ConstEvalError? - MirLowerError(FunctionId, MirLowerError<'db>), - MirLowerErrorForClosure(InternedClosureId, MirLowerError<'db>), - TypeIsUnsized(Ty<'db>, &'static str), + MirLowerError(FunctionId, MirLowerError), + MirLowerErrorForClosure(InternedClosureId, MirLowerError), + TypeIsUnsized(StoredTy, &'static str), NotSupported(String), - InvalidConst(Const<'db>), + InvalidConst(StoredConst), InFunction( - Box>, + Box, Vec<(Either, MirSpan, DefWithBodyId)>, ), ExecutionLimitExceeded, @@ -363,12 +364,12 @@ pub enum MirEvalError<'db> { /// FIXME: Fold this into InternalError InvalidVTableId(usize), /// ? - CoerceUnsizedError(Ty<'db>), + CoerceUnsizedError(StoredTy), /// These should not occur, usually indicates a bug in mir lowering. 
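Note how `MirEvalError` in the hunk above switches its payloads from `Ty<'db>`/`Const<'db>` to `StoredTy`/`StoredConst`; that is what lets `impl MirEvalError` and its `Debug` impl, continued just below, drop the `'_` lifetime. A reduced sketch of that effect, with illustrative stand-ins for the real types:

```rust
// Illustrative stand-ins only; the real `Ty<'db>`/`StoredTy` live in the crate.
struct Ty<'db>(&'db str);

#[derive(Debug)]
struct StoredTy(String);

impl<'db> Ty<'db> {
    fn store(&self) -> StoredTy {
        StoredTy(self.0.to_owned())
    }
}

// Because the payload is owned, the error enum needs no lifetime parameter,
// mirroring `TypeIsUnsized(StoredTy, &'static str)` in the hunk above.
#[derive(Debug)]
enum MirEvalError {
    TypeIsUnsized(StoredTy, &'static str),
}

type EvalResult<T> = Result<T, MirEvalError>;

fn size_of_sized(ty: &Ty<'_>, is_sized: bool) -> EvalResult<usize> {
    if is_sized {
        Ok(8)
    } else {
        Err(MirEvalError::TypeIsUnsized(ty.store(), "type of a local"))
    }
}

fn main() {
    let slice_ty = Ty("[u8]");
    let err = size_of_sized(&slice_ty, false).unwrap_err();
    println!("{err:?}");
}
```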
InternalError(Box), } -impl MirEvalError<'_> { +impl MirEvalError { pub fn pretty_print( &self, f: &mut String, @@ -432,7 +433,9 @@ impl MirEvalError<'_> { write!( f, "Layout for type `{}` is not available due {err:?}", - ty.display(db, display_target).with_closure_style(ClosureStyle::ClosureWithId) + ty.as_ref() + .display(db, display_target) + .with_closure_style(ClosureStyle::ClosureWithId) )?; } MirEvalError::MirLowerError(func, err) => { @@ -495,7 +498,7 @@ impl MirEvalError<'_> { } } -impl std::fmt::Debug for MirEvalError<'_> { +impl std::fmt::Debug for MirEvalError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Self::ConstEvalError(arg0, arg1) => { @@ -534,15 +537,15 @@ impl std::fmt::Debug for MirEvalError<'_> { } } -type Result<'db, T> = std::result::Result>; +type Result<'db, T> = std::result::Result; #[derive(Debug, Default)] -struct DropFlags<'db> { - need_drop: FxHashSet>, +struct DropFlags { + need_drop: FxHashSet, } -impl<'db> DropFlags<'db> { - fn add_place(&mut self, p: Place<'db>, store: &ProjectionStore<'db>) { +impl DropFlags { + fn add_place(&mut self, p: Place, store: &ProjectionStore) { if p.iterate_over_parents(store).any(|it| self.need_drop.contains(&it)) { return; } @@ -550,7 +553,7 @@ impl<'db> DropFlags<'db> { self.need_drop.insert(p); } - fn remove_place(&mut self, p: &Place<'db>, store: &ProjectionStore<'db>) -> bool { + fn remove_place(&mut self, p: &Place, store: &ProjectionStore) -> bool { // FIXME: replace parents with parts if let Some(parent) = p.iterate_over_parents(store).find(|it| self.need_drop.contains(it)) { self.need_drop.remove(&parent); @@ -565,10 +568,10 @@ impl<'db> DropFlags<'db> { } #[derive(Debug)] -struct Locals<'db> { - ptr: ArenaMap, Interval>, - body: Arc>, - drop_flags: DropFlags<'db>, +struct Locals { + ptr: ArenaMap, + body: Arc, + drop_flags: DropFlags, } pub struct MirOutput { @@ -587,7 +590,7 @@ impl MirOutput { pub fn interpret_mir<'db>( db: &'db dyn HirDatabase, - body: Arc>, + body: Arc, // FIXME: This is workaround. Ideally, const generics should have a separate body (issue #7434), but now // they share their body with their parent, so in MIR lowering we have locals of the parent body, which // might have placeholders. 
With this argument, we (wrongly) assume that every placeholder type has @@ -596,7 +599,7 @@ pub fn interpret_mir<'db>( assert_placeholder_ty_is_unused: bool, trait_env: Option>, ) -> Result<'db, (Result<'db, Const<'db>>, MirOutput)> { - let ty = body.locals[return_slot()].ty; + let ty = body.locals[return_slot()].ty.as_ref(); let mut evaluator = Evaluator::new(db, body.owner, assert_placeholder_ty_is_unused, trait_env)?; let it: Result<'db, Const<'db>> = (|| { if evaluator.ptr_size() != size_of::() { @@ -694,11 +697,11 @@ impl<'db> Evaluator<'db> { self.infcx.interner.lang_items() } - fn place_addr(&self, p: &Place<'db>, locals: &Locals<'db>) -> Result<'db, Address> { + fn place_addr(&self, p: &Place, locals: &Locals) -> Result<'db, Address> { Ok(self.place_addr_and_ty_and_metadata(p, locals)?.0) } - fn place_interval(&self, p: &Place<'db>, locals: &Locals<'db>) -> Result<'db, Interval> { + fn place_interval(&self, p: &Place, locals: &Locals) -> Result<'db, Interval> { let place_addr_and_ty = self.place_addr_and_ty_and_metadata(p, locals)?; Ok(Interval { addr: place_addr_and_ty.0, @@ -714,7 +717,7 @@ impl<'db> Evaluator<'db> { self.cached_ptr_size } - fn projected_ty(&self, ty: Ty<'db>, proj: PlaceElem<'db>) -> Ty<'db> { + fn projected_ty(&self, ty: Ty<'db>, proj: PlaceElem) -> Ty<'db> { let pair = (ty, proj); if let Some(r) = self.projected_ty_cache.borrow().get(&pair) { return *r; @@ -733,6 +736,7 @@ impl<'db> Evaluator<'db> { .get(f) .expect("broken closure field") .ty + .get() .instantiate(self.interner(), parent_subst) }, self.crate_id, @@ -743,11 +747,11 @@ impl<'db> Evaluator<'db> { fn place_addr_and_ty_and_metadata<'a>( &'a self, - p: &Place<'db>, - locals: &'a Locals<'db>, + p: &Place, + locals: &'a Locals, ) -> Result<'db, (Address, Ty<'db>, Option)> { let mut addr = locals.ptr[p.local].addr; - let mut ty: Ty<'db> = locals.body.locals[p.local].ty; + let mut ty: Ty<'db> = locals.body.locals[p.local].ty.as_ref(); let mut metadata: Option = None; // locals are always sized for proj in p.projection.lookup(&locals.body.projection_store) { let prev_ty = ty; @@ -868,8 +872,8 @@ impl<'db> Evaluator<'db> { } let r = self .db - .layout_of_ty(ty, self.param_env) - .map_err(|e| MirEvalError::LayoutError(e, ty))?; + .layout_of_ty(ty.store(), self.param_env.store()) + .map_err(|e| MirEvalError::LayoutError(e, ty.store()))?; self.layout_cache.borrow_mut().insert(ty, r.clone()); Ok(r) } @@ -878,17 +882,17 @@ impl<'db> Evaluator<'db> { self.layout(Ty::new_adt(self.interner(), adt, subst)) } - fn place_ty<'a>(&'a self, p: &Place<'db>, locals: &'a Locals<'db>) -> Result<'db, Ty<'db>> { + fn place_ty<'a>(&'a self, p: &Place, locals: &'a Locals) -> Result<'db, Ty<'db>> { Ok(self.place_addr_and_ty_and_metadata(p, locals)?.1) } - fn operand_ty(&self, o: &Operand<'db>, locals: &Locals<'db>) -> Result<'db, Ty<'db>> { + fn operand_ty(&self, o: &Operand, locals: &Locals) -> Result<'db, Ty<'db>> { Ok(match &o.kind { OperandKind::Copy(p) | OperandKind::Move(p) => self.place_ty(p, locals)?, - OperandKind::Constant { konst: _, ty } => *ty, + OperandKind::Constant { konst: _, ty } => ty.as_ref(), &OperandKind::Static(s) => { - let ty = - InferenceResult::for_body(self.db, s.into())[self.db.body(s.into()).body_expr]; + let ty = InferenceResult::for_body(self.db, s.into()) + .expr_ty(self.db.body(s.into()).body_expr); Ty::new_ref( self.interner(), Region::new_static(self.interner()), @@ -901,8 +905,8 @@ impl<'db> Evaluator<'db> { fn operand_ty_and_eval( &mut self, - o: &Operand<'db>, - locals: &mut Locals<'db>, + 
o: &Operand, + locals: &mut Locals, ) -> Result<'db, IntervalAndTy<'db>> { Ok(IntervalAndTy { interval: self.eval_operand(o, locals)?, @@ -912,7 +916,7 @@ impl<'db> Evaluator<'db> { fn interpret_mir( &mut self, - body: Arc>, + body: Arc, args: impl Iterator, ) -> Result<'db, Interval> { if let Some(it) = self.stack_depth_limit.checked_sub(1) { @@ -1076,8 +1080,8 @@ impl<'db> Evaluator<'db> { fn fill_locals_for_body( &mut self, - body: &MirBody<'db>, - locals: &mut Locals<'db>, + body: &MirBody, + locals: &mut Locals, args: impl Iterator, ) -> Result<'db, ()> { let mut remain_args = body.param_locals.len(); @@ -1100,9 +1104,9 @@ impl<'db> Evaluator<'db> { fn create_locals_for_body( &mut self, - body: &Arc>, + body: &Arc, destination: Option, - ) -> Result<'db, (Locals<'db>, usize)> { + ) -> Result<'db, (Locals, usize)> { let mut locals = match self.unused_locals_store.borrow_mut().entry(body.owner).or_default().pop() { None => Locals { @@ -1126,7 +1130,7 @@ impl<'db> Evaluator<'db> { continue; } let (size, align) = self.size_align_of_sized( - it.ty, + it.ty.as_ref(), &locals, "no unsized local in extending stack", )?; @@ -1149,11 +1153,7 @@ impl<'db> Evaluator<'db> { Ok((locals, prev_stack_pointer)) } - fn eval_rvalue( - &mut self, - r: &Rvalue<'db>, - locals: &mut Locals<'db>, - ) -> Result<'db, IntervalOrOwned> { + fn eval_rvalue(&mut self, r: &Rvalue, locals: &mut Locals) -> Result<'db, IntervalOrOwned> { use IntervalOrOwned::*; Ok(match r { Rvalue::Use(it) => Borrowed(self.eval_operand(it, locals)?), @@ -1445,7 +1445,7 @@ impl<'db> Evaluator<'db> { Owned(result.to_le_bytes().to_vec()) } Rvalue::Repeat(it, len) => { - let len = match try_const_usize(self.db, *len) { + let len = match try_const_usize(self.db, len.as_ref()) { Some(it) => it as usize, None => not_supported!("non evaluatable array len in repeat Rvalue"), }; @@ -1455,7 +1455,7 @@ impl<'db> Evaluator<'db> { } Rvalue::ShallowInitBox(_, _) => not_supported!("shallow init box"), Rvalue::ShallowInitBoxWithAlloc(ty) => { - let Some((size, align)) = self.size_align_of(*ty, locals)? else { + let Some((size, align)) = self.size_align_of(ty.as_ref(), locals)? else { not_supported!("unsized box initialization"); }; let addr = self.heap_allocate(size, align)?; @@ -1477,7 +1477,7 @@ impl<'db> Evaluator<'db> { Owned(r) } AggregateKind::Tuple(ty) => { - let layout = self.layout(*ty)?; + let layout = self.layout(ty.as_ref())?; Owned(self.construct_with_layout( layout.size.bytes_usize(), &layout, @@ -1486,10 +1486,8 @@ impl<'db> Evaluator<'db> { )?) } AggregateKind::Union(it, f) => { - let layout = self.layout_adt( - (*it).into(), - GenericArgs::new_from_iter(self.interner(), []), - )?; + let layout = + self.layout_adt((*it).into(), GenericArgs::empty(self.interner()))?; let offset = layout .fields .offset(u32::from(f.local_id.into_raw()) as usize) @@ -1501,7 +1499,7 @@ impl<'db> Evaluator<'db> { } AggregateKind::Adt(it, subst) => { let (size, variant_layout, tag) = - self.layout_of_variant(*it, *subst, locals)?; + self.layout_of_variant(*it, subst.as_ref(), locals)?; Owned(self.construct_with_layout( size, &variant_layout, @@ -1510,7 +1508,7 @@ impl<'db> Evaluator<'db> { )?) 
} AggregateKind::Closure(ty) => { - let layout = self.layout(*ty)?; + let layout = self.layout(ty.as_ref())?; Owned(self.construct_with_layout( layout.size.bytes_usize(), &layout, @@ -1537,7 +1535,7 @@ impl<'db> Evaluator<'db> { PointerCast::Unsize => { let current_ty = self.operand_ty(operand, locals)?; let addr = self.eval_operand(operand, locals)?; - self.coerce_unsized(addr, current_ty, *target_ty)? + self.coerce_unsized(addr, current_ty, target_ty.as_ref())? } PointerCast::MutToConstPointer | PointerCast::UnsafeFnPointer => { // This is no-op @@ -1556,8 +1554,11 @@ impl<'db> Evaluator<'db> { let current_ty = self.operand_ty(operand, locals)?; let is_signed = matches!(current_ty.kind(), TyKind::Int(_)); let current = pad16(self.eval_operand(operand, locals)?.get(self)?, is_signed); - let dest_size = - self.size_of_sized(*target_ty, locals, "destination of int to int cast")?; + let dest_size = self.size_of_sized( + target_ty.as_ref(), + locals, + "destination of int to int cast", + )?; Owned(current[0..dest_size].to_vec()) } CastKind::FloatToInt => { @@ -1579,9 +1580,12 @@ impl<'db> Evaluator<'db> { not_supported!("unstable floating point type f16 and f128"); } }; - let is_signed = matches!(target_ty.kind(), TyKind::Int(_)); - let dest_size = - self.size_of_sized(*target_ty, locals, "destination of float to int cast")?; + let is_signed = matches!(target_ty.as_ref().kind(), TyKind::Int(_)); + let dest_size = self.size_of_sized( + target_ty.as_ref(), + locals, + "destination of float to int cast", + )?; let dest_bits = dest_size * 8; let (max, min) = if dest_bits == 128 { (i128::MAX, i128::MIN) @@ -1614,7 +1618,7 @@ impl<'db> Evaluator<'db> { not_supported!("unstable floating point type f16 and f128"); } }; - let TyKind::Float(target_ty) = target_ty.kind() else { + let TyKind::Float(target_ty) = target_ty.as_ref().kind() else { not_supported!("invalid float to float cast"); }; match target_ty { @@ -1630,7 +1634,7 @@ impl<'db> Evaluator<'db> { let is_signed = matches!(current_ty.kind(), TyKind::Int(_)); let value = pad16(self.eval_operand(operand, locals)?.get(self)?, is_signed); let value = i128::from_le_bytes(value); - let TyKind::Float(target_ty) = target_ty.kind() else { + let TyKind::Float(target_ty) = target_ty.as_ref().kind() else { not_supported!("invalid int to float cast"); }; match target_ty { @@ -1709,12 +1713,12 @@ impl<'db> Evaluator<'db> { { let field_types = self.db.field_types(struct_id.into()); if let Some(ty) = - field_types.iter().last().map(|it| it.1.instantiate(self.interner(), subst)) + field_types.iter().last().map(|it| it.1.get().instantiate(self.interner(), subst)) { return self.coerce_unsized_look_through_fields(ty, goal); } } - Err(MirEvalError::CoerceUnsizedError(ty)) + Err(MirEvalError::CoerceUnsizedError(ty.store())) } fn coerce_unsized( @@ -1787,8 +1791,10 @@ impl<'db> Evaluator<'db> { not_supported!("unsizing struct without field"); }; let target_last_field = self.db.field_types(id.into())[last_field] + .get() .instantiate(self.interner(), target_subst); let current_last_field = self.db.field_types(id.into())[last_field] + .get() .instantiate(self.interner(), current_subst); return self.unsizing_ptr_from_addr( target_last_field, @@ -1806,7 +1812,7 @@ impl<'db> Evaluator<'db> { &mut self, it: VariantId, subst: GenericArgs<'db>, - locals: &Locals<'db>, + locals: &Locals, ) -> Result<'db, (usize, Arc, Option<(usize, usize, i128)>)> { let adt = it.adt_id(self.db); if let DefWithBodyId::VariantId(f) = locals.body.owner @@ -1900,11 +1906,7 @@ impl<'db> 
Evaluator<'db> { Ok(result) } - fn eval_operand( - &mut self, - it: &Operand<'db>, - locals: &mut Locals<'db>, - ) -> Result<'db, Interval> { + fn eval_operand(&mut self, it: &Operand, locals: &mut Locals) -> Result<'db, Interval> { Ok(match &it.kind { OperandKind::Copy(p) | OperandKind::Move(p) => { locals.drop_flags.remove_place(p, &locals.body.projection_store); @@ -1914,14 +1916,16 @@ impl<'db> Evaluator<'db> { let addr = self.eval_static(*st, locals)?; Interval::new(addr, self.ptr_size()) } - OperandKind::Constant { konst, .. } => self.allocate_const_in_heap(locals, *konst)?, + OperandKind::Constant { konst, .. } => { + self.allocate_const_in_heap(locals, konst.as_ref())? + } }) } #[allow(clippy::double_parens)] fn allocate_const_in_heap( &mut self, - locals: &Locals<'db>, + locals: &Locals, konst: Const<'db>, ) -> Result<'db, Interval> { let result_owner; @@ -1971,7 +1975,7 @@ impl<'db> Evaluator<'db> { } else if size < 16 && v.len() == 16 { Cow::Borrowed(&v[0..size]) } else { - return Err(MirEvalError::InvalidConst(konst)); + return Err(MirEvalError::InvalidConst(konst.store())); } } else { Cow::Borrowed(v) @@ -1993,7 +1997,7 @@ impl<'db> Evaluator<'db> { Ok(Interval::new(addr, size)) } - fn eval_place(&mut self, p: &Place<'db>, locals: &Locals<'db>) -> Result<'db, Interval> { + fn eval_place(&mut self, p: &Place, locals: &Locals) -> Result<'db, Interval> { let addr = self.place_addr(p, locals)?; Ok(Interval::new( addr, @@ -2093,11 +2097,7 @@ impl<'db> Evaluator<'db> { Ok(()) } - fn size_align_of( - &self, - ty: Ty<'db>, - locals: &Locals<'db>, - ) -> Result<'db, Option<(usize, usize)>> { + fn size_align_of(&self, ty: Ty<'db>, locals: &Locals) -> Result<'db, Option<(usize, usize)>> { if let Some(layout) = self.layout_cache.borrow().get(&ty) { return Ok(layout .is_sized() @@ -2126,12 +2126,12 @@ impl<'db> Evaluator<'db> { fn size_of_sized( &self, ty: Ty<'db>, - locals: &Locals<'db>, + locals: &Locals, what: &'static str, ) -> Result<'db, usize> { match self.size_align_of(ty, locals)? { Some(it) => Ok(it.0), - None => Err(MirEvalError::TypeIsUnsized(ty, what)), + None => Err(MirEvalError::TypeIsUnsized(ty.store(), what)), } } @@ -2140,12 +2140,12 @@ impl<'db> Evaluator<'db> { fn size_align_of_sized( &self, ty: Ty<'db>, - locals: &Locals<'db>, + locals: &Locals, what: &'static str, ) -> Result<'db, (usize, usize)> { match self.size_align_of(ty, locals)? 
{ Some(it) => Ok(it), - None => Err(MirEvalError::TypeIsUnsized(ty, what)), + None => Err(MirEvalError::TypeIsUnsized(ty.store(), what)), } } @@ -2181,13 +2181,13 @@ impl<'db> Evaluator<'db> { &self, bytes: &[u8], ty: Ty<'db>, - locals: &Locals<'db>, + locals: &Locals, ) -> Result<'db, ComplexMemoryMap<'db>> { fn rec<'db>( this: &Evaluator<'db>, bytes: &[u8], ty: Ty<'db>, - locals: &Locals<'db>, + locals: &Locals, mm: &mut ComplexMemoryMap<'db>, stack_depth_limit: usize, ) -> Result<'db, ()> { @@ -2288,7 +2288,7 @@ impl<'db> Evaluator<'db> { .fields .offset(u32::from(f.into_raw()) as usize) .bytes_usize(); - let ty = field_types[f].instantiate(this.interner(), subst); + let ty = field_types[f].get().instantiate(this.interner(), subst); let size = this.layout(ty)?.size.bytes_usize(); rec( this, @@ -2314,7 +2314,7 @@ impl<'db> Evaluator<'db> { for (f, _) in data.fields().iter() { let offset = l.fields.offset(u32::from(f.into_raw()) as usize).bytes_usize(); - let ty = field_types[f].instantiate(this.interner(), subst); + let ty = field_types[f].get().instantiate(this.interner(), subst); let size = this.layout(ty)?.size.bytes_usize(); rec( this, @@ -2356,7 +2356,7 @@ impl<'db> Evaluator<'db> { ty_of_bytes: impl Fn(&[u8]) -> Result<'db, Ty<'db>> + Copy, addr: Address, ty: Ty<'db>, - locals: &Locals<'db>, + locals: &Locals, ) -> Result<'db, ()> { // FIXME: support indirect references let layout = self.layout(ty)?; @@ -2389,7 +2389,7 @@ impl<'db> Evaluator<'db> { AdtId::StructId(s) => { for (i, (_, ty)) in self.db.field_types(s.into()).iter().enumerate() { let offset = layout.fields.offset(i).bytes_usize(); - let ty = ty.instantiate(self.interner(), args); + let ty = ty.get().instantiate(self.interner(), args); self.patch_addresses( patch_map, ty_of_bytes, @@ -2410,7 +2410,7 @@ impl<'db> Evaluator<'db> { ) { for (i, (_, ty)) in self.db.field_types(ev.into()).iter().enumerate() { let offset = layout.fields.offset(i).bytes_usize(); - let ty = ty.instantiate(self.interner(), args); + let ty = ty.get().instantiate(self.interner(), args); self.patch_addresses( patch_map, ty_of_bytes, @@ -2477,10 +2477,10 @@ impl<'db> Evaluator<'db> { bytes: Interval, destination: Interval, args: &[IntervalAndTy<'db>], - locals: &Locals<'db>, - target_bb: Option>, + locals: &Locals, + target_bb: Option, span: MirSpan, - ) -> Result<'db, Option>> { + ) -> Result<'db, Option> { let id = from_bytes!(usize, bytes.get(self)?); let next_ty = self.vtable_map.ty(id)?; use rustc_type_ir::TyKind; @@ -2508,19 +2508,23 @@ impl<'db> Evaluator<'db> { generic_args: GenericArgs<'db>, destination: Interval, args: &[IntervalAndTy<'db>], - locals: &Locals<'db>, + locals: &Locals, span: MirSpan, - ) -> Result<'db, Option>> { + ) -> Result<'db, Option> { let mir_body = self .db - .monomorphized_mir_body_for_closure(closure, generic_args, self.param_env) + .monomorphized_mir_body_for_closure( + closure, + generic_args.store(), + self.param_env.store(), + ) .map_err(|it| MirEvalError::MirLowerErrorForClosure(closure, it))?; - let closure_data = if mir_body.locals[mir_body.param_locals[0]].ty.as_reference().is_some() - { - closure_data.addr.to_bytes().to_vec() - } else { - closure_data.get(self)?.to_owned() - }; + let closure_data = + if mir_body.locals[mir_body.param_locals[0]].ty.as_ref().as_reference().is_some() { + closure_data.addr.to_bytes().to_vec() + } else { + closure_data.get(self)?.to_owned() + }; let arg_bytes = iter::once(Ok(closure_data)) .chain(args.iter().map(|it| Ok(it.get(self)?.to_owned()))) .collect::>>()?; @@ -2542,10 
+2546,10 @@ impl<'db> Evaluator<'db> { generic_args: GenericArgs<'db>, destination: Interval, args: &[IntervalAndTy<'db>], - locals: &Locals<'db>, - target_bb: Option>, + locals: &Locals, + target_bb: Option, span: MirSpan, - ) -> Result<'db, Option>> { + ) -> Result<'db, Option> { match def { CallableDefId::FunctionId(def) => { if self.detect_fn_trait(def).is_some() { @@ -2600,9 +2604,9 @@ impl<'db> Evaluator<'db> { &self, def: FunctionId, generic_args: GenericArgs<'db>, - locals: &Locals<'db>, + locals: &Locals, span: MirSpan, - ) -> Result<'db, MirOrDynIndex<'db>> { + ) -> Result<'db, MirOrDynIndex> { let pair = (def, generic_args); if let Some(r) = self.mir_or_dyn_index_cache.borrow().get(&pair) { return Ok(r.clone()); @@ -2621,7 +2625,7 @@ impl<'db> Evaluator<'db> { let mir_body = self .db - .monomorphized_mir_body(imp.into(), generic_args, self.param_env) + .monomorphized_mir_body(imp.into(), generic_args.store(), self.param_env.store()) .map_err(|e| { MirEvalError::InFunction( Box::new(MirEvalError::MirLowerError(imp, e)), @@ -2639,11 +2643,11 @@ impl<'db> Evaluator<'db> { mut def: FunctionId, args: &[IntervalAndTy<'db>], generic_args: GenericArgs<'db>, - locals: &Locals<'db>, + locals: &Locals, destination: Interval, - target_bb: Option>, + target_bb: Option, span: MirSpan, - ) -> Result<'db, Option>> { + ) -> Result<'db, Option> { if self.detect_and_exec_special_function( def, args, @@ -2705,14 +2709,14 @@ impl<'db> Evaluator<'db> { fn exec_looked_up_function( &mut self, - mir_body: Arc>, - locals: &Locals<'db>, + mir_body: Arc, + locals: &Locals, def: FunctionId, arg_bytes: impl Iterator, span: MirSpan, destination: Interval, - target_bb: Option>, - ) -> Result<'db, Option>> { + target_bb: Option, + ) -> Result<'db, Option> { Ok(if let Some(target_bb) = target_bb { let (mut locals, prev_stack_ptr) = self.create_locals_for_body(&mir_body, Some(destination))?; @@ -2736,11 +2740,11 @@ impl<'db> Evaluator<'db> { def: FunctionId, args: &[IntervalAndTy<'db>], generic_args: GenericArgs<'db>, - locals: &Locals<'db>, + locals: &Locals, destination: Interval, - target_bb: Option>, + target_bb: Option, span: MirSpan, - ) -> Result<'db, Option>> { + ) -> Result<'db, Option> { let func = args .first() .ok_or_else(|| MirEvalError::InternalError("fn trait with no arg".into()))?; @@ -2767,7 +2771,7 @@ impl<'db> Evaluator<'db> { TyKind::Closure(closure, subst) => self.exec_closure( closure.0, func_data, - subst.split_closure_args_untupled().parent_args, + GenericArgs::new_from_slice(subst.split_closure_args_untupled().parent_args), destination, &args[1..], locals, @@ -2805,7 +2809,7 @@ impl<'db> Evaluator<'db> { } } - fn eval_static(&mut self, st: StaticId, locals: &Locals<'db>) -> Result<'db, Address> { + fn eval_static(&mut self, st: StaticId, locals: &Locals) -> Result<'db, Address> { if let Some(o) = self.static_locations.get(&st) { return Ok(*o); }; @@ -2816,8 +2820,8 @@ impl<'db> Evaluator<'db> { })?; self.allocate_const_in_heap(locals, konst)? } else { - let ty = - InferenceResult::for_body(self.db, st.into())[self.db.body(st.into()).body_expr]; + let ty = InferenceResult::for_body(self.db, st.into()) + .expr_ty(self.db.body(st.into()).body_expr); let Some((size, align)) = self.size_align_of(ty, locals)? 
else { not_supported!("unsized extern static"); }; @@ -2852,12 +2856,7 @@ impl<'db> Evaluator<'db> { } } - fn drop_place( - &mut self, - place: &Place<'db>, - locals: &mut Locals<'db>, - span: MirSpan, - ) -> Result<'db, ()> { + fn drop_place(&mut self, place: &Place, locals: &mut Locals, span: MirSpan) -> Result<'db, ()> { let (addr, ty, metadata) = self.place_addr_and_ty_and_metadata(place, locals)?; if !locals.drop_flags.remove_place(place, &locals.body.projection_store) { return Ok(()); @@ -2872,7 +2871,7 @@ impl<'db> Evaluator<'db> { fn run_drop_glue_deep( &mut self, ty: Ty<'db>, - locals: &Locals<'db>, + locals: &Locals, addr: Address, _metadata: &[u8], span: MirSpan, @@ -2886,7 +2885,7 @@ impl<'db> Evaluator<'db> { return Ok(()); }; - let generic_args = GenericArgs::new_from_iter(self.interner(), [ty.into()]); + let generic_args = GenericArgs::new_from_slice(&[ty.into()]); if let Ok(MirOrDynIndex::Mir(body)) = self.get_mir_or_dyn_index(drop_fn, generic_args, locals, span) { @@ -2920,7 +2919,9 @@ impl<'db> Evaluator<'db> { .offset(u32::from(field.into_raw()) as usize) .bytes_usize(); let addr = addr.offset(offset); - let ty = field_types[field].instantiate(self.interner(), subst); + let ty = field_types[field] + .get() + .instantiate(self.interner(), subst); self.run_drop_glue_deep(ty, locals, addr, &[], span)?; } } @@ -3011,7 +3012,7 @@ pub fn render_const_using_debug_impl<'db>( let debug_fmt_fn_ptr = evaluator.vtable_map.id(Ty::new_fn_def( evaluator.interner(), CallableDefId::FunctionId(debug_fmt_fn).into(), - GenericArgs::new_from_iter(evaluator.interner(), [ty.into()]), + GenericArgs::new_from_slice(&[ty.into()]), )); evaluator.write_memory(a2.offset(evaluator.ptr_size()), &debug_fmt_fn_ptr.to_le_bytes())?; // a3 = ::core::fmt::Arguments::new_v1(a1, a2) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs index 42c11113ee303..a47a8c4400071 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs @@ -52,7 +52,7 @@ impl<'db> Evaluator<'db> { def: FunctionId, args: &[IntervalAndTy<'db>], generic_args: GenericArgs<'db>, - locals: &Locals<'db>, + locals: &Locals, destination: Interval, span: MirSpan, ) -> Result<'db, bool> { @@ -149,7 +149,7 @@ impl<'db> Evaluator<'db> { def: FunctionId, args: &[IntervalAndTy<'db>], self_ty: Ty<'db>, - locals: &Locals<'db>, + locals: &Locals, destination: Interval, span: MirSpan, ) -> Result<'db, ()> { @@ -195,7 +195,7 @@ impl<'db> Evaluator<'db> { self.exec_fn_with_args( def, args, - GenericArgs::new_from_iter(self.interner(), [self_ty.into()]), + GenericArgs::new_from_slice(&[self_ty.into()]), locals, destination, None, @@ -212,7 +212,7 @@ impl<'db> Evaluator<'db> { layout: Arc, addr: Address, def: FunctionId, - locals: &Locals<'db>, + locals: &Locals, destination: Interval, span: MirSpan, ) -> Result<'db, ()> { @@ -318,7 +318,7 @@ impl<'db> Evaluator<'db> { it: EvalLangItem, generic_args: GenericArgs<'db>, args: &[IntervalAndTy<'db>], - locals: &Locals<'db>, + locals: &Locals, span: MirSpan, ) -> Result<'db, Vec> { use EvalLangItem::*; @@ -390,7 +390,7 @@ impl<'db> Evaluator<'db> { id: i64, args: &[IntervalAndTy<'db>], destination: Interval, - _locals: &Locals<'db>, + _locals: &Locals, _span: MirSpan, ) -> Result<'db, ()> { match id { @@ -421,7 +421,7 @@ impl<'db> Evaluator<'db> { args: &[IntervalAndTy<'db>], _generic_args: GenericArgs<'db>, destination: Interval, - locals: &Locals<'db>, + 
locals: &Locals, span: MirSpan, ) -> Result<'db, ()> { match as_str { @@ -587,7 +587,7 @@ impl<'db> Evaluator<'db> { args: &[IntervalAndTy<'db>], generic_args: GenericArgs<'db>, destination: Interval, - locals: &Locals<'db>, + locals: &Locals, span: MirSpan, needs_override: bool, ) -> Result<'db, bool> { @@ -1235,7 +1235,7 @@ impl<'db> Evaluator<'db> { def, &args, // FIXME: wrong for manual impls of `FnOnce` - GenericArgs::new_from_iter(self.interner(), []), + GenericArgs::empty(self.interner()), locals, destination, None, @@ -1369,7 +1369,7 @@ impl<'db> Evaluator<'db> { &mut self, ty: Ty<'db>, metadata: Interval, - locals: &Locals<'db>, + locals: &Locals, ) -> Result<'db, (usize, usize)> { Ok(match ty.kind() { TyKind::Str => (from_bytes!(usize, metadata.get(self)?), 1), @@ -1391,8 +1391,13 @@ impl<'db> Evaluator<'db> { _ => not_supported!("unsized enum or union"), }; let field_types = self.db.field_types(id.into()); - let last_field_ty = - field_types.iter().next_back().unwrap().1.instantiate(self.interner(), subst); + let last_field_ty = field_types + .iter() + .next_back() + .unwrap() + .1 + .get() + .instantiate(self.interner(), subst); let sized_part_size = layout.fields.offset(field_types.iter().count() - 1).bytes_usize(); let sized_part_align = layout.align.bytes() as usize; @@ -1423,7 +1428,7 @@ impl<'db> Evaluator<'db> { args: &[IntervalAndTy<'db>], generic_args: GenericArgs<'db>, destination: Interval, - locals: &Locals<'db>, + locals: &Locals, _span: MirSpan, ) -> Result<'db, ()> { // We are a single threaded runtime with no UB checking and no optimization, so diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs index 4c64a70a7a624..3896917cab1a1 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs @@ -35,6 +35,7 @@ impl<'db> Evaluator<'db> { not_supported!("simd type with no field"); }; let field_ty = self.db.field_types(id.into())[first_field] + .get() .instantiate(self.interner(), subst); return Ok((fields.len(), field_ty)); } @@ -67,7 +68,7 @@ impl<'db> Evaluator<'db> { args: &[IntervalAndTy<'db>], _generic_args: GenericArgs<'db>, destination: Interval, - _locals: &Locals<'db>, + _locals: &Locals, _span: MirSpan, ) -> Result<'db, ()> { match name { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs index c13b76c125796..61dd7757c90b1 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs @@ -15,7 +15,7 @@ use crate::{ use super::{MirEvalError, interpret_mir}; -fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), MirEvalError<'_>> { +fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), MirEvalError> { crate::attach_db(db, || { let interner = DbInterner::new_no_crate(db); let module_id = db.module_for_file(file_id.file_id(db)); @@ -39,11 +39,12 @@ fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), let body = db .monomorphized_mir_body( func_id.into(), - GenericArgs::new_from_iter(interner, []), + GenericArgs::empty(interner).store(), crate::ParamEnvAndCrate { param_env: db.trait_environment(func_id.into()), krate: func_id.krate(db), - }, + } + .store(), ) .map_err(|e| MirEvalError::MirLowerError(func_id, e))?; @@ -122,7 +123,7 @@ fn 
check_panic(#[rust_analyzer::rust_fixture] ra_fixture: &str, expected_panic: fn check_error_with( #[rust_analyzer::rust_fixture] ra_fixture: &str, - expect_err: impl FnOnce(MirEvalError<'_>) -> bool, + expect_err: impl FnOnce(MirEvalError) -> bool, ) { let (db, file_ids) = TestDB::with_many_files(ra_fixture); crate::attach_db(&db, || { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs index 5bce4222a4fa6..e8d42bed9fa7a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs @@ -19,7 +19,7 @@ use hir_expand::name::Name; use la_arena::ArenaMap; use rustc_apfloat::Float; use rustc_hash::FxHashMap; -use rustc_type_ir::inherent::{Const as _, IntoKind, SliceLike, Ty as _}; +use rustc_type_ir::inherent::{Const as _, IntoKind, Ty as _}; use span::{Edition, FileId}; use syntax::TextRange; use triomphe::Arc; @@ -42,7 +42,8 @@ use crate::{ TupleFieldId, Ty, UnOp, VariantId, return_slot, }, next_solver::{ - Const, DbInterner, ParamConst, ParamEnv, Region, TyKind, TypingMode, UnevaluatedConst, + Const, DbInterner, ParamConst, ParamEnv, Region, StoredGenericArgs, StoredTy, TyKind, + TypingMode, UnevaluatedConst, infer::{DbInternerInferExt, InferCtxt}, }, traits::FnTrait, @@ -56,39 +57,40 @@ mod pattern_matching; mod tests; #[derive(Debug, Clone)] -struct LoopBlocks<'db> { - begin: BasicBlockId<'db>, +struct LoopBlocks { + begin: BasicBlockId, /// `None` for loops that are not terminating - end: Option>, - place: Place<'db>, + end: Option, + place: Place, drop_scope_index: usize, } #[derive(Debug, Clone, Default)] -struct DropScope<'db> { +struct DropScope { /// locals, in order of definition (so we should run drop glues in reverse order) - locals: Vec>, + locals: Vec, } struct MirLowerCtx<'a, 'db> { - result: MirBody<'db>, + result: MirBody, owner: DefWithBodyId, - current_loop_blocks: Option>, - labeled_loop_blocks: FxHashMap>, - discr_temp: Option>, + current_loop_blocks: Option, + labeled_loop_blocks: FxHashMap, + discr_temp: Option, db: &'db dyn HirDatabase, body: &'a Body, - infer: &'a InferenceResult<'db>, + infer: &'a InferenceResult, + types: &'db crate::next_solver::DefaultAny<'db>, resolver: Resolver<'db>, - drop_scopes: Vec>, + drop_scopes: Vec, env: ParamEnv<'db>, infcx: InferCtxt<'db>, } // FIXME: Make this smaller, its stored in database queries #[derive(Debug, Clone, PartialEq, Eq)] -pub enum MirLowerError<'db> { - ConstEvalError(Box, Box>), +pub enum MirLowerError { + ConstEvalError(Box, Box), LayoutError(LayoutError), IncompleteExpr, IncompletePattern, @@ -98,9 +100,9 @@ pub enum MirLowerError<'db> { RecordLiteralWithoutPath, UnresolvedMethod(String), UnresolvedField, - UnsizedTemporary(Ty<'db>), + UnsizedTemporary(StoredTy), MissingFunctionDefinition(DefWithBodyId, ExprId), - TypeMismatch(TypeMismatch<'db>), + TypeMismatch(TypeMismatch), HasErrors, /// This should never happen. Type mismatch should catch everything. TypeError(&'static str), @@ -113,11 +115,11 @@ pub enum MirLowerError<'db> { LangItemNotFound, MutatingRvalue, UnresolvedLabel, - UnresolvedUpvar(Place<'db>), + UnresolvedUpvar(Place), InaccessibleLocal, // monomorphization errors: - GenericArgNotProvided(GenericParamId, GenericArgs<'db>), + GenericArgNotProvided(GenericParamId, StoredGenericArgs), } /// A token to ensuring that each drop scope is popped at most once, thanks to the compiler that checks moves. 
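The comment above leans on move semantics, and the `std::mem::forget(self)` call in `pop_and_drop` just below is the other half of the trick. A stripped-down sketch of the token pattern, with invented fields standing in for the real lowering context:

```rust
// The token is neither `Copy` nor `Clone`, so popping consumes it by value and
// the move checker proves each pushed scope is popped at most once.
struct DropScopeToken;

struct Ctx {
    open_scopes: usize,
}

impl Ctx {
    fn push_drop_scope(&mut self) -> DropScopeToken {
        self.open_scopes += 1;
        DropScopeToken
    }

    fn pop_and_drop(&mut self, token: DropScopeToken) {
        // Mirrors the `std::mem::forget(self)` in the real `pop_and_drop`:
        // the token is consumed here and cannot be used for a second pop.
        std::mem::forget(token);
        self.open_scopes -= 1;
        // ...this is where the scope's drop/storage-dead statements get emitted...
    }
}

fn main() {
    let mut ctx = Ctx { open_scopes: 0 };
    let token = ctx.push_drop_scope();
    ctx.pop_and_drop(token);
    assert_eq!(ctx.open_scopes, 0);
    // ctx.pop_and_drop(token); // error[E0382]: use of moved value: `token`
}
```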
@@ -126,9 +128,9 @@ impl DropScopeToken { fn pop_and_drop<'db>( self, ctx: &mut MirLowerCtx<'_, 'db>, - current: BasicBlockId<'db>, + current: BasicBlockId, span: MirSpan, - ) -> BasicBlockId<'db> { + ) -> BasicBlockId { std::mem::forget(self); ctx.pop_drop_scope_internal(current, span) } @@ -158,7 +160,7 @@ impl Drop for DropScopeToken { // } // } -impl MirLowerError<'_> { +impl MirLowerError { pub fn pretty_print( &self, f: &mut String, @@ -190,8 +192,8 @@ impl MirLowerError<'_> { MirLowerError::TypeMismatch(e) => writeln!( f, "Type mismatch: Expected {}, found {}", - e.expected.display(db, display_target), - e.actual.display(db, display_target), + e.expected.as_ref().display(db, display_target), + e.actual.as_ref().display(db, display_target), )?, MirLowerError::GenericArgNotProvided(id, subst) => { let param_name = match *id { @@ -211,7 +213,7 @@ impl MirLowerError<'_> { param_name.unwrap_or(Name::missing()).display(db, display_target.edition) )?; writeln!(f, "Provided args: [")?; - for g in subst.iter() { + for g in subst.as_ref() { write!(f, " {},", g.display(db, display_target))?; } writeln!(f, "]")?; @@ -254,13 +256,13 @@ macro_rules! implementation_error { }}; } -impl From for MirLowerError<'_> { +impl From for MirLowerError { fn from(value: LayoutError) -> Self { MirLowerError::LayoutError(value) } } -impl MirLowerError<'_> { +impl MirLowerError { fn unresolved_path( db: &dyn HirDatabase, p: &Path, @@ -273,14 +275,14 @@ impl MirLowerError<'_> { } } -type Result<'db, T> = std::result::Result>; +type Result<'db, T> = std::result::Result; impl<'a, 'db> MirLowerCtx<'a, 'db> { fn new( db: &'db dyn HirDatabase, owner: DefWithBodyId, body: &'a Body, - infer: &'a InferenceResult<'db>, + infer: &'a InferenceResult, ) -> Self { let mut basic_blocks = Arena::new(); let start_block = basic_blocks.alloc(BasicBlock { @@ -289,7 +291,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { is_cleanup: false, }); let locals = Arena::new(); - let binding_locals: ArenaMap> = ArenaMap::new(); + let binding_locals: ArenaMap = ArenaMap::new(); let mir = MirBody { projection_store: ProjectionStore::default(), basic_blocks, @@ -311,6 +313,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { db, infer, body, + types: crate::next_solver::default_types(db), owner, resolver, current_loop_blocks: None, @@ -332,16 +335,11 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { self.infcx.interner.lang_items() } - fn temp( - &mut self, - ty: Ty<'db>, - current: BasicBlockId<'db>, - span: MirSpan, - ) -> Result<'db, LocalId<'db>> { + fn temp(&mut self, ty: Ty<'db>, current: BasicBlockId, span: MirSpan) -> Result<'db, LocalId> { if matches!(ty.kind(), TyKind::Slice(_) | TyKind::Dynamic(..)) { - return Err(MirLowerError::UnsizedTemporary(ty)); + return Err(MirLowerError::UnsizedTemporary(ty.store())); } - let l = self.result.locals.alloc(Local { ty }); + let l = self.result.locals.alloc(Local { ty: ty.store() }); self.push_storage_live_for_local(l, current, span)?; Ok(l) } @@ -349,8 +347,8 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn lower_expr_to_some_operand( &mut self, expr_id: ExprId, - current: BasicBlockId<'db>, - ) -> Result<'db, Option<(Operand<'db>, BasicBlockId<'db>)>> { + current: BasicBlockId, + ) -> Result<'db, Option<(Operand, BasicBlockId)>> { if !self.has_adjustments(expr_id) && let Expr::Literal(l) = &self.body[expr_id] { @@ -366,18 +364,14 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn lower_expr_to_place_with_adjust( &mut self, expr_id: ExprId, - place: Place<'db>, - current: BasicBlockId<'db>, - adjustments: &[Adjustment<'db>], - ) -> 
Result<'db, Option>> { + place: Place, + current: BasicBlockId, + adjustments: &[Adjustment], + ) -> Result<'db, Option> { match adjustments.split_last() { Some((last, rest)) => match &last.kind { Adjust::NeverToAny => { - let temp = self.temp( - Ty::new(self.interner(), TyKind::Never), - current, - MirSpan::Unknown, - )?; + let temp = self.temp(self.types.types.never, current, MirSpan::Unknown)?; self.lower_expr_to_place_with_adjust(expr_id, temp.into(), current, rest) } Adjust::Deref(_) => { @@ -416,7 +410,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { Rvalue::Cast( CastKind::PointerCoercion(*cast), Operand { kind: OperandKind::Copy(p), span: None }, - last.target, + last.target.clone(), ), expr_id.into(), ); @@ -430,11 +424,11 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn lower_expr_to_place_with_borrow_adjust( &mut self, expr_id: ExprId, - place: Place<'db>, - current: BasicBlockId<'db>, - rest: &[Adjustment<'db>], + place: Place, + current: BasicBlockId, + rest: &[Adjustment], m: Mutability, - ) -> Result<'db, Option>> { + ) -> Result<'db, Option> { let Some((p, current)) = self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)? else { @@ -448,9 +442,9 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn lower_expr_to_place( &mut self, expr_id: ExprId, - place: Place<'db>, - prev_block: BasicBlockId<'db>, - ) -> Result<'db, Option>> { + place: Place, + prev_block: BasicBlockId, + ) -> Result<'db, Option> { if let Some(adjustments) = self.infer.expr_adjustments.get(&expr_id) { return self.lower_expr_to_place_with_adjust(expr_id, place, prev_block, adjustments); } @@ -460,9 +454,9 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn lower_expr_to_place_without_adjust( &mut self, expr_id: ExprId, - place: Place<'db>, - mut current: BasicBlockId<'db>, - ) -> Result<'db, Option>> { + place: Place, + mut current: BasicBlockId, + ) -> Result<'db, Option> { match &self.body[expr_id] { Expr::OffsetOf(_) => { not_supported!("builtin#offset_of") @@ -537,7 +531,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { const_id.into(), current, place, - GenericArgs::new_from_iter(self.interner(), []), + GenericArgs::empty(self.interner()), expr_id.into(), )?; Ok(Some(current)) @@ -545,7 +539,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { ValueNs::EnumVariantId(variant_id) => { let variant_fields = variant_id.fields(self.db); if variant_fields.shape == FieldsShape::Unit { - let ty = self.infer.type_of_expr[expr_id]; + let ty = self.infer.expr_ty(expr_id); current = self.lower_enum_variant( variant_id, current, @@ -575,8 +569,9 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { konst: Const::new_param( self.interner(), ParamConst { id: p, index }, - ), - ty: self.db.const_param_ty_ns(p), + ) + .store(), + ty: self.db.const_param_ty_ns(p).store(), }, span: None, }), @@ -907,7 +902,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { operands[u32::from(field_id.into_raw()) as usize] = Some(op); } let rvalue = Rvalue::Aggregate( - AggregateKind::Adt(variant_id, subst), + AggregateKind::Adt(variant_id, subst.store()), match spread_place { Some(sp) => operands .into_iter() @@ -978,15 +973,15 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { let rvalue = if self.infer.coercion_casts.contains(expr) { Rvalue::Use(it) } else { - let source_ty = self.infer[*expr]; - let target_ty = self.infer[expr_id]; + let source_ty = self.infer.expr_ty(*expr); + let target_ty = self.infer.expr_ty(expr_id); let cast_kind = if source_ty.as_reference().is_some() { CastKind::PointerCoercion(PointerCast::ArrayToPointer) } else { cast_kind(self.db, source_ty, target_ty)? 
}; - Rvalue::Cast(cast_kind, it, target_ty) + Rvalue::Cast(cast_kind, it, target_ty.store()) }; self.push_assignment(current, place, rvalue, expr_id.into()); Ok(Some(current)) @@ -1004,7 +999,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { self.push_assignment( current, place, - Rvalue::ShallowInitBoxWithAlloc(ty), + Rvalue::ShallowInitBoxWithAlloc(ty.store()), expr_id.into(), ); let Some((operand, current)) = self.lower_expr_to_some_operand(*expr, current)? @@ -1222,7 +1217,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { current, place, Rvalue::Aggregate( - AggregateKind::Adt(st.into(), subst), + AggregateKind::Adt(st.into(), subst.store()), st.fields(self.db) .fields() .iter() @@ -1284,11 +1279,10 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { }; match &capture.kind { CaptureKind::ByRef(bk) => { - let tmp_ty = capture.ty.instantiate_identity(); + let tmp_ty = capture.ty.get().instantiate_identity(); // FIXME: Handle more than one span. let capture_spans = capture.spans(); - let tmp: Place<'db> = - self.temp(tmp_ty, current, capture_spans[0])?.into(); + let tmp: Place = self.temp(tmp_ty, current, capture_spans[0])?.into(); self.push_assignment( current, tmp, @@ -1305,7 +1299,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { self.push_assignment( current, place, - Rvalue::Aggregate(AggregateKind::Closure(ty), operands.into()), + Rvalue::Aggregate(AggregateKind::Closure(ty.store()), operands.into()), expr_id.into(), ); Ok(Some(current)) @@ -1325,7 +1319,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { return Ok(None); }; let r = Rvalue::Aggregate( - AggregateKind::Tuple(self.expr_ty_without_adjust(expr_id)), + AggregateKind::Tuple(self.expr_ty_without_adjust(expr_id).store()), values, ); self.push_assignment(current, place, r, expr_id.into()); @@ -1355,7 +1349,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { else { return Ok(None); }; - let r = Rvalue::Aggregate(AggregateKind::Array(elem_ty), values); + let r = Rvalue::Aggregate(AggregateKind::Array(elem_ty.store()), values); self.push_assignment(current, place, r, expr_id.into()); Ok(Some(current)) } @@ -1373,7 +1367,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { )); } }; - let r = Rvalue::Repeat(init, len); + let r = Rvalue::Repeat(init, len.store()); self.push_assignment(current, place, r, expr_id.into()); Ok(Some(current)) } @@ -1388,11 +1382,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { } } - fn push_field_projection( - &mut self, - place: &mut Place<'db>, - expr_id: ExprId, - ) -> Result<'db, ()> { + fn push_field_projection(&mut self, place: &mut Place, expr_id: ExprId) -> Result<'db, ()> { if let Expr::Field { expr, name } = &self.body[expr_id] { if let TyKind::Tuple(..) 
= self.expr_ty_after_adjustments(*expr).kind() { let index = @@ -1421,7 +1411,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { &mut self, ty: Ty<'db>, loc: &ExprId, - ) -> Result<'db, Operand<'db>> { + ) -> Result<'db, Operand> { match &self.body[*loc] { Expr::Literal(l) => self.lower_literal_to_operand(ty, l), Expr::Path(c) => { @@ -1443,7 +1433,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { ResolveValueResult::ValueNs(v, _) => { if let ValueNs::ConstId(c) = v { self.lower_const_to_operand( - GenericArgs::new_from_iter(self.interner(), []), + GenericArgs::empty(self.interner()), c.into(), ) } else { @@ -1461,10 +1451,13 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { } } - fn lower_literal_to_operand(&mut self, ty: Ty<'db>, l: &Literal) -> Result<'db, Operand<'db>> { + fn lower_literal_to_operand(&mut self, ty: Ty<'db>, l: &Literal) -> Result<'db, Operand> { let size = || { self.db - .layout_of_ty(ty, ParamEnvAndCrate { param_env: self.env, krate: self.krate() }) + .layout_of_ty( + ty.store(), + ParamEnvAndCrate { param_env: self.env, krate: self.krate() }.store(), + ) .map(|it| it.size.bytes_usize()) }; const USIZE_SIZE: usize = size_of::(); @@ -1512,15 +1505,15 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { Ok(Operand::from_concrete_const(bytes, MemoryMap::default(), ty)) } - fn new_basic_block(&mut self) -> BasicBlockId<'db> { + fn new_basic_block(&mut self) -> BasicBlockId { self.result.basic_blocks.alloc(BasicBlock::default()) } fn lower_const( &mut self, const_id: GeneralConstId, - prev_block: BasicBlockId<'db>, - place: Place<'db>, + prev_block: BasicBlockId, + place: Place, subst: GenericArgs<'db>, span: MirSpan, ) -> Result<'db, ()> { @@ -1533,8 +1526,8 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { &mut self, subst: GenericArgs<'db>, const_id: GeneralConstId, - ) -> Result<'db, Operand<'db>> { - let konst = if subst.len() != 0 { + ) -> Result<'db, Operand> { + let konst = if !subst.is_empty() { // We can't evaluate constant with substitution now, as generics are not monomorphized in lowering. 
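The comment above, whose `Const::new_unevaluated` call continues just below, captures the key constraint: lowering runs before monomorphization, so a constant that still mentions generic parameters must stay symbolic. A toy model of that branch, with made-up types standing in for the real `Const` and substitution:

```rust
#[derive(Debug)]
enum Const {
    Concrete(i128),
    Unevaluated { def: &'static str, args: Vec<&'static str> },
}

fn evaluate(_def: &'static str) -> i128 {
    42 // stand-in for actually running the const evaluator
}

fn lower_const_to_operand(def: &'static str, subst: &[&'static str]) -> Const {
    if !subst.is_empty() {
        // Still generic: keep it symbolic and let monomorphization evaluate it later.
        Const::Unevaluated { def, args: subst.to_vec() }
    } else {
        // Fully concrete: safe to evaluate right away.
        Const::Concrete(evaluate(def))
    }
}

fn main() {
    println!("{:?}", lower_const_to_operand("LEN", &[]));    // Concrete(42)
    println!("{:?}", lower_const_to_operand("LEN", &["T"])); // Unevaluated { .. }
}
```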
Const::new_unevaluated( self.interner(), @@ -1564,13 +1557,16 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { }) .unwrap() .instantiate(self.interner(), subst); - Ok(Operand { kind: OperandKind::Constant { konst, ty }, span: None }) + Ok(Operand { + kind: OperandKind::Constant { konst: konst.store(), ty: ty.store() }, + span: None, + }) } fn write_bytes_to_place( &mut self, - prev_block: BasicBlockId<'db>, - place: Place<'db>, + prev_block: BasicBlockId, + place: Place, cv: Box<[u8]>, ty: Ty<'db>, span: MirSpan, @@ -1582,12 +1578,12 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn lower_enum_variant( &mut self, variant_id: EnumVariantId, - prev_block: BasicBlockId<'db>, - place: Place<'db>, + prev_block: BasicBlockId, + place: Place, ty: Ty<'db>, - fields: Box<[Operand<'db>]>, + fields: Box<[Operand]>, span: MirSpan, - ) -> Result<'db, BasicBlockId<'db>> { + ) -> Result<'db, BasicBlockId> { let subst = match ty.kind() { TyKind::Adt(_, subst) => subst, _ => implementation_error!("Non ADT enum"), @@ -1595,7 +1591,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { self.push_assignment( prev_block, place, - Rvalue::Aggregate(AggregateKind::Adt(variant_id.into(), subst), fields), + Rvalue::Aggregate(AggregateKind::Adt(variant_id.into(), subst.store()), fields), span, ); Ok(prev_block) @@ -1603,13 +1599,13 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn lower_call_and_args( &mut self, - func: Operand<'db>, + func: Operand, args: impl Iterator, - place: Place<'db>, - mut current: BasicBlockId<'db>, + place: Place, + mut current: BasicBlockId, is_uninhabited: bool, span: MirSpan, - ) -> Result<'db, Option>> { + ) -> Result<'db, Option> { let Some(args) = args .map(|arg| { if let Some((temp, c)) = self.lower_expr_to_some_operand(arg, current)? { @@ -1628,13 +1624,13 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn lower_call( &mut self, - func: Operand<'db>, - args: Box<[Operand<'db>]>, - place: Place<'db>, - current: BasicBlockId<'db>, + func: Operand, + args: Box<[Operand]>, + place: Place, + current: BasicBlockId, is_uninhabited: bool, span: MirSpan, - ) -> Result<'db, Option>> { + ) -> Result<'db, Option> { let b = if is_uninhabited { None } else { Some(self.new_basic_block()) }; self.set_terminator( current, @@ -1651,25 +1647,20 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { Ok(b) } - fn is_unterminated(&mut self, source: BasicBlockId<'db>) -> bool { + fn is_unterminated(&mut self, source: BasicBlockId) -> bool { self.result.basic_blocks[source].terminator.is_none() } - fn set_terminator( - &mut self, - source: BasicBlockId<'db>, - terminator: TerminatorKind<'db>, - span: MirSpan, - ) { + fn set_terminator(&mut self, source: BasicBlockId, terminator: TerminatorKind, span: MirSpan) { self.result.basic_blocks[source].terminator = Some(Terminator { span, kind: terminator }); } - fn set_goto(&mut self, source: BasicBlockId<'db>, target: BasicBlockId<'db>, span: MirSpan) { + fn set_goto(&mut self, source: BasicBlockId, target: BasicBlockId, span: MirSpan) { self.set_terminator(source, TerminatorKind::Goto { target }, span); } fn expr_ty_without_adjust(&self, e: ExprId) -> Ty<'db> { - self.infer[e] + self.infer.expr_ty(e) } fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty<'db> { @@ -1677,36 +1668,36 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { if let Some(it) = self.infer.expr_adjustments.get(&e) && let Some(it) = it.last() { - ty = Some(it.target); + ty = Some(it.target.as_ref()); } ty.unwrap_or_else(|| self.expr_ty_without_adjust(e)) } - fn push_statement(&mut self, block: BasicBlockId<'db>, statement: Statement<'db>) { + fn 
push_statement(&mut self, block: BasicBlockId, statement: Statement) { self.result.basic_blocks[block].statements.push(statement); } - fn push_fake_read(&mut self, block: BasicBlockId<'db>, p: Place<'db>, span: MirSpan) { + fn push_fake_read(&mut self, block: BasicBlockId, p: Place, span: MirSpan) { self.push_statement(block, StatementKind::FakeRead(p).with_span(span)); } fn push_assignment( &mut self, - block: BasicBlockId<'db>, - place: Place<'db>, - rvalue: Rvalue<'db>, + block: BasicBlockId, + place: Place, + rvalue: Rvalue, span: MirSpan, ) { self.push_statement(block, StatementKind::Assign(place, rvalue).with_span(span)); } - fn discr_temp_place(&mut self, current: BasicBlockId<'db>) -> Place<'db> { + fn discr_temp_place(&mut self, current: BasicBlockId) -> Place { match &self.discr_temp { Some(it) => *it, None => { // FIXME: rustc's ty is dependent on the adt type, maybe we need to do that as well let discr_ty = Ty::new_int(self.interner(), rustc_type_ir::IntTy::I128); - let tmp: Place<'db> = self + let tmp: Place = self .temp(discr_ty, current, MirSpan::Unknown) .expect("discr_ty is never unsized") .into(); @@ -1718,12 +1709,12 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn lower_loop( &mut self, - prev_block: BasicBlockId<'db>, - place: Place<'db>, + prev_block: BasicBlockId, + place: Place, label: Option, span: MirSpan, - f: impl FnOnce(&mut MirLowerCtx<'_, 'db>, BasicBlockId<'db>) -> Result<'db, ()>, - ) -> Result<'db, Option>> { + f: impl FnOnce(&mut MirLowerCtx<'_, 'db>, BasicBlockId) -> Result<'db, ()>, + ) -> Result<'db, Option> { let begin = self.new_basic_block(); let prev = self.current_loop_blocks.replace(LoopBlocks { begin, @@ -1758,10 +1749,10 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn merge_blocks( &mut self, - b1: Option>, - b2: Option>, + b1: Option, + b2: Option, span: MirSpan, - ) -> Option> { + ) -> Option { match (b1, b2) { (None, None) => None, (None, Some(b)) | (Some(b), None) => Some(b), @@ -1774,7 +1765,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { } } - fn current_loop_end(&mut self) -> Result<'db, BasicBlockId<'db>> { + fn current_loop_end(&mut self) -> Result<'db, BasicBlockId> { let r = match self .current_loop_blocks .as_mut() @@ -1801,7 +1792,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn is_uninhabited(&self, expr_id: ExprId) -> bool { is_ty_uninhabited_from( &self.infcx, - self.infer[expr_id], + self.infer.expr_ty(expr_id), self.owner.module(self.db), self.env, ) @@ -1809,15 +1800,15 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { /// This function push `StorageLive` statement for the binding, and applies changes to add `StorageDead` and /// `Drop` in the appropriated places. 
- fn push_storage_live(&mut self, b: BindingId, current: BasicBlockId<'db>) -> Result<'db, ()> { + fn push_storage_live(&mut self, b: BindingId, current: BasicBlockId) -> Result<'db, ()> { let l = self.binding_local(b)?; self.push_storage_live_for_local(l, current, MirSpan::BindingId(b)) } fn push_storage_live_for_local( &mut self, - l: LocalId<'db>, - current: BasicBlockId<'db>, + l: LocalId, + current: BasicBlockId, span: MirSpan, ) -> Result<'db, ()> { self.drop_scopes.last_mut().unwrap().locals.push(l); @@ -1828,11 +1819,11 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn lower_block_to_place( &mut self, statements: &[hir_def::hir::Statement], - mut current: BasicBlockId<'db>, + mut current: BasicBlockId, tail: Option, - place: Place<'db>, + place: Place, span: MirSpan, - ) -> Result<'db, Option>>> { + ) -> Result<'db, Option>> { let scope = self.push_drop_scope(); for statement in statements.iter() { match statement { @@ -1908,11 +1899,11 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { params: impl Iterator)> + Clone, self_binding: Option<(BindingId, Ty<'db>)>, pick_binding: impl Fn(BindingId) -> bool, - ) -> Result<'db, BasicBlockId<'db>> { + ) -> Result<'db, BasicBlockId> { let base_param_count = self.result.param_locals.len(); let self_binding = match self_binding { Some((self_binding, ty)) => { - let local_id = self.result.locals.alloc(Local { ty }); + let local_id = self.result.locals.alloc(Local { ty: ty.store() }); self.drop_scopes.last_mut().unwrap().locals.push(local_id); self.result.binding_locals.insert(self_binding, local_id); self.result.param_locals.push(local_id); @@ -1921,7 +1912,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { None => None, }; self.result.param_locals.extend(params.clone().map(|(it, ty)| { - let local_id = self.result.locals.alloc(Local { ty }); + let local_id = self.result.locals.alloc(Local { ty: ty.store() }); self.drop_scopes.last_mut().unwrap().locals.push(local_id); if let Pat::Bind { id, subpat: None } = self.body[it] && matches!( @@ -1939,9 +1930,10 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { continue; } if !self.result.binding_locals.contains_idx(id) { - self.result - .binding_locals - .insert(id, self.result.locals.alloc(Local { ty: self.infer[id] })); + self.result.binding_locals.insert( + id, + self.result.locals.alloc(Local { ty: self.infer.binding_ty(id).store() }), + ); } } let mut current = self.result.start_block; @@ -1976,7 +1968,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { Ok(current) } - fn binding_local(&self, b: BindingId) -> Result<'db, LocalId<'db>> { + fn binding_local(&self, b: BindingId) -> Result<'db, LocalId> { match self.result.binding_locals.get(b) { Some(it) => Ok(*it), None => { @@ -2025,9 +2017,9 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn drop_until_scope( &mut self, scope_index: usize, - mut current: BasicBlockId<'db>, + mut current: BasicBlockId, span: MirSpan, - ) -> BasicBlockId<'db> { + ) -> BasicBlockId { for scope in self.drop_scopes[scope_index..].to_vec().iter().rev() { self.emit_drop_and_storage_dead_for_scope(scope, &mut current, span); } @@ -2047,9 +2039,9 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { /// Don't call directly fn pop_drop_scope_internal( &mut self, - mut current: BasicBlockId<'db>, + mut current: BasicBlockId, span: MirSpan, - ) -> BasicBlockId<'db> { + ) -> BasicBlockId { let scope = self.drop_scopes.pop().unwrap(); self.emit_drop_and_storage_dead_for_scope(&scope, &mut current, span); current @@ -2057,9 +2049,9 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn pop_drop_scope_assert_finished( &mut self, - mut current: 
BasicBlockId<'db>, + mut current: BasicBlockId, span: MirSpan, - ) -> Result<'db, BasicBlockId<'db>> { + ) -> Result<'db, BasicBlockId> { current = self.pop_drop_scope_internal(current, span); if !self.drop_scopes.is_empty() { implementation_error!("Mismatched count between drop scope push and pops"); @@ -2069,12 +2061,13 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn emit_drop_and_storage_dead_for_scope( &mut self, - scope: &DropScope<'db>, - current: &mut Idx>, + scope: &DropScope, + current: &mut Idx, span: MirSpan, ) { for &l in scope.locals.iter().rev() { - if !self.infcx.type_is_copy_modulo_regions(self.env, self.result.locals[l].ty) { + if !self.infcx.type_is_copy_modulo_regions(self.env, self.result.locals[l].ty.as_ref()) + { let prev = std::mem::replace(current, self.new_basic_block()); self.set_terminator( prev, @@ -2112,36 +2105,37 @@ fn cast_kind<'db>( pub fn mir_body_for_closure_query<'db>( db: &'db dyn HirDatabase, closure: InternedClosureId, -) -> Result<'db, Arc>> { +) -> Result<'db, Arc> { let InternedClosure(owner, expr) = db.lookup_intern_closure(closure); let body = db.body(owner); let infer = InferenceResult::for_body(db, owner); let Expr::Closure { args, body: root, .. } = &body[expr] else { implementation_error!("closure expression is not closure"); }; - let crate::next_solver::TyKind::Closure(_, substs) = infer[expr].kind() else { + let crate::next_solver::TyKind::Closure(_, substs) = infer.expr_ty(expr).kind() else { implementation_error!("closure expression is not closure"); }; let (captures, kind) = infer.closure_info(closure); let mut ctx = MirLowerCtx::new(db, owner, &body, infer); // 0 is return local - ctx.result.locals.alloc(Local { ty: infer[*root] }); + ctx.result.locals.alloc(Local { ty: infer.expr_ty(*root).store() }); let closure_local = ctx.result.locals.alloc(Local { ty: match kind { - FnTrait::FnOnce | FnTrait::AsyncFnOnce => infer[expr], + FnTrait::FnOnce | FnTrait::AsyncFnOnce => infer.expr_ty(expr), FnTrait::FnMut | FnTrait::AsyncFnMut => Ty::new_ref( ctx.interner(), Region::error(ctx.interner()), - infer[expr], + infer.expr_ty(expr), Mutability::Mut, ), FnTrait::Fn | FnTrait::AsyncFn => Ty::new_ref( ctx.interner(), Region::error(ctx.interner()), - infer[expr], + infer.expr_ty(expr), Mutability::Not, ), - }, + } + .store(), }); ctx.result.param_locals.push(closure_local); let Some(sig) = @@ -2151,7 +2145,7 @@ pub fn mir_body_for_closure_query<'db>( }; let resolver_guard = ctx.resolver.update_to_inner_scope(db, owner, expr); let current = ctx.lower_params_and_bindings( - args.iter().zip(sig.skip_binder().inputs().iter()).map(|(it, y)| (*it, y)), + args.iter().zip(sig.skip_binder().inputs().iter()).map(|(it, y)| (*it, *y)), None, |_| true, )?; @@ -2160,8 +2154,7 @@ pub fn mir_body_for_closure_query<'db>( let current = ctx.pop_drop_scope_assert_finished(current, root.into())?; ctx.set_terminator(current, TerminatorKind::Return, (*root).into()); } - let mut upvar_map: FxHashMap, Vec<(&CapturedItem<'_>, usize)>> = - FxHashMap::default(); + let mut upvar_map: FxHashMap> = FxHashMap::default(); for (i, capture) in captures.iter().enumerate() { let local = ctx.binding_local(capture.place.local)?; upvar_map.entry(local).or_default().push((capture, i)); @@ -2226,7 +2219,7 @@ pub fn mir_body_for_closure_query<'db>( pub fn mir_body_query<'db>( db: &'db dyn HirDatabase, def: DefWithBodyId, -) -> Result<'db, Arc>> { +) -> Result<'db, Arc> { let krate = def.krate(db); let edition = krate.data(db).edition; let detail = match def { @@ -2261,8 +2254,9 @@ pub fn 
mir_body_query<'db>( pub(crate) fn mir_body_cycle_result<'db>( _db: &'db dyn HirDatabase, + _: salsa::Id, _def: DefWithBodyId, -) -> Result<'db, Arc>> { +) -> Result<'db, Arc> { Err(MirLowerError::Loop) } @@ -2270,17 +2264,17 @@ pub fn lower_to_mir<'db>( db: &'db dyn HirDatabase, owner: DefWithBodyId, body: &Body, - infer: &InferenceResult<'db>, + infer: &InferenceResult, // FIXME: root_expr should always be the body.body_expr, but since `X` in `[(); X]` doesn't have its own specific body yet, we // need to take this input explicitly. root_expr: ExprId, -) -> Result<'db, MirBody<'db>> { +) -> Result<'db, MirBody> { if infer.type_mismatches().next().is_some() || infer.is_erroneous() { return Err(MirLowerError::HasErrors); } let mut ctx = MirLowerCtx::new(db, owner, body, infer); // 0 is return local - ctx.result.locals.alloc(Local { ty: ctx.expr_ty_after_adjustments(root_expr) }); + ctx.result.locals.alloc(Local { ty: ctx.expr_ty_after_adjustments(root_expr).store() }); let binding_picker = |b: BindingId| { let owner = ctx.body.binding_owner(b); if root_expr == body.body_expr { owner.is_none() } else { owner == Some(root_expr) } @@ -2293,7 +2287,7 @@ pub fn lower_to_mir<'db>( if let DefWithBodyId::FunctionId(fid) = owner { let callable_sig = db.callable_item_signature(fid.into()).instantiate_identity().skip_binder(); - let mut params = callable_sig.inputs().iter(); + let mut params = callable_sig.inputs().iter().copied(); let self_param = body.self_param.and_then(|id| Some((id, params.next()?))); break 'b ctx.lower_params_and_bindings( body.params.iter().zip(params).map(|(it, y)| (*it, y)), diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs index 40c6c5de795a3..cf05ec27ac37e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs @@ -20,8 +20,8 @@ impl<'db> MirLowerCtx<'_, 'db> { fn lower_expr_to_some_place_without_adjust( &mut self, expr_id: ExprId, - prev_block: BasicBlockId<'db>, - ) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> { + prev_block: BasicBlockId, + ) -> Result<'db, Option<(Place, BasicBlockId)>> { let ty = self.expr_ty_without_adjust(expr_id); let place = self.temp(ty, prev_block, expr_id.into())?; let Some(current) = @@ -35,12 +35,12 @@ impl<'db> MirLowerCtx<'_, 'db> { fn lower_expr_to_some_place_with_adjust( &mut self, expr_id: ExprId, - prev_block: BasicBlockId<'db>, - adjustments: &[Adjustment<'db>], - ) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> { + prev_block: BasicBlockId, + adjustments: &[Adjustment], + ) -> Result<'db, Option<(Place, BasicBlockId)>> { let ty = adjustments .last() - .map(|it| it.target) + .map(|it| it.target.as_ref()) .unwrap_or_else(|| self.expr_ty_without_adjust(expr_id)); let place = self.temp(ty, prev_block, expr_id.into())?; let Some(current) = @@ -53,11 +53,11 @@ impl<'db> MirLowerCtx<'_, 'db> { pub(super) fn lower_expr_as_place_with_adjust( &mut self, - current: BasicBlockId<'db>, + current: BasicBlockId, expr_id: ExprId, upgrade_rvalue: bool, - adjustments: &[Adjustment<'db>], - ) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> { + adjustments: &[Adjustment], + ) -> Result<'db, Option<(Place, BasicBlockId)>> { let try_rvalue = |this: &mut MirLowerCtx<'_, 'db>| { if !upgrade_rvalue { return Err(MirLowerError::MutatingRvalue); @@ -93,9 +93,9 @@ impl<'db> MirLowerCtx<'_, 'db> { current, r, rest.last() - .map(|it| it.target) + .map(|it| 
it.target.as_ref()) .unwrap_or_else(|| self.expr_ty_without_adjust(expr_id)), - last.target, + last.target.as_ref(), expr_id.into(), match od.0 { Some(Mutability::Mut) => true, @@ -115,10 +115,10 @@ impl<'db> MirLowerCtx<'_, 'db> { pub(super) fn lower_expr_as_place( &mut self, - current: BasicBlockId<'db>, + current: BasicBlockId, expr_id: ExprId, upgrade_rvalue: bool, - ) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> { + ) -> Result<'db, Option<(Place, BasicBlockId)>> { match self.infer.expr_adjustments.get(&expr_id) { Some(a) => self.lower_expr_as_place_with_adjust(current, expr_id, upgrade_rvalue, a), None => self.lower_expr_as_place_without_adjust(current, expr_id, upgrade_rvalue), @@ -127,10 +127,10 @@ impl<'db> MirLowerCtx<'_, 'db> { pub(super) fn lower_expr_as_place_without_adjust( &mut self, - current: BasicBlockId<'db>, + current: BasicBlockId, expr_id: ExprId, upgrade_rvalue: bool, - ) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> { + ) -> Result<'db, Option<(Place, BasicBlockId)>> { let try_rvalue = |this: &mut MirLowerCtx<'_, 'db>| { if !upgrade_rvalue { return Err(MirLowerError::MutatingRvalue); @@ -159,7 +159,7 @@ impl<'db> MirLowerCtx<'_, 'db> { ty, Mutability::Not, ); - let temp: Place<'db> = self.temp(ref_ty, current, expr_id.into())?.into(); + let temp: Place = self.temp(ref_ty, current, expr_id.into())?.into(); self.push_assignment( current, temp, @@ -279,21 +279,21 @@ impl<'db> MirLowerCtx<'_, 'db> { fn lower_overloaded_index( &mut self, - current: BasicBlockId<'db>, - place: Place<'db>, + current: BasicBlockId, + place: Place, base_ty: Ty<'db>, result_ty: Ty<'db>, - index_operand: Operand<'db>, + index_operand: Operand, span: MirSpan, index_fn: (FunctionId, GenericArgs<'db>), - ) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> { + ) -> Result<'db, Option<(Place, BasicBlockId)>> { let mutability = match base_ty.as_reference() { Some((_, _, mutability)) => mutability, None => Mutability::Not, }; let result_ref = Ty::new_ref(self.interner(), Region::error(self.interner()), result_ty, mutability); - let mut result: Place<'db> = self.temp(result_ref, current, span)?.into(); + let mut result: Place = self.temp(result_ref, current, span)?.into(); let index_fn_op = Operand::const_zst(Ty::new_fn_def( self.interner(), CallableDefId::FunctionId(index_fn.0).into(), @@ -316,13 +316,13 @@ impl<'db> MirLowerCtx<'_, 'db> { fn lower_overloaded_deref( &mut self, - current: BasicBlockId<'db>, - place: Place<'db>, + current: BasicBlockId, + place: Place, source_ty: Ty<'db>, target_ty: Ty<'db>, span: MirSpan, mutability: bool, - ) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> { + ) -> Result<'db, Option<(Place, BasicBlockId)>> { let lang_items = self.lang_items(); let (mutability, trait_lang_item, trait_method_name, borrow_kind) = if !mutability { ( @@ -342,7 +342,7 @@ impl<'db> MirLowerCtx<'_, 'db> { let error_region = Region::error(self.interner()); let ty_ref = Ty::new_ref(self.interner(), error_region, source_ty, mutability); let target_ty_ref = Ty::new_ref(self.interner(), error_region, target_ty, mutability); - let ref_place: Place<'db> = self.temp(ty_ref, current, span)?.into(); + let ref_place: Place = self.temp(ty_ref, current, span)?.into(); self.push_assignment(current, ref_place, Rvalue::Ref(borrow_kind, place), span); let deref_trait = trait_lang_item.ok_or(MirLowerError::LangItemNotFound)?; let deref_fn = deref_trait @@ -352,9 +352,9 @@ impl<'db> MirLowerCtx<'_, 'db> { let deref_fn_op = Operand::const_zst(Ty::new_fn_def( self.interner(), 
CallableDefId::FunctionId(deref_fn).into(), - GenericArgs::new_from_iter(self.interner(), [source_ty.into()]), + GenericArgs::new_from_slice(&[source_ty.into()]), )); - let mut result: Place<'db> = self.temp(target_ty_ref, current, span)?.into(); + let mut result: Place = self.temp(target_ty_ref, current, span)?.into(); let Some(current) = self.lower_call( deref_fn_op, Box::new([Operand { kind: OperandKind::Copy(ref_place), span: None }]), diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs index c3a4814a3ab47..a8aacbff16fa8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs @@ -1,7 +1,7 @@ //! MIR lowering for patterns use hir_def::{hir::ExprId, signatures::VariantFields}; -use rustc_type_ir::inherent::{IntoKind, SliceLike, Ty as _}; +use rustc_type_ir::inherent::{IntoKind, Ty as _}; use crate::{ BindingMode, @@ -63,11 +63,11 @@ impl<'db> MirLowerCtx<'_, 'db> { /// so it should be an empty block. pub(super) fn pattern_match( &mut self, - current: BasicBlockId<'db>, - current_else: Option>, - cond_place: Place<'db>, + current: BasicBlockId, + current_else: Option, + cond_place: Place, pattern: PatId, - ) -> Result<'db, (BasicBlockId<'db>, Option>)> { + ) -> Result<'db, (BasicBlockId, Option)> { let (current, current_else) = self.pattern_match_inner( current, current_else, @@ -87,10 +87,10 @@ impl<'db> MirLowerCtx<'_, 'db> { pub(super) fn pattern_match_assignment( &mut self, - current: BasicBlockId<'db>, - value: Place<'db>, + current: BasicBlockId, + value: Place, pattern: PatId, - ) -> Result<'db, BasicBlockId<'db>> { + ) -> Result<'db, BasicBlockId> { let (current, _) = self.pattern_match_inner(current, None, value, pattern, MatchingMode::Assign)?; Ok(current) @@ -99,9 +99,9 @@ impl<'db> MirLowerCtx<'_, 'db> { pub(super) fn match_self_param( &mut self, id: BindingId, - current: BasicBlockId<'db>, - local: LocalId<'db>, - ) -> Result<'db, (BasicBlockId<'db>, Option>)> { + current: BasicBlockId, + local: LocalId, + ) -> Result<'db, (BasicBlockId, Option)> { self.pattern_match_binding( id, BindingMode::Move, @@ -114,12 +114,12 @@ impl<'db> MirLowerCtx<'_, 'db> { fn pattern_match_inner( &mut self, - mut current: BasicBlockId<'db>, - mut current_else: Option>, - mut cond_place: Place<'db>, + mut current: BasicBlockId, + mut current_else: Option, + mut cond_place: Place, pattern: PatId, mode: MatchingMode, - ) -> Result<'db, (BasicBlockId<'db>, Option>)> { + ) -> Result<'db, (BasicBlockId, Option)> { let cnt = self.infer.pat_adjustments.get(&pattern).map(|x| x.len()).unwrap_or_default(); cond_place.projection = self.result.projection_store.intern( cond_place @@ -135,7 +135,7 @@ impl<'db> MirLowerCtx<'_, 'db> { Pat::Missing => return Err(MirLowerError::IncompletePattern), Pat::Wild => (current, current_else), Pat::Tuple { args, ellipsis } => { - let subst = match self.infer[pattern].kind() { + let subst = match self.infer.pat_ty(pattern).kind() { TyKind::Tuple(s) => s, _ => { return Err(MirLowerError::TypeError( @@ -209,10 +209,11 @@ impl<'db> MirLowerCtx<'_, 'db> { } Pat::Range { start, end, range_type: _ } => { let mut add_check = |l: &ExprId, binop| -> Result<'db, ()> { - let lv = self.lower_literal_or_const_to_operand(self.infer[pattern], l)?; + let lv = + self.lower_literal_or_const_to_operand(self.infer.pat_ty(pattern), l)?; let else_target = 
*current_else.get_or_insert_with(|| self.new_basic_block()); let next = self.new_basic_block(); - let discr: Place<'db> = + let discr: Place = self.temp(Ty::new_bool(self.interner()), current, pattern.into())?.into(); self.push_assignment( current, @@ -249,9 +250,9 @@ impl<'db> MirLowerCtx<'_, 'db> { Pat::Slice { prefix, slice, suffix } => { if mode == MatchingMode::Check { // emit runtime length check for slice - if let TyKind::Slice(_) = self.infer[pattern].kind() { + if let TyKind::Slice(_) = self.infer.pat_ty(pattern).kind() { let pattern_len = prefix.len() + suffix.len(); - let place_len: Place<'db> = self + let place_len: Place = self .temp(Ty::new_usize(self.interner()), current, pattern.into())? .into(); self.push_assignment( @@ -285,7 +286,7 @@ impl<'db> MirLowerCtx<'_, 'db> { MemoryMap::default(), Ty::new_usize(self.interner()), ); - let discr: Place<'db> = self + let discr: Place = self .temp(Ty::new_bool(self.interner()), current, pattern.into())? .into(); self.push_assignment( @@ -398,15 +399,15 @@ impl<'db> MirLowerCtx<'_, 'db> { break 'b (c, x.1); } if let ResolveValueResult::ValueNs(ValueNs::ConstId(c), _) = pr { - break 'b (c, GenericArgs::new_from_iter(self.interner(), [])); + break 'b (c, GenericArgs::empty(self.interner())); } not_supported!("path in pattern position that is not const or variant") }; - let tmp: Place<'db> = - self.temp(self.infer[pattern], current, pattern.into())?.into(); + let tmp: Place = + self.temp(self.infer.pat_ty(pattern), current, pattern.into())?.into(); let span = pattern.into(); self.lower_const(c.into(), current, tmp, subst, span)?; - let tmp2: Place<'db> = + let tmp2: Place = self.temp(Ty::new_bool(self.interner()), current, pattern.into())?.into(); self.push_assignment( current, @@ -434,7 +435,7 @@ impl<'db> MirLowerCtx<'_, 'db> { Pat::Lit(l) => match &self.body[*l] { Expr::Literal(l) => { if mode == MatchingMode::Check { - let c = self.lower_literal_to_operand(self.infer[pattern], l)?; + let c = self.lower_literal_to_operand(self.infer.pat_ty(pattern), l)?; self.pattern_match_const(current_else, current, c, cond_place, pattern)? 
} else { (current, current_else) @@ -506,11 +507,11 @@ impl<'db> MirLowerCtx<'_, 'db> { &mut self, id: BindingId, mode: BindingMode, - cond_place: Place<'db>, + cond_place: Place, span: MirSpan, - current: BasicBlockId<'db>, - current_else: Option>, - ) -> Result<'db, (BasicBlockId<'db>, Option>)> { + current: BasicBlockId, + current_else: Option, + ) -> Result<'db, (BasicBlockId, Option)> { let target_place = self.binding_local(id)?; self.push_storage_live(id, current)?; self.push_match_assignment(current, target_place, mode, cond_place, span); @@ -519,10 +520,10 @@ impl<'db> MirLowerCtx<'_, 'db> { fn push_match_assignment( &mut self, - current: BasicBlockId<'db>, - target_place: LocalId<'db>, + current: BasicBlockId, + target_place: LocalId, mode: BindingMode, - cond_place: Place<'db>, + cond_place: Place, span: MirSpan, ) { self.push_assignment( @@ -545,15 +546,15 @@ impl<'db> MirLowerCtx<'_, 'db> { fn pattern_match_const( &mut self, - current_else: Option>, - current: BasicBlockId<'db>, - c: Operand<'db>, - cond_place: Place<'db>, + current_else: Option, + current: BasicBlockId, + c: Operand, + cond_place: Place, pattern: Idx, - ) -> Result<'db, (BasicBlockId<'db>, Option>)> { + ) -> Result<'db, (BasicBlockId, Option)> { let then_target = self.new_basic_block(); let else_target = current_else.unwrap_or_else(|| self.new_basic_block()); - let discr: Place<'db> = + let discr: Place = self.temp(Ty::new_bool(self.interner()), current, pattern.into())?.into(); self.push_assignment( current, @@ -579,14 +580,14 @@ impl<'db> MirLowerCtx<'_, 'db> { fn pattern_matching_variant( &mut self, - cond_place: Place<'db>, + cond_place: Place, variant: VariantId, - mut current: BasicBlockId<'db>, + mut current: BasicBlockId, span: MirSpan, - mut current_else: Option>, + mut current_else: Option, shape: AdtPatternShape<'_>, mode: MatchingMode, - ) -> Result<'db, (BasicBlockId<'db>, Option>)> { + ) -> Result<'db, (BasicBlockId, Option)> { Ok(match variant { VariantId::EnumVariantId(v) => { if mode == MatchingMode::Check { @@ -635,11 +636,11 @@ impl<'db> MirLowerCtx<'_, 'db> { shape: AdtPatternShape<'_>, variant_data: &VariantFields, v: VariantId, - current: BasicBlockId<'db>, - current_else: Option>, - cond_place: &Place<'db>, + current: BasicBlockId, + current_else: Option, + cond_place: &Place, mode: MatchingMode, - ) -> Result<'db, (BasicBlockId<'db>, Option>)> { + ) -> Result<'db, (BasicBlockId, Option)> { Ok(match shape { AdtPatternShape::Record { args } => { let it = args @@ -678,12 +679,12 @@ impl<'db> MirLowerCtx<'_, 'db> { fn pattern_match_adt( &mut self, - mut current: BasicBlockId<'db>, - mut current_else: Option>, - args: impl Iterator, PatId)>, - cond_place: &Place<'db>, + mut current: BasicBlockId, + mut current_else: Option, + args: impl Iterator, + cond_place: &Place, mode: MatchingMode, - ) -> Result<'db, (BasicBlockId<'db>, Option>)> { + ) -> Result<'db, (BasicBlockId, Option)> { for (proj, arg) in args { let cond_place = cond_place.project(proj, &mut self.result.projection_store); (current, current_else) = @@ -694,14 +695,14 @@ impl<'db> MirLowerCtx<'_, 'db> { fn pattern_match_tuple_like( &mut self, - current: BasicBlockId<'db>, - current_else: Option>, + current: BasicBlockId, + current_else: Option, args: &[PatId], ellipsis: Option, - fields: impl DoubleEndedIterator> + Clone, - cond_place: &Place<'db>, + fields: impl DoubleEndedIterator + Clone, + cond_place: &Place, mode: MatchingMode, - ) -> Result<'db, (BasicBlockId<'db>, Option>)> { + ) -> Result<'db, (BasicBlockId, Option)> { let 
(al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize)); let it = al .iter() diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs index b67365c344a6d..5752a3d7fae4b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs @@ -8,7 +8,7 @@ //! So the monomorphization should be called even if the substitution is empty. use hir_def::DefWithBodyId; -use rustc_type_ir::inherent::{IntoKind, SliceLike}; +use rustc_type_ir::inherent::IntoKind; use rustc_type_ir::{ FallibleTypeFolder, TypeFlags, TypeFoldable, TypeSuperFoldable, TypeVisitableExt, }; @@ -16,7 +16,8 @@ use triomphe::Arc; use crate::{ ParamEnvAndCrate, - next_solver::{Const, ConstKind, Region, RegionKind}, + next_solver::{Const, ConstKind, Region, RegionKind, StoredConst, StoredGenericArgs, StoredTy}, + traits::StoredParamEnvAndCrate, }; use crate::{ db::{HirDatabase, InternedClosureId}, @@ -37,7 +38,7 @@ struct Filler<'db> { } impl<'db> FallibleTypeFolder> for Filler<'db> { - type Error = MirLowerError<'db>; + type Error = MirLowerError; fn cx(&self) -> DbInterner<'db> { self.infcx.interner @@ -69,7 +70,7 @@ impl<'db> FallibleTypeFolder> for Filler<'db> { .get(param.index as usize) .and_then(|arg| arg.ty()) .ok_or_else(|| { - MirLowerError::GenericArgNotProvided(param.id.into(), self.subst) + MirLowerError::GenericArgNotProvided(param.id.into(), self.subst.store()) })?), _ => ty.try_super_fold_with(self), } @@ -79,22 +80,18 @@ impl<'db> FallibleTypeFolder> for Filler<'db> { let ConstKind::Param(param) = ct.kind() else { return ct.try_super_fold_with(self); }; - self.subst - .as_slice() - .get(param.index as usize) - .and_then(|arg| arg.konst()) - .ok_or_else(|| MirLowerError::GenericArgNotProvided(param.id.into(), self.subst)) + self.subst.as_slice().get(param.index as usize).and_then(|arg| arg.konst()).ok_or_else( + || MirLowerError::GenericArgNotProvided(param.id.into(), self.subst.store()), + ) } fn try_fold_region(&mut self, region: Region<'db>) -> Result, Self::Error> { let RegionKind::ReEarlyParam(param) = region.kind() else { return Ok(region); }; - self.subst - .as_slice() - .get(param.index as usize) - .and_then(|arg| arg.region()) - .ok_or_else(|| MirLowerError::GenericArgNotProvided(param.id.into(), self.subst)) + self.subst.as_slice().get(param.index as usize).and_then(|arg| arg.region()).ok_or_else( + || MirLowerError::GenericArgNotProvided(param.id.into(), self.subst.store()), + ) } } @@ -105,33 +102,50 @@ impl<'db> Filler<'db> { Self { infcx, trait_env: env, subst } } - fn fill> + Copy>( - &mut self, - t: &mut T, - ) -> Result<(), MirLowerError<'db>> { + fn fill_ty(&mut self, t: &mut StoredTy) -> Result<(), MirLowerError> { // Can't deep normalized as that'll try to normalize consts and fail. - *t = t.try_fold_with(self)?; - if references_non_lt_error(t) { + *t = t.as_ref().try_fold_with(self)?.store(); + if references_non_lt_error(&t.as_ref()) { Err(MirLowerError::NotSupported("monomorphization resulted in errors".to_owned())) } else { Ok(()) } } - fn fill_operand(&mut self, op: &mut Operand<'db>) -> Result<(), MirLowerError<'db>> { + fn fill_const(&mut self, t: &mut StoredConst) -> Result<(), MirLowerError> { + // Can't deep normalized as that'll try to normalize consts and fail. 
+ *t = t.as_ref().try_fold_with(self)?.store(); + if references_non_lt_error(&t.as_ref()) { + Err(MirLowerError::NotSupported("monomorphization resulted in errors".to_owned())) + } else { + Ok(()) + } + } + + fn fill_args(&mut self, t: &mut StoredGenericArgs) -> Result<(), MirLowerError> { + // Can't deep normalized as that'll try to normalize consts and fail. + *t = t.as_ref().try_fold_with(self)?.store(); + if references_non_lt_error(&t.as_ref()) { + Err(MirLowerError::NotSupported("monomorphization resulted in errors".to_owned())) + } else { + Ok(()) + } + } + + fn fill_operand(&mut self, op: &mut Operand) -> Result<(), MirLowerError> { match &mut op.kind { OperandKind::Constant { konst, ty } => { - self.fill(konst)?; - self.fill(ty)?; + self.fill_const(konst)?; + self.fill_ty(ty)?; } OperandKind::Copy(_) | OperandKind::Move(_) | OperandKind::Static(_) => (), } Ok(()) } - fn fill_body(&mut self, body: &mut MirBody<'db>) -> Result<(), MirLowerError<'db>> { + fn fill_body(&mut self, body: &mut MirBody) -> Result<(), MirLowerError> { for (_, l) in body.locals.iter_mut() { - self.fill(&mut l.ty)?; + self.fill_ty(&mut l.ty)?; } for (_, bb) in body.basic_blocks.iter_mut() { for statement in &mut bb.statements { @@ -144,20 +158,20 @@ impl<'db> Filler<'db> { match ak { super::AggregateKind::Array(ty) | super::AggregateKind::Tuple(ty) - | super::AggregateKind::Closure(ty) => self.fill(ty)?, - super::AggregateKind::Adt(_, subst) => self.fill(subst)?, + | super::AggregateKind::Closure(ty) => self.fill_ty(ty)?, + super::AggregateKind::Adt(_, subst) => self.fill_args(subst)?, super::AggregateKind::Union(_, _) => (), } } Rvalue::ShallowInitBox(_, ty) | Rvalue::ShallowInitBoxWithAlloc(ty) => { - self.fill(ty)?; + self.fill_ty(ty)?; } Rvalue::Use(op) => { self.fill_operand(op)?; } Rvalue::Repeat(op, len) => { self.fill_operand(op)?; - self.fill(len)?; + self.fill_const(len)?; } Rvalue::Ref(_, _) | Rvalue::Len(_) @@ -208,35 +222,36 @@ impl<'db> Filler<'db> { } } -pub fn monomorphized_mir_body_query<'db>( - db: &'db dyn HirDatabase, +pub fn monomorphized_mir_body_query( + db: &dyn HirDatabase, owner: DefWithBodyId, - subst: GenericArgs<'db>, - trait_env: ParamEnvAndCrate<'db>, -) -> Result>, MirLowerError<'db>> { - let mut filler = Filler::new(db, trait_env, subst); + subst: StoredGenericArgs, + trait_env: StoredParamEnvAndCrate, +) -> Result, MirLowerError> { + let mut filler = Filler::new(db, trait_env.as_ref(), subst.as_ref()); let body = db.mir_body(owner)?; let mut body = (*body).clone(); filler.fill_body(&mut body)?; Ok(Arc::new(body)) } -pub(crate) fn monomorphized_mir_body_cycle_result<'db>( - _db: &'db dyn HirDatabase, +pub(crate) fn monomorphized_mir_body_cycle_result( + _db: &dyn HirDatabase, + _: salsa::Id, _: DefWithBodyId, - _: GenericArgs<'db>, - _: ParamEnvAndCrate<'db>, -) -> Result>, MirLowerError<'db>> { + _: StoredGenericArgs, + _: StoredParamEnvAndCrate, +) -> Result, MirLowerError> { Err(MirLowerError::Loop) } -pub fn monomorphized_mir_body_for_closure_query<'db>( - db: &'db dyn HirDatabase, +pub fn monomorphized_mir_body_for_closure_query( + db: &dyn HirDatabase, closure: InternedClosureId, - subst: GenericArgs<'db>, - trait_env: ParamEnvAndCrate<'db>, -) -> Result>, MirLowerError<'db>> { - let mut filler = Filler::new(db, trait_env, subst); + subst: StoredGenericArgs, + trait_env: StoredParamEnvAndCrate, +) -> Result, MirLowerError> { + let mut filler = Filler::new(db, trait_env.as_ref(), subst.as_ref()); let body = db.mir_body_for_closure(closure)?; let mut body = 
(*body).clone(); filler.fill_body(&mut body)?; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs index 0c5a64935e498..96b90a3f40744 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs @@ -36,8 +36,8 @@ macro_rules! wln { }; } -impl<'db> MirBody<'db> { - pub fn pretty_print(&self, db: &'db dyn HirDatabase, display_target: DisplayTarget) -> String { +impl MirBody { + pub fn pretty_print(&self, db: &dyn HirDatabase, display_target: DisplayTarget) -> String { let hir_body = db.body(self.owner); let mut ctx = MirPrettyCtx::new(self, &hir_body, db, display_target); ctx.for_body(|this| match ctx.body.owner { @@ -80,7 +80,7 @@ impl<'db> MirBody<'db> { // String with lines is rendered poorly in `dbg` macros, which I use very much, so this // function exists to solve that. - pub fn dbg(&self, db: &'db dyn HirDatabase, display_target: DisplayTarget) -> impl Debug { + pub fn dbg(&self, db: &dyn HirDatabase, display_target: DisplayTarget) -> impl Debug { struct StringDbg(String); impl Debug for StringDbg { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { @@ -92,12 +92,12 @@ impl<'db> MirBody<'db> { } struct MirPrettyCtx<'a, 'db> { - body: &'a MirBody<'db>, + body: &'a MirBody, hir_body: &'a Body, db: &'db dyn HirDatabase, result: String, indent: String, - local_to_binding: ArenaMap, BindingId>, + local_to_binding: ArenaMap, display_target: DisplayTarget, } @@ -113,12 +113,12 @@ impl Write for MirPrettyCtx<'_, '_> { } } -enum LocalName<'db> { - Unknown(LocalId<'db>), - Binding(Name, LocalId<'db>), +enum LocalName { + Unknown(LocalId), + Binding(Name, LocalId), } -impl<'db> HirDisplay<'db> for LocalName<'db> { +impl<'db> HirDisplay<'db> for LocalName { fn hir_fmt( &self, f: &mut crate::display::HirFormatter<'_, 'db>, @@ -179,7 +179,7 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> { } fn new( - body: &'a MirBody<'db>, + body: &'a MirBody, hir_body: &'a Body, db: &'db dyn HirDatabase, display_target: DisplayTarget, @@ -211,19 +211,19 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> { self, "let {}: {};", self.local_name(id).display_test(self.db, self.display_target), - self.hir_display(&local.ty) + self.hir_display(&local.ty.as_ref()) ); } } - fn local_name(&self, local: LocalId<'db>) -> LocalName<'db> { + fn local_name(&self, local: LocalId) -> LocalName { match self.local_to_binding.get(local) { Some(b) => LocalName::Binding(self.hir_body[*b].name.clone(), local), None => LocalName::Unknown(local), } } - fn basic_block_id(&self, basic_block_id: BasicBlockId<'db>) -> String { + fn basic_block_id(&self, basic_block_id: BasicBlockId) -> String { format!("'bb{}", u32::from(basic_block_id.into_raw())) } @@ -311,12 +311,8 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> { } } - fn place(&mut self, p: &Place<'db>) { - fn f<'db>( - this: &mut MirPrettyCtx<'_, 'db>, - local: LocalId<'db>, - projections: &[PlaceElem<'db>], - ) { + fn place(&mut self, p: &Place) { + fn f<'db>(this: &mut MirPrettyCtx<'_, 'db>, local: LocalId, projections: &[PlaceElem]) { let Some((last, head)) = projections.split_last() else { // no projection w!(this, "{}", this.local_name(local).display_test(this.db, this.display_target)); @@ -376,19 +372,21 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> { f(self, p.local, p.projection.lookup(&self.body.projection_store)); } - fn operand(&mut self, r: &Operand<'db>) { + fn operand(&mut self, r: &Operand) { match &r.kind { OperandKind::Copy(p) | 
OperandKind::Move(p) => { // MIR at the time of writing doesn't have difference between move and copy, so we show them // equally. Feel free to change it. self.place(p); } - OperandKind::Constant { konst, .. } => w!(self, "Const({})", self.hir_display(konst)), + OperandKind::Constant { konst, .. } => { + w!(self, "Const({})", self.hir_display(&konst.as_ref())) + } OperandKind::Static(s) => w!(self, "Static({:?})", s), } } - fn rvalue(&mut self, r: &Rvalue<'db>) { + fn rvalue(&mut self, r: &Rvalue) { match r { Rvalue::Use(op) => self.operand(op), Rvalue::Ref(r, p) => { @@ -415,7 +413,7 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> { Rvalue::Repeat(op, len) => { w!(self, "["); self.operand(op); - w!(self, "; {}]", len.display_test(self.db, self.display_target)); + w!(self, "; {}]", len.as_ref().display_test(self.db, self.display_target)); } Rvalue::Aggregate(AggregateKind::Adt(_, _), it) => { w!(self, "Adt("); @@ -440,7 +438,7 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> { Rvalue::Cast(ck, op, ty) => { w!(self, "Cast({ck:?}, "); self.operand(op); - w!(self, ", {})", self.hir_display(ty)); + w!(self, ", {})", self.hir_display(&ty.as_ref())); } Rvalue::CheckedBinaryOp(b, o1, o2) => { self.operand(o1); @@ -478,7 +476,7 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> { } } - fn operand_list(&mut self, it: &[Operand<'db>]) { + fn operand_list(&mut self, it: &[Operand]) { let mut it = it.iter(); if let Some(first) = it.next() { self.operand(first); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver.rs index 8c52a847d1e91..605e31404c575 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver.rs @@ -1,9 +1,15 @@ //! Things relevant to the next trait solver. +// Note: in interned types defined in this module, we generally treat the lifetime as advisory +// and transmute it as needed. This is because no real memory unsafety can be caused from an +// incorrect lifetime here. 
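A minimal standalone sketch of the pattern this note describes (hypothetical `Kind`/`Stored` types, not the actual definitions in this module): the advisory `'db` parameter is erased to `'static` for storage, and a caller-chosen lifetime is reattached on access.

    use std::marker::PhantomData;
    use std::sync::Arc;

    // A payload whose `'db` parameter is advisory only: it owns its data (here via `Arc`),
    // so changing the lifetime cannot create a dangling reference.
    #[derive(Clone)]
    struct Kind<'db> {
        name: Arc<str>,
        _db: PhantomData<&'db ()>,
    }

    // Lifetime-erased storage, in the spirit of the `Stored*` wrappers used in this diff.
    struct Stored(Kind<'static>);

    impl Stored {
        fn store<'db>(kind: Kind<'db>) -> Stored {
            // Erase the advisory lifetime. Per the note above, this is acceptable because
            // the payload never actually borrows database-owned memory for `'db`.
            Stored(unsafe { std::mem::transmute::<Kind<'db>, Kind<'static>>(kind) })
        }

        fn as_ref<'db>(&'db self) -> Kind<'db> {
            // Going back from `'static` to a shorter `'db` is ordinary subtyping.
            self.0.clone()
        }
    }

The `.store()`/`.as_ref()` conversions threaded through the MIR and const changes earlier in this diff appear to follow the same shape.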
+ pub mod abi; +mod binder; mod consts; mod def_id; pub mod fold; +pub mod format_proof_tree; pub mod fulfill; mod generic_arg; pub mod generics; @@ -21,6 +27,9 @@ mod structural_normalize; mod ty; pub mod util; +use std::{mem::ManuallyDrop, sync::OnceLock}; + +pub use binder::*; pub use consts::*; pub use def_id::*; pub use generic_arg::*; @@ -31,6 +40,7 @@ pub use region::*; pub use solver::*; pub use ty::*; +use crate::db::HirDatabase; pub use crate::lower::ImplTraitIdx; pub use rustc_ast_ir::Mutability; @@ -47,3 +57,225 @@ pub type TypingMode<'db> = rustc_type_ir::TypingMode>; pub type TypeError<'db> = rustc_type_ir::error::TypeError>; pub type QueryResult<'db> = rustc_type_ir::solve::QueryResult>; pub type FxIndexMap = rustc_type_ir::data_structures::IndexMap; + +pub struct DefaultTypes<'db> { + pub usize: Ty<'db>, + pub u8: Ty<'db>, + pub u16: Ty<'db>, + pub u32: Ty<'db>, + pub u64: Ty<'db>, + pub u128: Ty<'db>, + pub isize: Ty<'db>, + pub i8: Ty<'db>, + pub i16: Ty<'db>, + pub i32: Ty<'db>, + pub i64: Ty<'db>, + pub i128: Ty<'db>, + pub f16: Ty<'db>, + pub f32: Ty<'db>, + pub f64: Ty<'db>, + pub f128: Ty<'db>, + pub unit: Ty<'db>, + pub bool: Ty<'db>, + pub char: Ty<'db>, + pub str: Ty<'db>, + pub never: Ty<'db>, + pub error: Ty<'db>, + /// `&'static str` + pub static_str_ref: Ty<'db>, + /// `*mut ()` + pub mut_unit_ptr: Ty<'db>, +} + +pub struct DefaultConsts<'db> { + pub error: Const<'db>, +} + +pub struct DefaultRegions<'db> { + pub error: Region<'db>, + pub statik: Region<'db>, + pub erased: Region<'db>, +} + +pub struct DefaultEmpty<'db> { + pub tys: Tys<'db>, + pub generic_args: GenericArgs<'db>, + pub bound_var_kinds: BoundVarKinds<'db>, + pub canonical_vars: CanonicalVars<'db>, + pub variances: VariancesOf<'db>, + pub pat_list: PatList<'db>, + pub predefined_opaques: PredefinedOpaques<'db>, + pub def_ids: SolverDefIds<'db>, + pub bound_existential_predicates: BoundExistentialPredicates<'db>, + pub clauses: Clauses<'db>, + pub region_assumptions: RegionAssumptions<'db>, +} + +pub struct DefaultAny<'db> { + pub types: DefaultTypes<'db>, + pub consts: DefaultConsts<'db>, + pub regions: DefaultRegions<'db>, + pub empty: DefaultEmpty<'db>, + /// `[Invariant]` + pub one_invariant: VariancesOf<'db>, + /// `[Covariant]` + pub one_covariant: VariancesOf<'db>, + /// `for<'env>` + pub coroutine_captures_by_ref_bound_var_kinds: BoundVarKinds<'db>, +} + +impl std::fmt::Debug for DefaultAny<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("DefaultAny").finish_non_exhaustive() + } +} + +#[inline] +pub fn default_types<'a, 'db>(db: &'db dyn HirDatabase) -> &'a DefaultAny<'db> { + static TYPES: OnceLock> = OnceLock::new(); + + let interner = DbInterner::new_no_crate(db); + TYPES.get_or_init(|| { + let create_ty = |kind| { + let ty = Ty::new(interner, kind); + // We need to increase the refcount (forever), so that the types won't be freed. + let ty = ManuallyDrop::new(ty.store()); + ty.as_ref() + }; + let create_const = |kind| { + let ty = Const::new(interner, kind); + // We need to increase the refcount (forever), so that the types won't be freed. + let ty = ManuallyDrop::new(ty.store()); + ty.as_ref() + }; + let create_region = |kind| { + let ty = Region::new(interner, kind); + // We need to increase the refcount (forever), so that the types won't be freed. 
+ let ty = ManuallyDrop::new(ty.store()); + ty.as_ref() + }; + let create_generic_args = |slice| { + let ty = GenericArgs::new_from_slice(slice); + // We need to increase the refcount (forever), so that the types won't be freed. + let ty = ManuallyDrop::new(ty.store()); + ty.as_ref() + }; + let create_bound_var_kinds = |slice| { + let ty = BoundVarKinds::new_from_slice(slice); + // We need to increase the refcount (forever), so that the types won't be freed. + let ty = ManuallyDrop::new(ty.store()); + ty.as_ref() + }; + let create_canonical_vars = |slice| { + let ty = CanonicalVars::new_from_slice(slice); + // We need to increase the refcount (forever), so that the types won't be freed. + let ty = ManuallyDrop::new(ty.store()); + ty.as_ref() + }; + let create_variances_of = |slice| { + let ty = VariancesOf::new_from_slice(slice); + // We need to increase the refcount (forever), so that the types won't be freed. + let ty = ManuallyDrop::new(ty.store()); + ty.as_ref() + }; + let create_pat_list = |slice| { + let ty = PatList::new_from_slice(slice); + // We need to increase the refcount (forever), so that the types won't be freed. + let ty = ManuallyDrop::new(ty.store()); + ty.as_ref() + }; + let create_predefined_opaques = |slice| { + let ty = PredefinedOpaques::new_from_slice(slice); + // We need to increase the refcount (forever), so that the types won't be freed. + let ty = ManuallyDrop::new(ty.store()); + ty.as_ref() + }; + let create_solver_def_ids = |slice| { + let ty = SolverDefIds::new_from_slice(slice); + // We need to increase the refcount (forever), so that the types won't be freed. + let ty = ManuallyDrop::new(ty.store()); + ty.as_ref() + }; + let create_bound_existential_predicates = |slice| { + let ty = BoundExistentialPredicates::new_from_slice(slice); + // We need to increase the refcount (forever), so that the types won't be freed. + let ty = ManuallyDrop::new(ty.store()); + ty.as_ref() + }; + let create_clauses = |slice| { + let ty = Clauses::new_from_slice(slice); + // We need to increase the refcount (forever), so that the types won't be freed. + let ty = ManuallyDrop::new(ty.store()); + ty.as_ref() + }; + let create_region_assumptions = |slice| { + let ty = RegionAssumptions::new_from_slice(slice); + // We need to increase the refcount (forever), so that the types won't be freed. + let ty = ManuallyDrop::new(ty.store()); + ty.as_ref() + }; + let create_tys = |slice| { + let ty = Tys::new_from_slice(slice); + // We need to increase the refcount (forever), so that the types won't be freed. 
+ let ty = ManuallyDrop::new(ty.store()); + ty.as_ref() + }; + + let str = create_ty(TyKind::Str); + let statik = create_region(RegionKind::ReStatic); + let empty_tys = create_tys(&[]); + let unit = create_ty(TyKind::Tuple(empty_tys)); + DefaultAny { + types: DefaultTypes { + usize: create_ty(TyKind::Uint(rustc_ast_ir::UintTy::Usize)), + u8: create_ty(TyKind::Uint(rustc_ast_ir::UintTy::U8)), + u16: create_ty(TyKind::Uint(rustc_ast_ir::UintTy::U16)), + u32: create_ty(TyKind::Uint(rustc_ast_ir::UintTy::U32)), + u64: create_ty(TyKind::Uint(rustc_ast_ir::UintTy::U64)), + u128: create_ty(TyKind::Uint(rustc_ast_ir::UintTy::U128)), + isize: create_ty(TyKind::Int(rustc_ast_ir::IntTy::Isize)), + i8: create_ty(TyKind::Int(rustc_ast_ir::IntTy::I8)), + i16: create_ty(TyKind::Int(rustc_ast_ir::IntTy::I16)), + i32: create_ty(TyKind::Int(rustc_ast_ir::IntTy::I32)), + i64: create_ty(TyKind::Int(rustc_ast_ir::IntTy::I64)), + i128: create_ty(TyKind::Int(rustc_ast_ir::IntTy::I128)), + f16: create_ty(TyKind::Float(rustc_ast_ir::FloatTy::F16)), + f32: create_ty(TyKind::Float(rustc_ast_ir::FloatTy::F32)), + f64: create_ty(TyKind::Float(rustc_ast_ir::FloatTy::F64)), + f128: create_ty(TyKind::Float(rustc_ast_ir::FloatTy::F128)), + unit, + bool: create_ty(TyKind::Bool), + char: create_ty(TyKind::Char), + str, + never: create_ty(TyKind::Never), + error: create_ty(TyKind::Error(ErrorGuaranteed)), + static_str_ref: create_ty(TyKind::Ref(statik, str, rustc_ast_ir::Mutability::Not)), + mut_unit_ptr: create_ty(TyKind::RawPtr(unit, rustc_ast_ir::Mutability::Mut)), + }, + consts: DefaultConsts { error: create_const(ConstKind::Error(ErrorGuaranteed)) }, + regions: DefaultRegions { + error: create_region(RegionKind::ReError(ErrorGuaranteed)), + statik, + erased: create_region(RegionKind::ReErased), + }, + empty: DefaultEmpty { + tys: empty_tys, + generic_args: create_generic_args(&[]), + bound_var_kinds: create_bound_var_kinds(&[]), + canonical_vars: create_canonical_vars(&[]), + variances: create_variances_of(&[]), + pat_list: create_pat_list(&[]), + predefined_opaques: create_predefined_opaques(&[]), + def_ids: create_solver_def_ids(&[]), + bound_existential_predicates: create_bound_existential_predicates(&[]), + clauses: create_clauses(&[]), + region_assumptions: create_region_assumptions(&[]), + }, + one_invariant: create_variances_of(&[rustc_type_ir::Variance::Invariant]), + one_covariant: create_variances_of(&[rustc_type_ir::Variance::Covariant]), + coroutine_captures_by_ref_bound_var_kinds: create_bound_var_kinds(&[ + BoundVarKind::Region(BoundRegionKind::ClosureEnv), + ]), + } + }) +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/binder.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/binder.rs new file mode 100644 index 0000000000000..3645f8096cfd8 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/binder.rs @@ -0,0 +1,83 @@ +use crate::{ + FnAbi, + next_solver::{ + Binder, Clauses, EarlyBinder, FnSig, PolyFnSig, StoredBoundVarKinds, StoredClauses, + StoredTy, StoredTys, Ty, abi::Safety, + }, +}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct StoredEarlyBinder(T); + +impl StoredEarlyBinder { + #[inline] + pub fn bind(value: T) -> Self { + Self(value) + } + + #[inline] + pub fn skip_binder(self) -> T { + self.0 + } + + #[inline] + pub fn as_ref(&self) -> StoredEarlyBinder<&T> { + StoredEarlyBinder(&self.0) + } + + #[inline] + pub fn get_with<'db, 'a, R>(&'a self, f: impl FnOnce(&'a T) -> R) -> EarlyBinder<'db, R> { + 
EarlyBinder::bind(f(&self.0)) + } +} + +impl StoredEarlyBinder { + #[inline] + pub fn get<'db>(&self) -> EarlyBinder<'db, Ty<'db>> { + self.get_with(|it| it.as_ref()) + } +} + +impl StoredEarlyBinder { + #[inline] + pub fn get<'db>(&self) -> EarlyBinder<'db, Clauses<'db>> { + self.get_with(|it| it.as_ref()) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct StoredPolyFnSig { + bound_vars: StoredBoundVarKinds, + inputs_and_output: StoredTys, + c_variadic: bool, + safety: Safety, + abi: FnAbi, +} + +impl StoredPolyFnSig { + #[inline] + pub fn new(sig: PolyFnSig<'_>) -> Self { + let bound_vars = sig.bound_vars().store(); + let sig = sig.skip_binder(); + Self { + bound_vars, + inputs_and_output: sig.inputs_and_output.store(), + c_variadic: sig.c_variadic, + safety: sig.safety, + abi: sig.abi, + } + } + + #[inline] + pub fn get(&self) -> PolyFnSig<'_> { + Binder::bind_with_vars( + FnSig { + inputs_and_output: self.inputs_and_output.as_ref(), + c_variadic: self.c_variadic, + safety: self.safety, + abi: self.abi, + }, + self.bound_vars.as_ref(), + ) + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/consts.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/consts.rs index 926dbdc03d037..9643f1ba4c3a3 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/consts.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/consts.rs @@ -3,19 +3,20 @@ use std::hash::Hash; use hir_def::ConstParamId; -use macros::{TypeFoldable, TypeVisitable}; +use intern::{Interned, InternedRef, impl_internable}; +use macros::{GenericTypeVisitable, TypeFoldable, TypeVisitable}; use rustc_ast_ir::visit::VisitorResult; use rustc_type_ir::{ - BoundVar, BoundVarIndexKind, ConstVid, DebruijnIndex, FlagComputation, Flags, InferConst, - TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, - WithCachedTypeInfo, + BoundVar, BoundVarIndexKind, ConstVid, DebruijnIndex, FlagComputation, Flags, + GenericTypeVisitable, InferConst, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, + TypeVisitable, TypeVisitableExt, WithCachedTypeInfo, inherent::{IntoKind, ParamEnv as _, PlaceholderLike, SliceLike}, relate::Relate, }; use crate::{ MemoryMap, - next_solver::{ClauseKind, ParamEnv, interner::InternedWrapperNoDebug}, + next_solver::{ClauseKind, ParamEnv, impl_stored_interned}, }; use super::{BoundVarKind, DbInterner, ErrorGuaranteed, GenericArgs, Placeholder, Ty}; @@ -23,34 +24,47 @@ use super::{BoundVarKind, DbInterner, ErrorGuaranteed, GenericArgs, Placeholder, pub type ConstKind<'db> = rustc_type_ir::ConstKind>; pub type UnevaluatedConst<'db> = rustc_type_ir::UnevaluatedConst>; -#[salsa::interned(constructor = new_)] +#[derive(Clone, Copy, PartialEq, Eq, Hash)] pub struct Const<'db> { - #[returns(ref)] - kind_: InternedWrapperNoDebug>>, + pub(super) interned: InternedRef<'db, ConstInterned>, } +#[derive(PartialEq, Eq, Hash, GenericTypeVisitable)] +#[repr(align(4))] // Required for `GenericArg` bit-tagging. 
+pub(super) struct ConstInterned(pub(super) WithCachedTypeInfo>); + +impl_internable!(gc; ConstInterned); +impl_stored_interned!(ConstInterned, Const, StoredConst); + +const _: () = { + const fn is_copy() {} + is_copy::>(); +}; + impl<'db> Const<'db> { - pub fn new(interner: DbInterner<'db>, kind: ConstKind<'db>) -> Self { + pub fn new(_interner: DbInterner<'db>, kind: ConstKind<'db>) -> Self { + let kind = unsafe { std::mem::transmute::, ConstKind<'static>>(kind) }; let flags = FlagComputation::for_const_kind(&kind); let cached = WithCachedTypeInfo { internee: kind, flags: flags.flags, outer_exclusive_binder: flags.outer_exclusive_binder, }; - Const::new_(interner.db(), InternedWrapperNoDebug(cached)) + Self { interned: Interned::new_gc(ConstInterned(cached)) } } pub fn inner(&self) -> &WithCachedTypeInfo> { - crate::with_attached_db(|db| { - let inner = &self.kind_(db).0; - // SAFETY: The caller already has access to a `Const<'db>`, so borrowchecking will - // make sure that our returned value is valid for the lifetime `'db`. - unsafe { std::mem::transmute(inner) } - }) + let inner = &self.interned.0; + unsafe { + std::mem::transmute::< + &WithCachedTypeInfo>, + &WithCachedTypeInfo>, + >(inner) + } } pub fn error(interner: DbInterner<'db>) -> Self { - Const::new(interner, ConstKind::Error(ErrorGuaranteed)) + interner.default_types().consts.error } pub fn new_param(interner: DbInterner<'db>, param: ParamConst) -> Self { @@ -106,12 +120,6 @@ impl<'db> std::fmt::Debug for Const<'db> { } } -impl<'db> std::fmt::Debug for InternedWrapperNoDebug>> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.0.internee.fmt(f) - } -} - pub type PlaceholderConst = Placeholder; #[derive(Copy, Clone, Hash, Eq, PartialEq)] @@ -164,7 +172,9 @@ impl ParamConst { /// A type-level constant value. /// /// Represents a typed, fully evaluated constant. -#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, TypeFoldable, TypeVisitable)] +#[derive( + Debug, Copy, Clone, Eq, PartialEq, Hash, TypeFoldable, TypeVisitable, GenericTypeVisitable, +)] pub struct ValueConst<'db> { pub ty: Ty<'db>, // FIXME: Should we ignore this for TypeVisitable, TypeFoldable? 
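The `const _` items added above act as compile-time assertions that the interned handles stay `Copy` now that they are plain pointer wrappers. In general form the idiom looks like this (placeholder type, not the exact assertion from the diff):

    // Fails to compile as soon as the asserted type loses `Copy`.
    const _: () = {
        const fn is_copy<T: Copy>() {}
        // Substitute the handle type to pin down, e.g. `Const<'static>`.
        is_copy::<u32>();
    };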
@@ -190,7 +200,7 @@ impl<'db> rustc_type_ir::inherent::ValueConst> for ValueConst<'d } } -#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq, GenericTypeVisitable)] pub struct ConstBytes<'db> { pub memory: Box<[u8]>, pub memory_map: MemoryMap<'db>, @@ -202,31 +212,52 @@ impl Hash for ConstBytes<'_> { } } -#[salsa::interned(constructor = new_, debug)] +#[derive(Clone, Copy, PartialEq, Eq, Hash)] pub struct Valtree<'db> { - #[returns(ref)] - bytes_: ConstBytes<'db>, + interned: InternedRef<'db, ValtreeInterned>, +} + +impl<'db, V: super::WorldExposer> GenericTypeVisitable for Valtree<'db> { + fn generic_visit_with(&self, visitor: &mut V) { + if visitor.on_interned(self.interned).is_continue() { + self.inner().generic_visit_with(visitor); + } + } } +#[derive(Debug, PartialEq, Eq, Hash, GenericTypeVisitable)] +pub(super) struct ValtreeInterned(ConstBytes<'static>); + +impl_internable!(gc; ValtreeInterned); + +const _: () = { + const fn is_copy() {} + is_copy::>(); +}; + impl<'db> Valtree<'db> { + #[inline] pub fn new(bytes: ConstBytes<'db>) -> Self { - crate::with_attached_db(|db| unsafe { - // SAFETY: ¯\_(ツ)_/¯ - std::mem::transmute(Valtree::new_(db, bytes)) - }) + let bytes = unsafe { std::mem::transmute::, ConstBytes<'static>>(bytes) }; + Self { interned: Interned::new_gc(ValtreeInterned(bytes)) } } + #[inline] pub fn inner(&self) -> &ConstBytes<'db> { - crate::with_attached_db(|db| { - let inner = self.bytes_(db); - // SAFETY: The caller already has access to a `Valtree<'db>`, so borrowchecking will - // make sure that our returned value is valid for the lifetime `'db`. - unsafe { std::mem::transmute(inner) } - }) + let inner = &self.interned.0; + unsafe { std::mem::transmute::<&ConstBytes<'static>, &ConstBytes<'db>>(inner) } + } +} + +impl std::fmt::Debug for Valtree<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.interned.fmt(f) } } -#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, TypeVisitable, TypeFoldable)] +#[derive( + Copy, Clone, Debug, Hash, PartialEq, Eq, TypeVisitable, TypeFoldable, GenericTypeVisitable, +)] pub struct ExprConst; impl rustc_type_ir::inherent::ParamLike for ParamConst { @@ -243,6 +274,14 @@ impl<'db> IntoKind for Const<'db> { } } +impl<'db, V: super::WorldExposer> GenericTypeVisitable for Const<'db> { + fn generic_visit_with(&self, visitor: &mut V) { + if visitor.on_interned(self.interned).is_continue() { + self.kind().generic_visit_with(visitor); + } + } +} + impl<'db> TypeVisitable> for Const<'db> { fn visit_with>>( &self, @@ -382,8 +421,8 @@ impl<'db> rustc_type_ir::inherent::Const> for Const<'db> { Const::new(interner, ConstKind::Expr(expr)) } - fn new_error(interner: DbInterner<'db>, guar: ErrorGuaranteed) -> Self { - Const::new(interner, ConstKind::Error(guar)) + fn new_error(interner: DbInterner<'db>, _guar: ErrorGuaranteed) -> Self { + Const::error(interner) } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/format_proof_tree.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/format_proof_tree.rs new file mode 100644 index 0000000000000..59fb0d65c5e26 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/format_proof_tree.rs @@ -0,0 +1,93 @@ +use rustc_type_ir::{solve::GoalSource, solve::inspect::GoalEvaluation}; +use serde_derive::{Deserialize, Serialize}; + +use crate::next_solver::infer::InferCtxt; +use crate::next_solver::inspect::{InspectCandidate, InspectGoal}; +use crate::next_solver::{DbInterner, Span}; + +#[derive(Debug, Clone, 
Serialize, Deserialize)] +pub struct ProofTreeData { + pub goal: String, + pub result: String, + pub depth: usize, + pub candidates: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CandidateData { + pub kind: String, + pub result: String, + pub impl_header: Option, + pub nested_goals: Vec, +} + +pub fn dump_proof_tree_structured<'db>( + proof_tree: GoalEvaluation>, + _span: Span, + infcx: &InferCtxt<'db>, +) -> ProofTreeData { + let goal_eval = InspectGoal::new(infcx, 0, proof_tree, None, GoalSource::Misc); + let mut serializer = ProofTreeSerializer::new(infcx); + serializer.serialize_goal(&goal_eval) +} + +struct ProofTreeSerializer<'a, 'db> { + infcx: &'a InferCtxt<'db>, +} + +impl<'a, 'db> ProofTreeSerializer<'a, 'db> { + fn new(infcx: &'a InferCtxt<'db>) -> Self { + Self { infcx } + } + + fn serialize_goal(&mut self, goal: &InspectGoal<'_, 'db>) -> ProofTreeData { + let candidates = goal.candidates(); + let candidates_data: Vec = + candidates.iter().map(|c| self.serialize_candidate(c)).collect(); + + ProofTreeData { + goal: format!("{:?}", goal.goal()), + result: format!("{:?}", goal.result()), + depth: goal.depth(), + candidates: candidates_data, + } + } + + fn serialize_candidate(&mut self, candidate: &InspectCandidate<'_, 'db>) -> CandidateData { + let kind = candidate.kind(); + let impl_header = self.get_impl_header(candidate); + + let mut nested = Vec::new(); + self.infcx.probe(|_| { + for nested_goal in candidate.instantiate_nested_goals() { + nested.push(self.serialize_goal(&nested_goal)); + } + }); + + CandidateData { + kind: format!("{:?}", kind), + result: format!("{:?}", candidate.result()), + impl_header, + nested_goals: nested, + } + } + + fn get_impl_header(&self, candidate: &InspectCandidate<'_, 'db>) -> Option { + use rustc_type_ir::solve::inspect::ProbeKind; + match candidate.kind() { + ProbeKind::TraitCandidate { source, .. 
} => { + use rustc_type_ir::solve::CandidateSource; + match source { + CandidateSource::Impl(impl_def_id) => { + use hir_def::{Lookup, src::HasSource}; + let db = self.infcx.interner.db; + let impl_src = impl_def_id.0.lookup(db).source(db); + Some(impl_src.value.to_string()) + } + _ => None, + } + } + _ => None, + } + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill.rs index 40cc84e0c0ed7..0fe073297279a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill.rs @@ -187,6 +187,9 @@ impl<'db> FulfillmentCtxt<'db> { } let result = delegate.evaluate_root_goal(goal, Span::dummy(), stalled_on); + infcx.inspect_evaluated_obligation(&obligation, &result, || { + Some(delegate.evaluate_root_goal_for_proof_tree(goal, Span::dummy()).1) + }); let GoalEvaluation { goal: _, certainty, has_changed, stalled_on } = match result { Ok(result) => result, Err(NoSolution) => { @@ -249,7 +252,7 @@ impl<'db> FulfillmentCtxt<'db> { | TypingMode::PostBorrowckAnalysis { defined_opaque_types: _ } | TypingMode::PostAnalysis => return Default::default(), }; - let stalled_coroutines = stalled_coroutines.inner(); + let stalled_coroutines = stalled_coroutines.as_slice(); if stalled_coroutines.is_empty() { return Default::default(); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill/errors.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill/errors.rs index 8495af4b755ee..8f798b4ade249 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill/errors.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill/errors.rs @@ -9,7 +9,7 @@ use rustc_next_trait_solver::solve::{GoalEvaluation, SolverDelegateEvalExt}; use rustc_type_ir::{ AliasRelationDirection, AliasTermKind, HostEffectPredicate, Interner, PredicatePolarity, error::ExpectedFound, - inherent::{IntoKind, SliceLike, Span as _}, + inherent::{IntoKind, Span as _}, lang_items::SolverTraitLangItem, solve::{Certainty, GoalSource, MaybeCause, NoSolution}, }; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generic_arg.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generic_arg.rs index 10f2ba2b119bf..9936e443210aa 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generic_arg.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generic_arg.rs @@ -1,41 +1,226 @@ -//! Things related to generic args in the next-trait-solver. +//! Things related to generic args in the next-trait-solver (`GenericArg`, `GenericArgs`, `Term`). +//! +//! Implementations of `GenericArg` and `Term` are pointer-tagged instead of an enum (rustc does +//! the same). This is done to save memory (which also helps speed) - one `GenericArg` is a machine +//! word instead of two, while matching on it is basically as cheap. The implementation for both +//! `GenericArg` and `Term` is shared in [`GenericArgImpl`]. This both simplifies the implementation, +//! as well as enables a noop conversion from `Term` to `GenericArg`. 
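
As a pointer to how the tagging scheme described in the module doc works, here is a minimal, self-contained sketch of the same idea, kept separate from the patch itself: `Kind`, `Tagged`, and the tag constants are invented names, and the real implementation is the `GenericArgImpl` type that follows below. The sketch packs a reference to either a `u64` or an `f64` into one machine word by storing a discriminant in the pointer's free low bit (both pointees are 8-byte aligned, so that bit is always zero in the untagged address).

    use std::marker::PhantomData;
    use std::ptr::NonNull;

    #[derive(Debug, PartialEq)]
    enum Kind<'a> {
        Int(&'a u64),
        Float(&'a f64),
    }

    /// One machine word that can hold either `&u64` or `&f64`.
    struct Tagged<'a> {
        // Invariant: points at a `u64` (tag 0) or an `f64` (tag 1); both are
        // 8-byte aligned, so the low bit of the address is free for the tag.
        ptr: NonNull<()>,
        _marker: PhantomData<Kind<'a>>,
    }

    impl<'a> Tagged<'a> {
        const TAG_MASK: usize = 0b1;
        const INT_TAG: usize = 0b0;
        const FLOAT_TAG: usize = 0b1;

        fn new_int(x: &'a u64) -> Self {
            let raw = NonNull::from(x).as_ptr().cast::<()>();
            // SAFETY: `raw` comes from a reference, and or-ing a tag into an
            // already-zero low bit cannot make it null.
            let ptr = unsafe { NonNull::new_unchecked(raw.map_addr(|a| a | Self::INT_TAG)) };
            Self { ptr, _marker: PhantomData }
        }

        fn new_float(x: &'a f64) -> Self {
            let raw = NonNull::from(x).as_ptr().cast::<()>();
            // SAFETY: as above.
            let ptr = unsafe { NonNull::new_unchecked(raw.map_addr(|a| a | Self::FLOAT_TAG)) };
            Self { ptr, _marker: PhantomData }
        }

        fn kind(&self) -> Kind<'a> {
            let tag = self.ptr.addr().get() & Self::TAG_MASK;
            let untagged = self.ptr.as_ptr().map_addr(|a| a & !Self::TAG_MASK);
            // SAFETY: the constructors guarantee the tag matches the pointee type
            // and that `untagged` is the original, valid reference.
            unsafe {
                if tag == Self::INT_TAG {
                    Kind::Int(&*untagged.cast::<u64>())
                } else {
                    Kind::Float(&*untagged.cast::<f64>())
                }
            }
        }
    }

    fn main() {
        let (x, y) = (7_u64, 1.5_f64);
        assert_eq!(std::mem::size_of::<Tagged<'static>>(), std::mem::size_of::<usize>());
        assert_eq!(Tagged::new_int(&x).kind(), Kind::Int(&7));
        assert_eq!(Tagged::new_float(&y).kind(), Kind::Float(&1.5));
    }

The real `GenericArgImpl` below uses two tag bits for three variants and `unreachable_unchecked()` for the impossible fourth value, but the construction and `kind()` round-trip have the same shape.
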
+ +use std::{hint::unreachable_unchecked, marker::PhantomData, ptr::NonNull}; use hir_def::{GenericDefId, GenericParamId}; -use macros::{TypeFoldable, TypeVisitable}; +use intern::InternedRef; use rustc_type_ir::{ - ClosureArgs, CollectAndApply, ConstVid, CoroutineArgs, CoroutineClosureArgs, FnSigTys, - GenericArgKind, Interner, TermKind, TyKind, TyVid, Variance, + ClosureArgs, ConstVid, CoroutineArgs, CoroutineClosureArgs, FallibleTypeFolder, FnSigTys, + GenericTypeVisitable, Interner, TyKind, TyVid, TypeFoldable, TypeFolder, TypeVisitable, + TypeVisitor, Variance, inherent::{GenericArg as _, GenericsOf, IntoKind, SliceLike, Term as _, Ty as _}, relate::{Relate, VarianceDiagInfo}, walk::TypeWalker, }; use smallvec::SmallVec; -use crate::next_solver::{PolyFnSig, interned_vec_db}; +use crate::next_solver::{ + ConstInterned, PolyFnSig, RegionInterned, TyInterned, impl_foldable_for_interned_slice, + interned_slice, +}; use super::{ Const, DbInterner, EarlyParamRegion, ErrorGuaranteed, ParamConst, Region, SolverDefId, Ty, Tys, generics::Generics, }; -#[derive(Copy, Clone, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable, salsa::Supertype)] -pub enum GenericArg<'db> { - Ty(Ty<'db>), - Lifetime(Region<'db>), - Const(Const<'db>), +pub type GenericArgKind<'db> = rustc_type_ir::GenericArgKind>; +pub type TermKind<'db> = rustc_type_ir::TermKind>; + +#[derive(Clone, Copy, PartialEq, Eq, Hash)] +struct GenericArgImpl<'db> { + /// # Invariant + /// + /// Contains an [`InternedRef`] of a [`Ty`], [`Const`] or [`Region`], bit-tagged as per the consts below. + ptr: NonNull<()>, + _marker: PhantomData<(Ty<'db>, Const<'db>, Region<'db>)>, +} + +// SAFETY: We essentially own the `Ty`, `Const` or `Region`, and they are `Send + Sync`. +unsafe impl Send for GenericArgImpl<'_> {} +unsafe impl Sync for GenericArgImpl<'_> {} + +impl<'db> GenericArgImpl<'db> { + const KIND_MASK: usize = 0b11; + const PTR_MASK: usize = !Self::KIND_MASK; + const TY_TAG: usize = 0b00; + const CONST_TAG: usize = 0b01; + const REGION_TAG: usize = 0b10; + + #[inline] + fn new_ty(ty: Ty<'db>) -> Self { + Self { + // SAFETY: We create it from an `InternedRef`, and it's never null. + ptr: unsafe { + NonNull::new_unchecked( + ty.interned + .as_raw() + .cast::<()>() + .cast_mut() + .map_addr(|addr| addr | Self::TY_TAG), + ) + }, + _marker: PhantomData, + } + } + + #[inline] + fn new_const(ty: Const<'db>) -> Self { + Self { + // SAFETY: We create it from an `InternedRef`, and it's never null. + ptr: unsafe { + NonNull::new_unchecked( + ty.interned + .as_raw() + .cast::<()>() + .cast_mut() + .map_addr(|addr| addr | Self::CONST_TAG), + ) + }, + _marker: PhantomData, + } + } + + #[inline] + fn new_region(ty: Region<'db>) -> Self { + Self { + // SAFETY: We create it from an `InternedRef`, and it's never null. + ptr: unsafe { + NonNull::new_unchecked( + ty.interned + .as_raw() + .cast::<()>() + .cast_mut() + .map_addr(|addr| addr | Self::REGION_TAG), + ) + }, + _marker: PhantomData, + } + } + + #[inline] + fn kind(self) -> GenericArgKind<'db> { + let ptr = self.ptr.as_ptr().map_addr(|addr| addr & Self::PTR_MASK); + // SAFETY: We can only be created from a `Ty`, a `Const` or a `Region`, and the tag will match. 
+ unsafe { + match self.ptr.addr().get() & Self::KIND_MASK { + Self::TY_TAG => GenericArgKind::Type(Ty { + interned: InternedRef::from_raw(ptr.cast::()), + }), + Self::CONST_TAG => GenericArgKind::Const(Const { + interned: InternedRef::from_raw(ptr.cast::()), + }), + Self::REGION_TAG => GenericArgKind::Lifetime(Region { + interned: InternedRef::from_raw(ptr.cast::()), + }), + _ => unreachable_unchecked(), + } + } + } + + #[inline] + fn term_kind(self) -> TermKind<'db> { + let ptr = self.ptr.as_ptr().map_addr(|addr| addr & Self::PTR_MASK); + // SAFETY: We can only be created from a `Ty`, a `Const` or a `Region`, and the tag will match. + // It is the caller's responsibility (encapsulated within this module) to only call this with + // `Term`, which cannot be constructed from a `Region`. + unsafe { + match self.ptr.addr().get() & Self::KIND_MASK { + Self::TY_TAG => { + TermKind::Ty(Ty { interned: InternedRef::from_raw(ptr.cast::()) }) + } + Self::CONST_TAG => TermKind::Const(Const { + interned: InternedRef::from_raw(ptr.cast::()), + }), + _ => unreachable_unchecked(), + } + } + } +} + +#[derive(PartialEq, Eq, Hash)] +pub struct StoredGenericArg { + ptr: GenericArgImpl<'static>, +} + +impl Clone for StoredGenericArg { + #[inline] + fn clone(&self) -> Self { + match self.ptr.kind() { + GenericArgKind::Lifetime(it) => std::mem::forget(it.interned.to_owned()), + GenericArgKind::Type(it) => std::mem::forget(it.interned.to_owned()), + GenericArgKind::Const(it) => std::mem::forget(it.interned.to_owned()), + } + Self { ptr: self.ptr } + } +} + +impl Drop for StoredGenericArg { + #[inline] + fn drop(&mut self) { + unsafe { + match self.ptr.kind() { + GenericArgKind::Lifetime(it) => it.interned.decrement_refcount(), + GenericArgKind::Type(it) => it.interned.decrement_refcount(), + GenericArgKind::Const(it) => it.interned.decrement_refcount(), + } + } + } +} + +impl StoredGenericArg { + #[inline] + fn new(value: GenericArg<'_>) -> Self { + let result = Self { ptr: GenericArgImpl { ptr: value.ptr.ptr, _marker: PhantomData } }; + // Increase refcount. 
+ std::mem::forget(result.clone()); + result + } + + #[inline] + pub fn as_ref<'db>(&self) -> GenericArg<'db> { + GenericArg { ptr: self.ptr } + } +} + +impl std::fmt::Debug for StoredGenericArg { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.as_ref().fmt(f) + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub struct GenericArg<'db> { + ptr: GenericArgImpl<'db>, } impl<'db> std::fmt::Debug for GenericArg<'db> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::Ty(t) => std::fmt::Debug::fmt(t, f), - Self::Lifetime(r) => std::fmt::Debug::fmt(r, f), - Self::Const(c) => std::fmt::Debug::fmt(c, f), + match self.kind() { + GenericArgKind::Type(t) => std::fmt::Debug::fmt(&t, f), + GenericArgKind::Lifetime(r) => std::fmt::Debug::fmt(&r, f), + GenericArgKind::Const(c) => std::fmt::Debug::fmt(&c, f), } } } impl<'db> GenericArg<'db> { + #[inline] + pub fn store(self) -> StoredGenericArg { + StoredGenericArg::new(self) + } + + #[inline] + pub fn kind(self) -> GenericArgKind<'db> { + self.ptr.kind() + } + pub fn ty(self) -> Option> { match self.kind() { GenericArgKind::Type(ty) => Some(ty), @@ -66,8 +251,8 @@ impl<'db> GenericArg<'db> { #[inline] pub(crate) fn expect_region(self) -> Region<'db> { - match self { - GenericArg::Lifetime(region) => region, + match self.kind() { + GenericArgKind::Lifetime(region) => region, _ => panic!("expected a region, got {self:?}"), } } @@ -87,30 +272,32 @@ impl<'db> GenericArg<'db> { } impl<'db> From> for GenericArg<'db> { + #[inline] fn from(value: Term<'db>) -> Self { - match value { - Term::Ty(ty) => GenericArg::Ty(ty), - Term::Const(c) => GenericArg::Const(c), - } + GenericArg { ptr: value.ptr } } } -#[derive(Copy, Clone, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable)] -pub enum Term<'db> { - Ty(Ty<'db>), - Const(Const<'db>), +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub struct Term<'db> { + ptr: GenericArgImpl<'db>, } impl<'db> std::fmt::Debug for Term<'db> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::Ty(t) => std::fmt::Debug::fmt(t, f), - Self::Const(c) => std::fmt::Debug::fmt(c, f), + match self.kind() { + TermKind::Ty(t) => std::fmt::Debug::fmt(&t, f), + TermKind::Const(c) => std::fmt::Debug::fmt(&c, f), } } } impl<'db> Term<'db> { + #[inline] + pub fn kind(self) -> TermKind<'db> { + self.ptr.term_kind() + } + pub fn expect_type(&self) -> Ty<'db> { self.as_type().expect("expected a type, but found a const") } @@ -124,31 +311,108 @@ impl<'db> Term<'db> { } impl<'db> From> for GenericArg<'db> { + #[inline] fn from(value: Ty<'db>) -> Self { - Self::Ty(value) + GenericArg { ptr: GenericArgImpl::new_ty(value) } } } impl<'db> From> for GenericArg<'db> { + #[inline] fn from(value: Region<'db>) -> Self { - Self::Lifetime(value) + GenericArg { ptr: GenericArgImpl::new_region(value) } } } impl<'db> From> for GenericArg<'db> { + #[inline] fn from(value: Const<'db>) -> Self { - Self::Const(value) + GenericArg { ptr: GenericArgImpl::new_const(value) } } } impl<'db> IntoKind for GenericArg<'db> { - type Kind = GenericArgKind>; + type Kind = GenericArgKind<'db>; + #[inline] fn kind(self) -> Self::Kind { - match self { - GenericArg::Ty(ty) => GenericArgKind::Type(ty), - GenericArg::Lifetime(region) => GenericArgKind::Lifetime(region), - GenericArg::Const(c) => GenericArgKind::Const(c), + self.ptr.kind() + } +} + +impl<'db, V> GenericTypeVisitable for GenericArg<'db> +where + GenericArgKind<'db>: GenericTypeVisitable, +{ + fn 
generic_visit_with(&self, visitor: &mut V) { + self.kind().generic_visit_with(visitor); + } +} + +impl<'db, V> GenericTypeVisitable for Term<'db> +where + TermKind<'db>: GenericTypeVisitable, +{ + fn generic_visit_with(&self, visitor: &mut V) { + self.kind().generic_visit_with(visitor); + } +} + +impl<'db> TypeVisitable> for GenericArg<'db> { + fn visit_with>>(&self, visitor: &mut V) -> V::Result { + match self.kind() { + GenericArgKind::Lifetime(it) => it.visit_with(visitor), + GenericArgKind::Type(it) => it.visit_with(visitor), + GenericArgKind::Const(it) => it.visit_with(visitor), + } + } +} + +impl<'db> TypeVisitable> for Term<'db> { + fn visit_with>>(&self, visitor: &mut V) -> V::Result { + match self.kind() { + TermKind::Ty(it) => it.visit_with(visitor), + TermKind::Const(it) => it.visit_with(visitor), + } + } +} + +impl<'db> TypeFoldable> for GenericArg<'db> { + fn try_fold_with>>( + self, + folder: &mut F, + ) -> Result { + Ok(match self.kind() { + GenericArgKind::Lifetime(it) => it.try_fold_with(folder)?.into(), + GenericArgKind::Type(it) => it.try_fold_with(folder)?.into(), + GenericArgKind::Const(it) => it.try_fold_with(folder)?.into(), + }) + } + + fn fold_with>>(self, folder: &mut F) -> Self { + match self.kind() { + GenericArgKind::Lifetime(it) => it.fold_with(folder).into(), + GenericArgKind::Type(it) => it.fold_with(folder).into(), + GenericArgKind::Const(it) => it.fold_with(folder).into(), + } + } +} + +impl<'db> TypeFoldable> for Term<'db> { + fn try_fold_with>>( + self, + folder: &mut F, + ) -> Result { + Ok(match self.kind() { + TermKind::Ty(it) => it.try_fold_with(folder)?.into(), + TermKind::Const(it) => it.try_fold_with(folder)?.into(), + }) + } + + fn fold_with>>(self, folder: &mut F) -> Self { + match self.kind() { + TermKind::Ty(it) => it.fold_with(folder).into(), + TermKind::Const(it) => it.fold_with(folder).into(), } } } @@ -182,7 +446,15 @@ impl<'db> Relate> for GenericArg<'db> { } } -interned_vec_db!(GenericArgs, GenericArg); +interned_slice!( + GenericArgsStorage, + GenericArgs, + StoredGenericArgs, + generic_args, + GenericArg<'db>, + GenericArg<'static>, +); +impl_foldable_for_interned_slice!(GenericArgs); impl<'db> rustc_type_ir::inherent::GenericArg> for GenericArg<'db> {} @@ -306,11 +578,10 @@ impl<'db> GenericArgs<'db> { /// A "sensible" `.split_closure_args()`, where the arguments are not in a tuple. 
pub fn split_closure_args_untupled(self) -> rustc_type_ir::ClosureArgsParts> { // FIXME: should use `ClosureSubst` when possible - match self.inner().as_slice() { + match self.as_slice() { [parent_args @ .., closure_kind_ty, sig_ty, tupled_upvars_ty] => { - let interner = DbInterner::conjure(); rustc_type_ir::ClosureArgsParts { - parent_args: GenericArgs::new_from_iter(interner, parent_args.iter().cloned()), + parent_args, closure_sig_as_fn_ptr_ty: sig_ty.expect_ty(), closure_kind_ty: closure_kind_ty.expect_ty(), tupled_upvars_ty: tupled_upvars_ty.expect_ty(), @@ -341,8 +612,8 @@ impl<'db> rustc_type_ir::relate::Relate> for GenericArgs<'db> { a: Self, b: Self, ) -> rustc_type_ir::relate::RelateResult, Self> { - let interner = relation.cx(); - CollectAndApply::collect_and_apply( + GenericArgs::new_from_iter( + relation.cx(), std::iter::zip(a.iter(), b.iter()).map(|(a, b)| { relation.relate_with_variance( Variance::Invariant, @@ -351,7 +622,6 @@ impl<'db> rustc_type_ir::relate::Relate> for GenericArgs<'db> { b, ) }), - |g| GenericArgs::new_from_iter(interner, g.iter().cloned()), ) } } @@ -397,29 +667,26 @@ impl<'db> rustc_type_ir::inherent::GenericArgs> for GenericArgs< }) } fn type_at(self, i: usize) -> as rustc_type_ir::Interner>::Ty { - self.inner() - .get(i) + self.get(i) .and_then(|g| g.as_type()) .unwrap_or_else(|| Ty::new_error(DbInterner::conjure(), ErrorGuaranteed)) } fn region_at(self, i: usize) -> as rustc_type_ir::Interner>::Region { - self.inner() - .get(i) + self.get(i) .and_then(|g| g.as_region()) .unwrap_or_else(|| Region::error(DbInterner::conjure())) } fn const_at(self, i: usize) -> as rustc_type_ir::Interner>::Const { - self.inner() - .get(i) + self.get(i) .and_then(|g| g.as_const()) .unwrap_or_else(|| Const::error(DbInterner::conjure())) } fn split_closure_args(self) -> rustc_type_ir::ClosureArgsParts> { // FIXME: should use `ClosureSubst` when possible - match self.inner().as_slice() { + match self.as_slice() { [parent_args @ .., closure_kind_ty, sig_ty, tupled_upvars_ty] => { let interner = DbInterner::conjure(); // This is stupid, but the next solver expects the first input to actually be a tuple @@ -428,13 +695,10 @@ impl<'db> rustc_type_ir::inherent::GenericArgs> for GenericArgs< interner, TyKind::FnPtr( sig_tys.map_bound(|s| { - let inputs = Ty::new_tup_from_iter(interner, s.inputs().iter()); + let inputs = Ty::new_tup(interner, s.inputs()); let output = s.output(); FnSigTys { - inputs_and_output: Tys::new_from_iter( - interner, - [inputs, output], - ), + inputs_and_output: Tys::new_from_slice(&[inputs, output]), } }), header, @@ -443,7 +707,7 @@ impl<'db> rustc_type_ir::inherent::GenericArgs> for GenericArgs< _ => unreachable!("sig_ty should be last"), }; rustc_type_ir::ClosureArgsParts { - parent_args: GenericArgs::new_from_iter(interner, parent_args.iter().cloned()), + parent_args, closure_sig_as_fn_ptr_ty: sig_ty, closure_kind_ty: closure_kind_ty.expect_ty(), tupled_upvars_ty: tupled_upvars_ty.expect_ty(), @@ -458,7 +722,7 @@ impl<'db> rustc_type_ir::inherent::GenericArgs> for GenericArgs< fn split_coroutine_closure_args( self, ) -> rustc_type_ir::CoroutineClosureArgsParts> { - match self.inner().as_slice() { + match self.as_slice() { [ parent_args @ .., closure_kind_ty, @@ -466,10 +730,7 @@ impl<'db> rustc_type_ir::inherent::GenericArgs> for GenericArgs< tupled_upvars_ty, coroutine_captures_by_ref_ty, ] => rustc_type_ir::CoroutineClosureArgsParts { - parent_args: GenericArgs::new_from_iter( - DbInterner::conjure(), - parent_args.iter().cloned(), - ), + 
parent_args, closure_kind_ty: closure_kind_ty.expect_ty(), signature_parts_ty: signature_parts_ty.expect_ty(), tupled_upvars_ty: tupled_upvars_ty.expect_ty(), @@ -480,11 +741,10 @@ impl<'db> rustc_type_ir::inherent::GenericArgs> for GenericArgs< } fn split_coroutine_args(self) -> rustc_type_ir::CoroutineArgsParts> { - let interner = DbInterner::conjure(); - match self.inner().as_slice() { + match self.as_slice() { [parent_args @ .., kind_ty, resume_ty, yield_ty, return_ty, tupled_upvars_ty] => { rustc_type_ir::CoroutineArgsParts { - parent_args: GenericArgs::new_from_iter(interner, parent_args.iter().cloned()), + parent_args, kind_ty: kind_ty.expect_ty(), resume_ty: resume_ty.expect_ty(), yield_ty: yield_ty.expect_ty(), @@ -518,25 +778,25 @@ pub fn error_for_param_kind<'db>(id: GenericParamId, interner: DbInterner<'db>) } impl<'db> IntoKind for Term<'db> { - type Kind = TermKind>; + type Kind = TermKind<'db>; + #[inline] fn kind(self) -> Self::Kind { - match self { - Term::Ty(ty) => TermKind::Ty(ty), - Term::Const(c) => TermKind::Const(c), - } + self.ptr.term_kind() } } impl<'db> From> for Term<'db> { + #[inline] fn from(value: Ty<'db>) -> Self { - Self::Ty(value) + Term { ptr: GenericArgImpl::new_ty(value) } } } impl<'db> From> for Term<'db> { + #[inline] fn from(value: Const<'db>) -> Self { - Self::Const(value) + Term { ptr: GenericArgImpl::new_const(value) } } } @@ -583,7 +843,7 @@ impl From for TermVid { impl<'db> DbInterner<'db> { pub(super) fn mk_args(self, args: &[GenericArg<'db>]) -> GenericArgs<'db> { - GenericArgs::new_from_iter(self, args.iter().cloned()) + GenericArgs::new_from_slice(args) } pub(super) fn mk_args_from_iter(self, iter: I) -> T::Output diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/at.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/at.rs index 70b659406f86c..dc0b584084e88 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/at.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/at.rs @@ -28,7 +28,7 @@ use rustc_type_ir::{ FnSig, GenericArgKind, TypeFoldable, TypingMode, Variance, error::ExpectedFound, - inherent::{IntoKind, Span as _}, + inherent::Span as _, relate::{Relate, TypeRelation, solver_relating::RelateExt}, }; @@ -68,6 +68,7 @@ impl<'db> InferCtxt<'db> { inner: self.inner.clone(), tainted_by_errors: self.tainted_by_errors.clone(), universe: self.universe.clone(), + obligation_inspector: self.obligation_inspector.clone(), } } @@ -84,6 +85,7 @@ impl<'db> InferCtxt<'db> { inner: self.inner.clone(), tainted_by_errors: self.tainted_by_errors.clone(), universe: self.universe.clone(), + obligation_inspector: self.obligation_inspector.clone(), } } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs index 1029a7ff39e80..ccd93590107fc 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs @@ -8,7 +8,7 @@ use rustc_hash::FxHashMap; use rustc_index::Idx; use rustc_type_ir::InferTy::{self, FloatVar, IntVar, TyVar}; -use rustc_type_ir::inherent::{Const as _, IntoKind as _, Region as _, SliceLike, Ty as _}; +use rustc_type_ir::inherent::{Const as _, IntoKind as _, Region as _, Ty as _}; use rustc_type_ir::{ BoundVar, BoundVarIndexKind, DebruijnIndex, Flags, InferConst, RegionKind, TyVid, TypeFlags, TypeFoldable, 
TypeFolder, TypeSuperFoldable, TypeVisitableExt, UniverseIndex, @@ -498,7 +498,7 @@ impl<'cx, 'db> Canonicalizer<'cx, 'db> { { let base = Canonical { max_universe: UniverseIndex::ROOT, - variables: CanonicalVars::new_from_iter(tcx, []), + variables: CanonicalVars::empty(tcx), value: (), }; Canonicalizer::canonicalize_with_base( @@ -562,7 +562,7 @@ impl<'cx, 'db> Canonicalizer<'cx, 'db> { debug_assert!(!out_value.has_infer() && !out_value.has_placeholders()); let canonical_variables = - CanonicalVars::new_from_iter(tcx, canonicalizer.universe_canonicalized_variables()); + CanonicalVars::new_from_slice(&canonicalizer.universe_canonicalized_variables()); let max_universe = canonical_variables .iter() diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs index 13c620cfdbc97..b758042e85b06 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs @@ -23,7 +23,7 @@ use rustc_index::{Idx as _, IndexVec}; use rustc_type_ir::{ BoundVar, BoundVarIndexKind, GenericArgKind, TypeFlags, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitableExt, UniverseIndex, - inherent::{GenericArg as _, IntoKind, SliceLike}, + inherent::{GenericArg as _, IntoKind}, }; use tracing::{debug, instrument}; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/mod.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/mod.rs index 14b8a61088d8a..2926dc30def7c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/mod.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/mod.rs @@ -10,8 +10,9 @@ use ena::unify as ut; use hir_def::GenericParamId; use opaque_types::{OpaqueHiddenType, OpaqueTypeStorage}; use region_constraints::{RegionConstraintCollector, RegionConstraintStorage}; -use rustc_next_trait_solver::solve::SolverDelegateEvalExt; +use rustc_next_trait_solver::solve::{GoalEvaluation, SolverDelegateEvalExt}; use rustc_pattern_analysis::Captures; +use rustc_type_ir::solve::{NoSolution, inspect}; use rustc_type_ir::{ ClosureKind, ConstVid, FloatVarValue, FloatVid, GenericArgKind, InferConst, InferTy, IntVarValue, IntVid, OutlivesPredicate, RegionVid, TermKind, TyVid, TypeFoldable, TypeFolder, @@ -27,6 +28,7 @@ use traits::{ObligationCause, PredicateObligations}; use type_variable::TypeVariableOrigin; use unify_key::{ConstVariableOrigin, ConstVariableValue, ConstVidKey}; +pub use crate::next_solver::infer::traits::ObligationInspector; use crate::next_solver::{ ArgOutlivesPredicate, BoundConst, BoundRegion, BoundTy, BoundVarKind, Goal, Predicate, SolverContext, @@ -250,6 +252,8 @@ pub struct InferCtxt<'db> { /// when we enter into a higher-ranked (`for<..>`) type or trait /// bound. universe: Cell, + + obligation_inspector: Cell>>, } /// See the `error_reporting` module for more details. @@ -375,6 +379,7 @@ impl<'db> InferCtxtBuilder<'db> { inner: RefCell::new(InferCtxtInner::new()), tainted_by_errors: Cell::new(None), universe: Cell::new(UniverseIndex::ROOT), + obligation_inspector: Cell::new(None), } } } @@ -1223,6 +1228,30 @@ impl<'db> InferCtxt<'db> { fn sub_unify_ty_vids_raw(&self, a: rustc_type_ir::TyVid, b: rustc_type_ir::TyVid) { self.inner.borrow_mut().type_variables().sub_unify(a, b); } + + /// Attach a callback to be invoked on each root obligation evaluated in the new trait solver. 
+ pub fn attach_obligation_inspector(&self, inspector: ObligationInspector<'db>) { + debug_assert!( + self.obligation_inspector.get().is_none(), + "shouldn't override a set obligation inspector" + ); + self.obligation_inspector.set(Some(inspector)); + } + + pub fn inspect_evaluated_obligation( + &self, + obligation: &PredicateObligation<'db>, + result: &Result>, NoSolution>, + get_proof_tree: impl FnOnce() -> Option>>, + ) { + if let Some(inspector) = self.obligation_inspector.get() { + let result = match result { + Ok(GoalEvaluation { certainty, .. }) => Ok(*certainty), + Err(_) => Err(NoSolution), + }; + (inspector)(self, obligation, result, get_proof_tree()); + } + } } /// Helper for [InferCtxt::ty_or_const_infer_var_changed] (see comment on that), currently diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/outlives/obligations.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/outlives/obligations.rs index befb2001b1b9b..617dbda1338ba 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/outlives/obligations.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/outlives/obligations.rs @@ -3,7 +3,7 @@ use rustc_type_ir::{OutlivesPredicate, TypeVisitableExt}; use tracing::{debug, instrument}; use crate::next_solver::{ - ArgOutlivesPredicate, GenericArg, Region, RegionOutlivesPredicate, Ty, + ArgOutlivesPredicate, GenericArgKind, Region, RegionOutlivesPredicate, Ty, infer::{InferCtxt, TypeOutlivesConstraint, snapshot::undo_log::UndoLog}, }; @@ -12,14 +12,14 @@ impl<'db> InferCtxt<'db> { &self, OutlivesPredicate(arg, r2): ArgOutlivesPredicate<'db>, ) { - match arg { - GenericArg::Lifetime(r1) => { + match arg.kind() { + GenericArgKind::Lifetime(r1) => { self.register_region_outlives_constraint(OutlivesPredicate(r1, r2)); } - GenericArg::Ty(ty1) => { + GenericArgKind::Type(ty1) => { self.register_type_outlives_constraint(ty1, r2); } - GenericArg::Const(_) => unreachable!(), + GenericArgKind::Const(_) => unreachable!(), } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/generalize.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/generalize.rs index d06984cac11cb..0f7ae99fa41d0 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/generalize.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/generalize.rs @@ -16,14 +16,14 @@ use tracing::{debug, instrument, warn}; use super::{ PredicateEmittingRelation, Relate, RelateResult, StructurallyRelateAliases, TypeRelation, }; -use crate::next_solver::infer::type_variable::TypeVariableValue; use crate::next_solver::infer::unify_key::ConstVariableValue; use crate::next_solver::infer::{InferCtxt, relate}; use crate::next_solver::util::MaxUniverse; use crate::next_solver::{ - AliasTy, Binder, ClauseKind, Const, ConstKind, DbInterner, GenericArgs, PredicateKind, Region, - SolverDefId, Term, TermVid, Ty, TyKind, TypingMode, UnevaluatedConst, + AliasTy, Binder, ClauseKind, Const, ConstKind, DbInterner, PredicateKind, Region, SolverDefId, + Term, TermVid, Ty, TyKind, TypingMode, UnevaluatedConst, }; +use crate::next_solver::{GenericArgs, infer::type_variable::TypeVariableValue}; impl<'db> InferCtxt<'db> { /// The idea is that we should ensure that the type variable `target_vid` @@ -384,29 +384,26 @@ impl<'db> TypeRelation> for Generalizer<'_, 'db> { self.infcx.interner } - fn relate_item_args( + fn relate_ty_args( &mut self, - item_def_id: SolverDefId, - a_arg: GenericArgs<'db>, - 
b_arg: GenericArgs<'db>, - ) -> RelateResult<'db, GenericArgs<'db>> { - if self.ambient_variance == Variance::Invariant { + a_ty: Ty<'db>, + _: Ty<'db>, + def_id: SolverDefId, + a_args: GenericArgs<'db>, + b_args: GenericArgs<'db>, + mk: impl FnOnce(GenericArgs<'db>) -> Ty<'db>, + ) -> RelateResult<'db, Ty<'db>> { + let args = if self.ambient_variance == Variance::Invariant { // Avoid fetching the variance if we are in an invariant // context; no need, and it can induce dependency cycles // (e.g., #41849). - relate::relate_args_invariantly(self, a_arg, b_arg) + relate::relate_args_invariantly(self, a_args, b_args) } else { - let tcx = self.cx(); - let opt_variances = tcx.variances_of(item_def_id); - relate::relate_args_with_variances( - self, - item_def_id, - opt_variances, - a_arg, - b_arg, - false, - ) - } + let interner = self.cx(); + let variances = interner.variances_of(def_id); + relate::relate_args_with_variances(self, variances, a_args, b_args) + }?; + if args == a_args { Ok(a_ty) } else { Ok(mk(args)) } } #[instrument(level = "debug", skip(self, variance, b), ret)] diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/lattice.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/lattice.rs index 374895c337c78..1abe6a93f4dd5 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/lattice.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/lattice.rs @@ -18,17 +18,19 @@ //! [lattices]: https://en.wikipedia.org/wiki/Lattice_(order) use rustc_type_ir::{ - AliasRelationDirection, TypeVisitableExt, Upcast, Variance, + AliasRelationDirection, Interner, TypeVisitableExt, Upcast, Variance, inherent::{IntoKind, Span as _}, relate::{ Relate, StructurallyRelateAliases, TypeRelation, VarianceDiagInfo, - combine::{PredicateEmittingRelation, super_combine_consts, super_combine_tys}, + combine::{ + PredicateEmittingRelation, combine_ty_args, super_combine_consts, super_combine_tys, + }, }, }; use crate::next_solver::{ - AliasTy, Binder, Const, DbInterner, Goal, ParamEnv, Predicate, PredicateKind, Region, Span, Ty, - TyKind, + AliasTy, Binder, Const, DbInterner, GenericArgs, Goal, ParamEnv, Predicate, PredicateKind, + Region, SolverDefId, Span, Ty, TyKind, infer::{ InferCtxt, TypeTrace, relate::RelateResult, @@ -82,6 +84,19 @@ impl<'db> TypeRelation> for LatticeOp<'_, 'db> { self.infcx.interner } + fn relate_ty_args( + &mut self, + a_ty: Ty<'db>, + b_ty: Ty<'db>, + def_id: SolverDefId, + a_args: GenericArgs<'db>, + b_args: GenericArgs<'db>, + mk: impl FnOnce(GenericArgs<'db>) -> Ty<'db>, + ) -> RelateResult<'db, Ty<'db>> { + let variances = self.cx().variances_of(def_id); + combine_ty_args(self.infcx, self, a_ty, b_ty, variances, a_args, b_args, mk) + } + fn relate_with_variance>>( &mut self, variance: Variance, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/traits.rs index 3409de17a1223..14df42dc2aebe 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/traits.rs @@ -9,8 +9,11 @@ use std::{ use hir_def::TraitId; use macros::{TypeFoldable, TypeVisitable}; -use rustc_type_ir::Upcast; use rustc_type_ir::elaborate::Elaboratable; +use rustc_type_ir::{ + Upcast, + solve::{Certainty, NoSolution, inspect}, +}; use tracing::debug; use crate::next_solver::{ @@ -79,6 +82,15 @@ pub struct Obligation<'db, T> { pub 
recursion_depth: usize, } +/// A callback that can be provided to `inspect_typeck`. Invoked on evaluation +/// of root obligations. +pub type ObligationInspector<'db> = fn( + &InferCtxt<'db>, + &PredicateObligation<'db>, + Result, + Option>>, +); + /// For [`Obligation`], a sub-obligation is combined with the current obligation's /// param-env and cause code. impl<'db> Elaboratable> for PredicateObligation<'db> { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/inspect.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/inspect.rs index d66aa9f277c73..5286977549597 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/inspect.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/inspect.rs @@ -74,7 +74,7 @@ impl<'a, 'db> std::fmt::Debug for InspectCandidate<'a, 'db> { /// treat `NormalizesTo` goals as if they apply the expected /// type at the end of each candidate. #[derive(Debug, Copy, Clone)] -struct NormalizesToTermHack<'db> { +pub(crate) struct NormalizesToTermHack<'db> { term: Term<'db>, unconstrained_term: Term<'db>, } @@ -311,10 +311,7 @@ impl<'a, 'db> InspectCandidate<'a, 'db> { /// Visit all nested goals of this candidate, rolling back /// all inference constraints. #[expect(dead_code, reason = "used in rustc")] - pub(crate) fn visit_nested_in_probe>( - &self, - visitor: &mut V, - ) -> V::Result { + fn visit_nested_in_probe>(&self, visitor: &mut V) -> V::Result { self.goal.infcx.probe(|_| self.visit_nested_no_probe(visitor)) } } @@ -430,7 +427,7 @@ impl<'a, 'db> InspectGoal<'a, 'db> { candidates.pop().filter(|_| candidates.is_empty()) } - fn new( + pub(crate) fn new( infcx: &'a InferCtxt<'db>, depth: usize, root: inspect::GoalEvaluation>, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs index 8b24a20a5bed4..2ebc5b81ba5c1 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs @@ -1,7 +1,9 @@ //! Things related to the Interner in the next-trait-solver. 
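
Before the interner changes, a brief note on the `ObligationInspector` hook defined in `traits.rs` above: together with `attach_obligation_inspector` and `inspect_evaluated_obligation` added to `InferCtxt` earlier in this diff, it forms a small callback API. The following is a hedged sketch of how a caller inside `hir-ty` might use it; `attach_logging_inspector` and the logging body are invented for illustration, while the types and methods named are the ones this patch adds.

    use crate::next_solver::infer::InferCtxt;

    // Hypothetical helper, not part of the patch: wire up a logging inspector.
    fn attach_logging_inspector<'db>(infcx: &InferCtxt<'db>) {
        // `ObligationInspector` is a plain `fn` pointer type, so a
        // non-capturing closure coerces to it.
        infcx.attach_obligation_inspector(|_infcx, _obligation, result, proof_tree| {
            // `result` is the evaluation collapsed to its `Certainty`, and
            // `proof_tree` is `Some` only when `inspect_evaluated_obligation`
            // was handed a closure that produced one.
            eprintln!(
                "root obligation evaluated: {result:?} (proof tree captured: {})",
                proof_tree.is_some()
            );
        });
    }

Because `inspect_evaluated_obligation` receives the proof tree through a `FnOnce` closure and only invokes it when an inspector is attached, the (expensive) proof-tree evaluation is skipped entirely in the common case.
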
-use std::fmt; +use std::{fmt, ops::ControlFlow}; +use intern::{Interned, InternedRef, InternedSliceRef, impl_internable}; +use macros::GenericTypeVisitable; use rustc_ast_ir::{FloatTy, IntTy, UintTy}; pub use tls_cache::clear_tls_solver_cache; pub use tls_db::{attach_db, attach_db_allow_change, with_attached_db}; @@ -19,13 +21,13 @@ use rustc_abi::{ReprFlags, ReprOptions}; use rustc_hash::FxHashSet; use rustc_index::bit_set::DenseBitSet; use rustc_type_ir::{ - AliasTermKind, AliasTyKind, BoundVar, CollectAndApply, CoroutineWitnessTypes, DebruijnIndex, - EarlyBinder, FlagComputation, Flags, GenericArgKind, ImplPolarity, InferTy, Interner, TraitRef, - TypeFlags, TypeVisitableExt, UniverseIndex, Upcast, Variance, + AliasTermKind, AliasTyKind, BoundVar, CoroutineWitnessTypes, DebruijnIndex, EarlyBinder, + FlagComputation, Flags, GenericArgKind, GenericTypeVisitable, ImplPolarity, InferTy, Interner, + TraitRef, TypeFlags, TypeVisitableExt, UniverseIndex, Upcast, Variance, elaborate::elaborate, error::TypeError, fast_reject, - inherent::{self, GenericsOf, IntoKind, SliceLike as _, Span as _, Ty as _}, + inherent::{self, Const as _, GenericsOf, IntoKind, SliceLike as _, Span as _, Ty as _}, lang_items::{SolverAdtLangItem, SolverLangItem, SolverTraitLangItem}, solve::SizedTraitKind, }; @@ -39,7 +41,7 @@ use crate::{ AdtIdWrapper, BoundConst, CallableIdWrapper, CanonicalVarKind, ClosureIdWrapper, CoroutineIdWrapper, Ctor, FnSig, FxIndexMap, GeneralConstIdWrapper, ImplIdWrapper, OpaqueTypeKey, RegionAssumptions, SimplifiedType, SolverContext, SolverDefIds, - TraitIdWrapper, TypeAliasIdWrapper, util::explicit_item_bounds, + TraitIdWrapper, TypeAliasIdWrapper, UnevaluatedConst, util::explicit_item_bounds, }, }; @@ -57,216 +59,256 @@ use super::{ util::sizedness_constraint_for_ty, }; -#[derive(PartialEq, Eq, Hash, PartialOrd, Ord, Clone)] -pub struct InternedWrapperNoDebug(pub(crate) T); +macro_rules! interned_slice { + ($storage:ident, $name:ident, $stored_name:ident, $default_types_field:ident, $ty_db:ty, $ty_static:ty $(,)?) => { + const _: () = { + #[allow(unused_lifetimes)] + fn _ensure_correct_types<'db: 'static>(v: $ty_db) -> $ty_static { v } + }; -#[macro_export] -#[doc(hidden)] -macro_rules! 
_interned_vec_nolifetime_salsa { - ($name:ident, $ty:ty) => { - interned_vec_nolifetime_salsa!($name, $ty, nofold); + ::intern::impl_slice_internable!(gc; $storage, (), $ty_static); - impl<'db> rustc_type_ir::TypeFoldable> for $name<'db> { - fn try_fold_with>>( - self, - folder: &mut F, - ) -> Result { - use rustc_type_ir::inherent::SliceLike as _; - let inner: smallvec::SmallVec<[_; 2]> = - self.iter().map(|v| v.try_fold_with(folder)).collect::>()?; - Ok($name::new_(folder.cx().db(), inner)) - } - fn fold_with>>( - self, - folder: &mut F, - ) -> Self { - use rustc_type_ir::inherent::SliceLike as _; - let inner: smallvec::SmallVec<[_; 2]> = - self.iter().map(|v| v.fold_with(folder)).collect(); - $name::new_(folder.cx().db(), inner) - } + #[derive(Clone, Copy, PartialEq, Eq, Hash)] + pub struct $name<'db> { + interned: ::intern::InternedSliceRef<'db, $storage>, } - impl<'db> rustc_type_ir::TypeVisitable> for $name<'db> { - fn visit_with>>( - &self, - visitor: &mut V, - ) -> V::Result { - use rustc_ast_ir::visit::VisitorResult; - use rustc_type_ir::inherent::SliceLike as _; - rustc_ast_ir::walk_visitable_list!(visitor, self.as_slice().iter()); - V::Result::output() + impl<'db> std::fmt::Debug for $name<'db> { + fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.as_slice().fmt(fmt) } } - }; - ($name:ident, $ty:ty, nofold) => { - #[salsa::interned(constructor = new_)] - pub struct $name { - #[returns(ref)] - inner_: smallvec::SmallVec<[$ty; 2]>, - } impl<'db> $name<'db> { - pub fn new_from_iter( - interner: DbInterner<'db>, - data: impl IntoIterator, - ) -> Self { - $name::new_(interner.db(), data.into_iter().collect::>()) + #[inline] + pub fn empty(interner: DbInterner<'db>) -> Self { + interner.default_types().empty.$default_types_field + } + + #[inline] + pub fn new_from_slice(slice: &[$ty_db]) -> Self { + let slice = unsafe { ::std::mem::transmute::<&[$ty_db], &[$ty_static]>(slice) }; + Self { interned: ::intern::InternedSlice::from_header_and_slice((), slice) } } - pub fn inner(&self) -> &smallvec::SmallVec<[$ty; 2]> { - // SAFETY: ¯\_(ツ)_/¯ - $crate::with_attached_db(|db| { - let inner = self.inner_(db); - unsafe { std::mem::transmute(inner) } + #[inline] + pub fn new_from_iter(_interner: DbInterner<'db>, args: I) -> T::Output + where + I: IntoIterator, + T: ::rustc_type_ir::CollectAndApply<$ty_db, Self>, + { + ::rustc_type_ir::CollectAndApply::collect_and_apply(args.into_iter(), |g| { + Self::new_from_slice(g) }) } + + #[inline] + pub fn as_slice(self) -> &'db [$ty_db] { + let slice = &self.interned.get().slice; + unsafe { ::std::mem::transmute::<&[$ty_static], &[$ty_db]>(slice) } + } + + #[inline] + pub fn iter(self) -> ::std::iter::Copied<::std::slice::Iter<'db, $ty_db>> { + self.as_slice().iter().copied() + } + + #[inline] + pub fn len(self) -> usize { + self.as_slice().len() + } + + #[inline] + pub fn is_empty(self) -> bool { + self.as_slice().is_empty() + } } - impl<'db> std::fmt::Debug for $name<'db> { - fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.as_slice().fmt(fmt) + impl<'db> IntoIterator for $name<'db> { + type IntoIter = ::std::iter::Copied<::std::slice::Iter<'db, $ty_db>>; + type Item = $ty_db; + #[inline] + fn into_iter(self) -> Self::IntoIter { self.iter() } + } + + impl<'db> ::std::ops::Deref for $name<'db> { + type Target = [$ty_db]; + + #[inline] + fn deref(&self) -> &Self::Target { + (*self).as_slice() } } impl<'db> rustc_type_ir::inherent::SliceLike for $name<'db> { - type Item = $ty; + type Item = $ty_db; - 
type IntoIter = as IntoIterator>::IntoIter; + type IntoIter = ::std::iter::Copied<::std::slice::Iter<'db, $ty_db>>; + #[inline] fn iter(self) -> Self::IntoIter { - self.inner().clone().into_iter() + self.iter() } + #[inline] fn as_slice(&self) -> &[Self::Item] { - self.inner().as_slice() + (*self).as_slice() } } - impl<'db> IntoIterator for $name<'db> { - type Item = $ty; - type IntoIter = ::IntoIter; - - fn into_iter(self) -> Self::IntoIter { - rustc_type_ir::inherent::SliceLike::iter(self) + impl<'db> Default for $name<'db> { + #[inline] + fn default() -> Self { + $name::empty(DbInterner::conjure()) } } - impl<'db> Default for $name<'db> { - fn default() -> Self { - $name::new_from_iter(DbInterner::conjure(), []) + + impl<'db, V: $crate::next_solver::interner::WorldExposer> + rustc_type_ir::GenericTypeVisitable for $name<'db> + { + #[inline] + fn generic_visit_with(&self, visitor: &mut V) { + if visitor.on_interned_slice(self.interned).is_continue() { + self.as_slice().iter().for_each(|it| it.generic_visit_with(visitor)); + } } } + + $crate::next_solver::interner::impl_stored_interned_slice!($storage, $name, $stored_name); }; } +pub(crate) use interned_slice; -pub use crate::_interned_vec_nolifetime_salsa as interned_vec_nolifetime_salsa; +macro_rules! impl_stored_interned_slice { + ( $storage:ident, $name:ident, $stored_name:ident $(,)? ) => { + #[derive(Clone, PartialEq, Eq, Hash)] + pub struct $stored_name { + interned: ::intern::InternedSlice<$storage>, + } -#[macro_export] -#[doc(hidden)] -macro_rules! _interned_vec_db { - ($name:ident, $ty:ident) => { - interned_vec_db!($name, $ty, nofold); + impl $stored_name { + #[inline] + fn new(it: $name<'_>) -> Self { + Self { interned: it.interned.to_owned() } + } - impl<'db> rustc_type_ir::TypeFoldable> for $name<'db> { - fn try_fold_with>>( - self, - folder: &mut F, - ) -> Result { - use rustc_type_ir::inherent::SliceLike as _; - let inner: smallvec::SmallVec<[_; 2]> = - self.iter().map(|v| v.try_fold_with(folder)).collect::>()?; - Ok($name::new_(folder.cx().db(), inner)) + #[inline] + pub fn as_ref<'a, 'db>(&'a self) -> $name<'db> { + let it = $name { interned: self.interned.as_ref() }; + unsafe { std::mem::transmute::<$name<'a>, $name<'db>>(it) } } - fn fold_with>>( - self, - folder: &mut F, - ) -> Self { - use rustc_type_ir::inherent::SliceLike as _; - let inner: smallvec::SmallVec<[_; 2]> = - self.iter().map(|v| v.fold_with(folder)).collect(); - $name::new_(folder.cx().db(), inner) + } + + // SAFETY: It is safe to store this type in queries (but not `$name`). + unsafe impl salsa::Update for $stored_name { + unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool { + // SAFETY: Comparing by (pointer) equality is safe. + unsafe { crate::utils::unsafe_update_eq(old_pointer, new_value) } + } + } + + impl std::fmt::Debug for $stored_name { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.as_ref().fmt(f) + } + } + + impl $name<'_> { + #[inline] + pub fn store(self) -> $stored_name { + $stored_name::new(self) } } + }; +} +pub(crate) use impl_stored_interned_slice; - impl<'db> rustc_type_ir::TypeVisitable> for $name<'db> { +macro_rules! 
impl_foldable_for_interned_slice { + ($name:ident) => { + impl<'db> ::rustc_type_ir::TypeVisitable> for $name<'db> { fn visit_with>>( &self, visitor: &mut V, ) -> V::Result { use rustc_ast_ir::visit::VisitorResult; - use rustc_type_ir::inherent::SliceLike as _; - rustc_ast_ir::walk_visitable_list!(visitor, self.as_slice().iter()); + rustc_ast_ir::walk_visitable_list!(visitor, (*self).iter()); V::Result::output() } } - }; - ($name:ident, $ty:ident, nofold) => { - #[salsa::interned(constructor = new_)] - pub struct $name<'db> { - #[returns(ref)] - inner_: smallvec::SmallVec<[$ty<'db>; 2]>, - } - - impl<'db> std::fmt::Debug for $name<'db> { - fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.as_slice().fmt(fmt) - } - } - impl<'db> $name<'db> { - pub fn empty(interner: DbInterner<'db>) -> Self { - $name::new_(interner.db(), smallvec::SmallVec::new()) + impl<'db> rustc_type_ir::TypeFoldable> for $name<'db> { + fn try_fold_with>>( + self, + folder: &mut F, + ) -> Result { + Self::new_from_iter(folder.cx(), self.iter().map(|it| it.try_fold_with(folder))) } - - pub fn new_from_iter( - interner: DbInterner<'db>, - data: impl IntoIterator>, + fn fold_with>>( + self, + folder: &mut F, ) -> Self { - $name::new_(interner.db(), data.into_iter().collect::>()) - } - - pub fn inner(&self) -> &smallvec::SmallVec<[$ty<'db>; 2]> { - // SAFETY: ¯\_(ツ)_/¯ - $crate::with_attached_db(|db| { - let inner = self.inner_(db); - unsafe { std::mem::transmute(inner) } - }) + Self::new_from_iter(folder.cx(), self.iter().map(|it| it.fold_with(folder))) } } + }; +} +pub(crate) use impl_foldable_for_interned_slice; - impl<'db> rustc_type_ir::inherent::SliceLike for $name<'db> { - type Item = $ty<'db>; - - type IntoIter = ; 2]> as IntoIterator>::IntoIter; +macro_rules! impl_stored_interned { + ( $storage:ident, $name:ident, $stored_name:ident $(,)? ) => { + #[derive(Clone, PartialEq, Eq, Hash)] + pub struct $stored_name { + interned: ::intern::Interned<$storage>, + } - fn iter(self) -> Self::IntoIter { - self.inner().clone().into_iter() + impl $stored_name { + #[inline] + fn new(it: $name<'_>) -> Self { + Self { interned: it.interned.to_owned() } } - fn as_slice(&self) -> &[Self::Item] { - self.inner().as_slice() + #[inline] + pub fn as_ref<'a, 'db>(&'a self) -> $name<'db> { + let it = $name { interned: self.interned.as_ref() }; + unsafe { std::mem::transmute::<$name<'a>, $name<'db>>(it) } } } - impl<'db> IntoIterator for $name<'db> { - type Item = $ty<'db>; - type IntoIter = ::IntoIter; + unsafe impl salsa::Update for $stored_name { + unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool { + unsafe { crate::utils::unsafe_update_eq(old_pointer, new_value) } + } + } - fn into_iter(self) -> Self::IntoIter { - rustc_type_ir::inherent::SliceLike::iter(self) + impl std::fmt::Debug for $stored_name { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.as_ref().fmt(f) } } - impl<'db> Default for $name<'db> { - fn default() -> Self { - $name::new_from_iter(DbInterner::conjure(), []) + impl $name<'_> { + #[inline] + pub fn store(self) -> $stored_name { + $stored_name::new(self) } } }; } - -pub use crate::_interned_vec_db as interned_vec_db; +pub(crate) use impl_stored_interned; + +/// This is a visitor trait that treats any interned thing specifically. Visitables are expected to call +/// the trait's methods when encountering an interned. This is used to implement marking in GC. 
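
To illustrate the marking protocol that the comment above describes (the trait itself follows immediately below), here is a small standalone analogue that is not taken from the `intern` crate: `Node`, `Marker`, and `visit` are invented names. The visitor answers `Continue` the first time it sees a value and `Break` afterwards, so shared data is descended into at most once, which mirrors how the `GarbageCollector` implementation of `WorldExposer` later in this diff stops at interned values it has already marked.

    use std::collections::HashSet;
    use std::ops::ControlFlow;
    use std::rc::Rc;

    struct Node {
        id: usize,
        children: Vec<Rc<Node>>,
    }

    #[derive(Default)]
    struct Marker {
        alive: HashSet<usize>,
    }

    impl Marker {
        // Mirrors `on_interned`: mark the value, and tell the caller whether
        // it still needs to descend into the value's contents.
        fn on_node(&mut self, node: &Node) -> ControlFlow<()> {
            if self.alive.insert(node.id) {
                ControlFlow::Continue(())
            } else {
                ControlFlow::Break(())
            }
        }
    }

    fn visit(node: &Rc<Node>, marker: &mut Marker) {
        // Mirrors `generic_visit_with`: only recurse while the marker says "continue".
        if marker.on_node(node).is_continue() {
            for child in &node.children {
                visit(child, marker);
            }
        }
    }

    fn main() {
        let shared = Rc::new(Node { id: 0, children: vec![] });
        let root = Rc::new(Node {
            id: 1,
            children: vec![shared.clone(), shared.clone()],
        });
        let mut marker = Marker::default();
        visit(&root, &mut marker);
        // The shared child is visited once even though it is referenced twice.
        assert_eq!(marker.alive.len(), 2);
    }
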
+pub trait WorldExposer { + fn on_interned( + &mut self, + interned: InternedRef<'_, T>, + ) -> ControlFlow<()>; + fn on_interned_slice( + &mut self, + interned: InternedSliceRef<'_, T>, + ) -> ControlFlow<()>; +} #[derive(Debug, Copy, Clone)] pub struct DbInterner<'db> { @@ -321,6 +363,11 @@ impl<'db> DbInterner<'db> { where you should've called `DbInterner::new_with()`", ) } + + #[inline] + pub fn default_types<'a>(&self) -> &'a crate::next_solver::DefaultAny<'db> { + crate::next_solver::default_types(self.db) + } } // This is intentionally left as `()` @@ -333,7 +380,14 @@ impl<'db> inherent::Span> for Span { } } -interned_vec_nolifetime_salsa!(BoundVarKinds, BoundVarKind, nofold); +interned_slice!( + BoundVarKindsStorage, + BoundVarKinds, + StoredBoundVarKinds, + bound_var_kinds, + BoundVarKind, + BoundVarKind, +); #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] pub enum BoundVarKind { @@ -365,7 +419,14 @@ impl BoundVarKind { } } -interned_vec_db!(CanonicalVars, CanonicalVarKind, nofold); +interned_slice!( + CanonicalVarsStorage, + CanonicalVars, + StoredCanonicalVars, + canonical_vars, + CanonicalVarKind<'db>, + CanonicalVarKind<'static> +); pub struct DepNodeIndex; @@ -391,7 +452,7 @@ impl std::fmt::Debug for Placeholder { #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] pub struct AllocId; -interned_vec_nolifetime_salsa!(VariancesOf, Variance, nofold); +interned_slice!(VariancesOfStorage, VariancesOf, StoredVariancesOf, variances, Variance, Variance); #[derive(Debug, Clone, Eq, PartialEq, Hash)] pub struct VariantIdx(usize); @@ -658,7 +719,7 @@ impl<'db> inherent::AdtDef> for AdtDef { let id: VariantId = struct_id.into(); let field_types = interner.db().field_types(id); - field_types.iter().last().map(|f| *f.1) + field_types.iter().last().map(|f| f.1.get()) } fn all_field_tys( @@ -668,7 +729,7 @@ impl<'db> inherent::AdtDef> for AdtDef { let db = interner.db(); // FIXME: this is disabled just to match the behavior with chalk right now let _field_tys = |id: VariantId| { - db.field_types(id).iter().map(|(_, ty)| ty.skip_binder()).collect::>() + db.field_types(id).iter().map(|(_, ty)| ty.get().skip_binder()).collect::>() }; let field_tys = |_id: VariantId| vec![]; let tys: Vec<_> = match self.inner().id { @@ -762,30 +823,36 @@ impl std::ops::Deref for UnsizingParams { pub type PatternKind<'db> = rustc_type_ir::PatternKind>; -#[salsa::interned(constructor = new_, debug)] +#[derive(Clone, Copy, PartialEq, Eq, Hash)] pub struct Pattern<'db> { - #[returns(ref)] - kind_: InternedWrapperNoDebug>, + interned: InternedRef<'db, PatternInterned>, } -impl<'db> std::fmt::Debug for InternedWrapperNoDebug> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.0.fmt(f) - } -} +#[derive(PartialEq, Eq, Hash, GenericTypeVisitable)] +struct PatternInterned(PatternKind<'static>); + +impl_internable!(gc; PatternInterned); + +const _: () = { + const fn is_copy() {} + is_copy::>(); +}; impl<'db> Pattern<'db> { - pub fn new(interner: DbInterner<'db>, kind: PatternKind<'db>) -> Self { - Pattern::new_(interner.db(), InternedWrapperNoDebug(kind)) + pub fn new(_interner: DbInterner<'db>, kind: PatternKind<'db>) -> Self { + let kind = unsafe { std::mem::transmute::, PatternKind<'static>>(kind) }; + Self { interned: Interned::new_gc(PatternInterned(kind)) } } pub fn inner(&self) -> &PatternKind<'db> { - crate::with_attached_db(|db| { - let inner = &self.kind_(db).0; - // SAFETY: The caller already has access to a `Ty<'db>`, so borrowchecking will - // make sure that our returned value 
is valid for the lifetime `'db`. - unsafe { std::mem::transmute(inner) } - }) + let inner = &self.interned.0; + unsafe { std::mem::transmute::<&PatternKind<'static>, &PatternKind<'db>>(inner) } + } +} + +impl<'db> std::fmt::Debug for Pattern<'db> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.kind().fmt(f) } } @@ -831,6 +898,36 @@ impl<'db> rustc_type_ir::inherent::IntoKind for Pattern<'db> { } } +impl<'db> rustc_type_ir::TypeVisitable> for Pattern<'db> { + fn visit_with>>( + &self, + visitor: &mut V, + ) -> V::Result { + self.kind().visit_with(visitor) + } +} + +impl<'db, V: WorldExposer> rustc_type_ir::GenericTypeVisitable for Pattern<'db> { + fn generic_visit_with(&self, visitor: &mut V) { + if visitor.on_interned(self.interned).is_continue() { + self.kind().generic_visit_with(visitor); + } + } +} + +impl<'db> rustc_type_ir::TypeFoldable> for Pattern<'db> { + fn try_fold_with>>( + self, + folder: &mut F, + ) -> Result { + Ok(Pattern::new(folder.cx(), self.kind().try_fold_with(folder)?)) + } + + fn fold_with>>(self, folder: &mut F) -> Self { + Pattern::new(folder.cx(), self.kind().fold_with(folder)) + } +} + impl<'db> rustc_type_ir::relate::Relate> for Pattern<'db> { fn relate>>( relation: &mut R, @@ -851,9 +948,9 @@ impl<'db> rustc_type_ir::relate::Relate> for Pattern<'db> { if a.len() != b.len() { return Err(TypeError::Mismatch); } - let pats = CollectAndApply::collect_and_apply( + let pats = PatList::new_from_iter( + relation.cx(), std::iter::zip(a.iter(), b.iter()).map(|(a, b)| relation.relate(a, b)), - |g| PatList::new_from_iter(tcx, g.iter().cloned()), )?; Ok(Pattern::new(tcx, PatternKind::Or(pats))) } @@ -865,7 +962,8 @@ impl<'db> rustc_type_ir::relate::Relate> for Pattern<'db> { } } -interned_vec_db!(PatList, Pattern); +interned_slice!(PatListStorage, PatList, StoredPatList, pat_list, Pattern<'db>, Pattern<'static>); +impl_foldable_for_interned_slice!(PatList); macro_rules! 
as_lang_item { ( @@ -927,7 +1025,7 @@ impl<'db> Interner for DbInterner<'db> { type Span = Span; type GenericArgs = GenericArgs<'db>; - type GenericArgsSlice = GenericArgs<'db>; + type GenericArgsSlice = &'db [GenericArg<'db>]; type GenericArg = GenericArg<'db>; type Term = Term<'db>; @@ -941,7 +1039,7 @@ impl<'db> Interner for DbInterner<'db> { self, data: &[(OpaqueTypeKey<'db>, Self::Ty)], ) -> Self::PredefinedOpaques { - PredefinedOpaques::new_from_iter(self, data.iter().cloned()) + PredefinedOpaques::new_from_slice(data) } type CanonicalVarKinds = CanonicalVars<'db>; @@ -950,7 +1048,7 @@ impl<'db> Interner for DbInterner<'db> { self, kinds: &[rustc_type_ir::CanonicalVarKind], ) -> Self::CanonicalVarKinds { - CanonicalVars::new_from_iter(self, kinds.iter().cloned()) + CanonicalVars::new_from_slice(kinds) } type ExternalConstraints = ExternalConstraints<'db>; @@ -968,7 +1066,7 @@ impl<'db> Interner for DbInterner<'db> { type Ty = Ty<'db>; type Tys = Tys<'db>; - type FnInputTys = Tys<'db>; + type FnInputTys = &'db [Ty<'db>]; type ParamTy = ParamTy; type BoundTy = BoundTy; type PlaceholderTy = PlaceholderTy; @@ -1012,7 +1110,7 @@ impl<'db> Interner for DbInterner<'db> { type Features = Features; fn mk_args(self, args: &[Self::GenericArg]) -> Self::GenericArgs { - GenericArgs::new_from_iter(self, args.iter().cloned()) + GenericArgs::new_from_slice(args) } fn mk_args_from_iter(self, args: I) -> T::Output @@ -1020,9 +1118,7 @@ impl<'db> Interner for DbInterner<'db> { I: Iterator, T: rustc_type_ir::CollectAndApply, { - CollectAndApply::collect_and_apply(args, |g| { - GenericArgs::new_from_iter(self, g.iter().cloned()) - }) + GenericArgs::new_from_iter(self, args) } type UnsizingParams = UnsizingParams; @@ -1096,7 +1192,7 @@ impl<'db> Interner for DbInterner<'db> { | SolverDefId::ImplId(_) | SolverDefId::InternedClosureId(_) | SolverDefId::InternedCoroutineId(_) => { - return VariancesOf::new_from_iter(self, []); + return VariancesOf::empty(self); } }; self.db.variances_of(generic_def) @@ -1174,12 +1270,9 @@ impl<'db> Interner for DbInterner<'db> { ) -> (rustc_type_ir::TraitRef, Self::GenericArgsSlice) { let trait_def_id = self.parent(def_id); let trait_generics = self.generics_of(trait_def_id); - let trait_args = GenericArgs::new_from_iter( - self, - args.as_slice()[0..trait_generics.own_params.len()].iter().cloned(), - ); - let alias_args = - GenericArgs::new_from_iter(self, args.iter().skip(trait_generics.own_params.len())); + let trait_args = + GenericArgs::new_from_slice(&args.as_slice()[0..trait_generics.own_params.len()]); + let alias_args = &args.as_slice()[trait_generics.own_params.len()..]; (TraitRef::new_from_args(self, trait_def_id.try_into().unwrap(), trait_args), alias_args) } @@ -1202,7 +1295,7 @@ impl<'db> Interner for DbInterner<'db> { I: Iterator, T: rustc_type_ir::CollectAndApply, { - CollectAndApply::collect_and_apply(args, |g| Tys::new_from_iter(self, g.iter().cloned())) + Tys::new_from_iter(self, args) } fn parent(self, def_id: Self::DefId) -> Self::DefId { @@ -1338,7 +1431,7 @@ impl<'db> Interner for DbInterner<'db> { let own_bounds: FxHashSet<_> = self.item_self_bounds(def_id).skip_binder().into_iter().collect(); if all_bounds.len() == own_bounds.len() { - EarlyBinder::bind(Clauses::new_from_iter(self, [])) + EarlyBinder::bind(Clauses::empty(self)) } else { EarlyBinder::bind(Clauses::new_from_iter( self, @@ -1512,6 +1605,7 @@ impl<'db> Interner for DbInterner<'db> { SolverTraitLangItem::BikeshedGuaranteedNoDrop => { unimplemented!() } + SolverTraitLangItem::TrivialClone 
=> lang_items.TrivialClone, }; lang_item.expect("Lang item required but not found.").into() } @@ -1565,6 +1659,7 @@ impl<'db> Interner for DbInterner<'db> { AsyncFn, AsyncFnMut, AsyncFnOnce, + TrivialClone, ) } @@ -1651,6 +1746,7 @@ impl<'db> Interner for DbInterner<'db> { AsyncFn, AsyncFnMut, AsyncFnOnce, + TrivialClone, ) } @@ -1949,7 +2045,7 @@ impl<'db> Interner for DbInterner<'db> { let field_types = self.db().field_types(variant.id()); let mut unsizing_params = DenseBitSet::new_empty(num_params); - let ty = field_types[tail_field.0]; + let ty = field_types[tail_field.0].get(); for arg in ty.instantiate_identity().walk() { if let Some(i) = maybe_unsizing_param_idx(arg) { unsizing_params.insert(i); @@ -1959,7 +2055,7 @@ impl<'db> Interner for DbInterner<'db> { // Ensure none of the other fields mention the parameters used // in unsizing. for field in prefix_fields { - for arg in field_types[field.0].instantiate_identity().walk() { + for arg in field_types[field.0].get().instantiate_identity().walk() { if let Some(i) = maybe_unsizing_param_idx(arg) { unsizing_params.remove(i); } @@ -2007,9 +2103,7 @@ impl<'db> Interner for DbInterner<'db> { let mut map = Default::default(); let delegate = Anonymize { interner: self, map: &mut map }; let inner = self.replace_escaping_bound_vars_uncached(value.skip_binder(), delegate); - let bound_vars = CollectAndApply::collect_and_apply(map.into_values(), |xs| { - BoundVarKinds::new_from_iter(self, xs.iter().cloned()) - }); + let bound_vars = BoundVarKinds::new_from_iter(self, map.into_values()); Binder::bind_with_vars(inner, bound_vars) } @@ -2019,7 +2113,7 @@ impl<'db> Interner for DbInterner<'db> { }; let mut result = Vec::new(); crate::opaques::opaque_types_defined_by(self.db, def_id, &mut result); - SolverDefIds::new_from_iter(self, result) + SolverDefIds::new_from_slice(&result) } fn opaque_types_and_coroutines_defined_by(self, def_id: Self::LocalDefId) -> Self::LocalDefIds { @@ -2048,7 +2142,7 @@ impl<'db> Interner for DbInterner<'db> { } }); - SolverDefIds::new_from_iter(self, result) + SolverDefIds::new_from_slice(&result) } fn alias_has_const_conditions(self, _def_id: Self::DefId) -> bool { @@ -2093,10 +2187,10 @@ impl<'db> Interner for DbInterner<'db> { let impl_trait_id = self.db().lookup_intern_impl_trait_id(opaque); match impl_trait_id { crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => { - crate::opaques::rpit_hidden_types(self.db, func)[idx] + crate::opaques::rpit_hidden_types(self.db, func)[idx].get() } crate::ImplTraitId::TypeAliasImplTrait(type_alias, idx) => { - crate::opaques::tait_hidden_types(self.db, type_alias)[idx] + crate::opaques::tait_hidden_types(self.db, type_alias)[idx].get() } } } @@ -2167,6 +2261,18 @@ impl<'db> Interner for DbInterner<'db> { Some(SolverTraitLangItem::Sized | SolverTraitLangItem::MetaSized) ) } + + fn const_of_item(self, def_id: Self::DefId) -> rustc_type_ir::EarlyBinder { + let id = match def_id { + SolverDefId::StaticId(id) => id.into(), + SolverDefId::ConstId(id) => id.into(), + _ => unreachable!(), + }; + EarlyBinder::bind(Const::new_unevaluated( + self, + UnevaluatedConst { def: GeneralConstIdWrapper(id), args: GenericArgs::empty(self) }, + )) + } } impl<'db> DbInterner<'db> { @@ -2273,6 +2379,11 @@ macro_rules! TrivialTypeTraversalImpls { ::output() } } + + impl rustc_type_ir::GenericTypeVisitable for $ty { + #[inline] + fn generic_visit_with(&self, _visitor: &mut V) {} + } )+ }; } @@ -2287,17 +2398,22 @@ TrivialTypeTraversalImpls! 
{ AdtIdWrapper, ImplIdWrapper, GeneralConstIdWrapper, - Pattern<'db>, Safety, FnAbi, Span, ParamConst, ParamTy, BoundRegion, - BoundVar, Placeholder, Placeholder, Placeholder, + Placeholder, + BoundVarKind, + EarlyParamRegion, + LateParamRegion, + AdtDef, + BoundTy, + BoundConst, } mod tls_db { @@ -2464,3 +2580,110 @@ mod tls_cache { GLOBAL_CACHE.with_borrow_mut(|handle| *handle = None); } } + +impl WorldExposer for intern::GarbageCollector { + fn on_interned( + &mut self, + interned: InternedRef<'_, T>, + ) -> ControlFlow<()> { + self.mark_interned_alive(interned) + } + + fn on_interned_slice( + &mut self, + interned: InternedSliceRef<'_, T>, + ) -> ControlFlow<()> { + self.mark_interned_slice_alive(interned) + } +} + +/// # Safety +/// +/// This cannot be called if there are some not-yet-recorded type values. Generally, if you have a mutable +/// reference to the database, and there are no other database - then you can call this safely, but you +/// also need to make sure to maintain the mutable reference while this is running. +pub unsafe fn collect_ty_garbage() { + let mut gc = intern::GarbageCollector::default(); + + gc.add_storage::(); + gc.add_storage::(); + gc.add_storage::(); + gc.add_storage::(); + gc.add_storage::(); + gc.add_storage::(); + gc.add_storage::(); + + gc.add_slice_storage::(); + gc.add_slice_storage::(); + gc.add_slice_storage::(); + gc.add_slice_storage::(); + gc.add_slice_storage::(); + gc.add_slice_storage::(); + gc.add_slice_storage::(); + gc.add_slice_storage::(); + gc.add_slice_storage::(); + gc.add_slice_storage::(); + gc.add_slice_storage::(); + + // SAFETY: + // - By our precondition, there are no unrecorded types. + // - We implement `GcInternedVisit` and `GcInternedSliceVisit` correctly for all types. + // - We added all storages (FIXME: it's too easy to forget to add a new storage here). + unsafe { gc.collect() }; +} + +macro_rules! impl_gc_visit { + ( $($ty:ty),* $(,)? ) => { + $( + impl ::intern::GcInternedVisit for $ty { + #[inline] + fn visit_with(&self, gc: &mut ::intern::GarbageCollector) { + self.generic_visit_with(gc); + } + } + )* + }; +} + +impl_gc_visit!( + super::consts::ConstInterned, + super::consts::ValtreeInterned, + PatternInterned, + super::opaques::ExternalConstraintsInterned, + super::predicate::PredicateInterned, + super::region::RegionInterned, + super::ty::TyInterned, + super::predicate::ClausesCachedTypeInfo, +); + +macro_rules! impl_gc_visit_slice { + ( $($ty:ty),* $(,)? 
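The `WorldExposer` impl and the `impl_gc_visit!` list above form the mark phase of a mark-and-sweep pass over interned type data: `on_interned` returns `Continue` the first time an allocation is seen, so the visitor recurses into its children, and `Break` once it is already marked. Below is a minimal, self-contained sketch of that marking protocol using toy `MarkSet`/`Node` types; it illustrates the idea only and is not the `intern` crate's actual API.

    use std::collections::HashSet;
    use std::ops::ControlFlow;

    // Toy stand-in for `intern::GarbageCollector`: remembers which interned
    // allocations have already been marked live.
    struct MarkSet {
        live: HashSet<usize>,
    }

    impl MarkSet {
        // First visit marks the allocation and asks the caller to recurse;
        // any later visit short-circuits, so shared subtrees are walked once.
        fn mark(&mut self, addr: usize) -> ControlFlow<()> {
            if self.live.insert(addr) {
                ControlFlow::Continue(())
            } else {
                ControlFlow::Break(())
            }
        }
    }

    // Toy interned node. `visit` mirrors the `generic_visit_with` impls above:
    // mark the allocation, then walk children only on the first encounter.
    struct Node {
        children: Vec<&'static Node>,
    }

    impl Node {
        fn visit(&self, marks: &mut MarkSet) {
            if marks.mark(self as *const Node as usize).is_continue() {
                for child in &self.children {
                    child.visit(marks);
                }
            }
        }
    }

Once every root reachable from the database has been visited this way, anything held by the registered storages that is not in the mark set can be freed, which is presumably what the final `unsafe { gc.collect() }` call does for the storages registered via `add_storage`/`add_slice_storage`.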
) => { + $( + impl ::intern::GcInternedSliceVisit for $ty { + #[inline] + fn visit_header(header: &::Header, gc: &mut ::intern::GarbageCollector) { + header.generic_visit_with(gc); + } + + #[inline] + fn visit_slice(header: &[::SliceType], gc: &mut ::intern::GarbageCollector) { + header.generic_visit_with(gc); + } + } + )* + }; +} + +impl_gc_visit_slice!( + super::predicate::ClausesStorage, + super::generic_arg::GenericArgsStorage, + BoundVarKindsStorage, + VariancesOfStorage, + CanonicalVarsStorage, + PatListStorage, + super::opaques::PredefinedOpaquesStorage, + super::opaques::SolverDefIdsStorage, + super::predicate::BoundExistentialPredicatesStorage, + super::region::RegionAssumptionsStorage, + super::ty::TysStorage, +); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ir_print.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ir_print.rs index dab0fe9e4a903..998aab5a3fff0 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ir_print.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ir_print.rs @@ -2,7 +2,6 @@ use std::any::type_name_of_val; -use rustc_type_ir::inherent::SliceLike; use rustc_type_ir::{self as ty, ir_print::IrPrint}; use super::SolverDefId; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/opaques.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/opaques.rs index e8f5be2eb5988..bdb3f3087103e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/opaques.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/opaques.rs @@ -1,37 +1,74 @@ //! Things related to opaques in the next-trait-solver. +use intern::{Interned, InternedRef, impl_internable}; +use macros::GenericTypeVisitable; use rustc_ast_ir::try_visit; use rustc_type_ir::inherent::SliceLike; -use super::{DbInterner, SolverDefId, Ty, interned_vec_db, interned_vec_nolifetime_salsa}; +use crate::next_solver::{impl_foldable_for_interned_slice, interned_slice}; + +use super::{DbInterner, SolverDefId, Ty}; pub type OpaqueTypeKey<'db> = rustc_type_ir::OpaqueTypeKey>; type PredefinedOpaque<'db> = (OpaqueTypeKey<'db>, Ty<'db>); -interned_vec_db!(PredefinedOpaques, PredefinedOpaque); +interned_slice!( + PredefinedOpaquesStorage, + PredefinedOpaques, + StoredPredefinedOpaques, + predefined_opaques, + PredefinedOpaque<'db>, + PredefinedOpaque<'static>, +); +impl_foldable_for_interned_slice!(PredefinedOpaques); pub type ExternalConstraintsData<'db> = rustc_type_ir::solve::ExternalConstraintsData>; -interned_vec_nolifetime_salsa!(SolverDefIds, SolverDefId); +interned_slice!( + SolverDefIdsStorage, + SolverDefIds, + StoredSolverDefIds, + def_ids, + SolverDefId, + SolverDefId, +); +impl_foldable_for_interned_slice!(SolverDefIds); -#[salsa::interned(constructor = new_, debug)] +#[derive(Clone, Copy, PartialEq, Eq, Hash)] pub struct ExternalConstraints<'db> { - #[returns(ref)] - kind_: rustc_type_ir::solve::ExternalConstraintsData>, + interned: InternedRef<'db, ExternalConstraintsInterned>, } +#[derive(PartialEq, Eq, Hash, GenericTypeVisitable)] +pub(super) struct ExternalConstraintsInterned(ExternalConstraintsData<'static>); + +impl_internable!(gc; ExternalConstraintsInterned); + +const _: () = { + const fn is_copy() {} + is_copy::>(); +}; + impl<'db> ExternalConstraints<'db> { - pub fn new(interner: DbInterner<'db>, data: ExternalConstraintsData<'db>) -> Self { - ExternalConstraints::new_(interner.db(), data) + #[inline] + pub fn new(_interner: DbInterner<'db>, data: ExternalConstraintsData<'db>) -> Self { + let data = unsafe { + 
std::mem::transmute::, ExternalConstraintsData<'static>>( + data, + ) + }; + Self { interned: Interned::new_gc(ExternalConstraintsInterned(data)) } } + #[inline] pub fn inner(&self) -> &ExternalConstraintsData<'db> { - crate::with_attached_db(|db| { - let inner = self.kind_(db); - // SAFETY: ¯\_(ツ)_/¯ - unsafe { std::mem::transmute(inner) } - }) + let inner = &self.interned.0; + unsafe { + std::mem::transmute::<&ExternalConstraintsData<'static>, &ExternalConstraintsData<'db>>( + inner, + ) + } } } @@ -43,6 +80,12 @@ impl<'db> std::ops::Deref for ExternalConstraints<'db> { } } +impl std::fmt::Debug for ExternalConstraints<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.inner().fmt(f) + } +} + impl<'db> rustc_type_ir::TypeVisitable> for ExternalConstraints<'db> { fn visit_with>>( &self, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs index 783966ee1ee12..5758e2dc7e93c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs @@ -2,20 +2,25 @@ use std::cmp::Ordering; -use macros::{TypeFoldable, TypeVisitable}; +use intern::{ + Interned, InternedRef, InternedSlice, InternedSliceRef, impl_internable, impl_slice_internable, +}; +use macros::{GenericTypeVisitable, TypeFoldable, TypeVisitable}; use rustc_type_ir::{ - self as ty, CollectAndApply, DebruijnIndex, EarlyBinder, FlagComputation, Flags, - PredicatePolarity, TypeFlags, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, - TypeVisitable, Upcast, UpcastFrom, WithCachedTypeInfo, + self as ty, CollectAndApply, EarlyBinder, FlagComputation, Flags, GenericTypeVisitable, + PredicatePolarity, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, Upcast, + UpcastFrom, WithCachedTypeInfo, elaborate::Elaboratable, error::{ExpectedFound, TypeError}, inherent::{IntoKind, SliceLike}, }; -use smallvec::SmallVec; -use crate::next_solver::{GenericArg, InternedWrapperNoDebug, TraitIdWrapper}; +use crate::next_solver::{ + GenericArg, TraitIdWrapper, impl_foldable_for_interned_slice, impl_stored_interned_slice, + interned_slice, +}; -use super::{Binder, BoundVarKinds, DbInterner, Region, Ty, interned_vec_db}; +use super::{Binder, BoundVarKinds, DbInterner, Region, Ty}; pub type BoundExistentialPredicate<'db> = Binder<'db, ExistentialPredicate<'db>>; @@ -68,7 +73,15 @@ fn stable_cmp_existential_predicate<'db>( (ExistentialPredicate::AutoTrait(_), _) => Ordering::Greater, } } -interned_vec_db!(BoundExistentialPredicates, BoundExistentialPredicate); +interned_slice!( + BoundExistentialPredicatesStorage, + BoundExistentialPredicates, + StoredBoundExistentialPredicates, + bound_existential_predicates, + BoundExistentialPredicate<'db>, + BoundExistentialPredicate<'static>, +); +impl_foldable_for_interned_slice!(BoundExistentialPredicates); impl<'db> rustc_type_ir::inherent::BoundExistentialPredicates> for BoundExistentialPredicates<'db> @@ -82,7 +95,7 @@ impl<'db> rustc_type_ir::inherent::BoundExistentialPredicates> ) -> Option< rustc_type_ir::Binder, rustc_type_ir::ExistentialTraitRef>>, > { - self.inner()[0] + self[0] .map_bound(|this| match this { ExistentialPredicate::Trait(tr) => Some(tr), _ => None, @@ -166,74 +179,50 @@ impl<'db> rustc_type_ir::relate::Relate> for BoundExistentialPre }, ); - CollectAndApply::collect_and_apply(v, |g| { - BoundExistentialPredicates::new_from_iter(interner, g.iter().cloned()) - }) + 
BoundExistentialPredicates::new_from_iter(interner, v) } } -#[salsa::interned(constructor = new_)] +#[derive(Clone, Copy, PartialEq, Eq, Hash)] pub struct Predicate<'db> { - #[returns(ref)] - kind_: InternedWrapperNoDebug>>>, + interned: InternedRef<'db, PredicateInterned>, } -impl<'db> std::fmt::Debug for Predicate<'db> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.inner().internee.fmt(f) - } -} +#[derive(PartialEq, Eq, Hash, GenericTypeVisitable)] +pub(super) struct PredicateInterned(WithCachedTypeInfo>>); -impl<'db> std::fmt::Debug - for InternedWrapperNoDebug>>> -{ - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "Binder<")?; - match self.0.internee.skip_binder() { - rustc_type_ir::PredicateKind::Clause(clause_kind) => { - write!(f, "{clause_kind:?}") - } - rustc_type_ir::PredicateKind::DynCompatible(trait_def_id) => { - write!(f, "the trait `{trait_def_id:?}` is dyn-compatible") - } - rustc_type_ir::PredicateKind::Subtype(subtype_predicate) => { - write!(f, "{subtype_predicate:?}") - } - rustc_type_ir::PredicateKind::Coerce(coerce_predicate) => { - write!(f, "{coerce_predicate:?}") - } - rustc_type_ir::PredicateKind::ConstEquate(c1, c2) => { - write!(f, "the constant `{c1:?}` equals `{c2:?}`") - } - rustc_type_ir::PredicateKind::Ambiguous => write!(f, "ambiguous"), - rustc_type_ir::PredicateKind::NormalizesTo(data) => write!(f, "{data:?}"), - rustc_type_ir::PredicateKind::AliasRelate(t1, t2, dir) => { - write!(f, "{t1:?} {dir:?} {t2:?}") - } - }?; - write!(f, ", [{:?}]>", self.0.internee.bound_vars())?; - Ok(()) - } -} +impl_internable!(gc; PredicateInterned); + +const _: () = { + const fn is_copy() {} + is_copy::>(); +}; impl<'db> Predicate<'db> { - pub fn new(interner: DbInterner<'db>, kind: Binder<'db, PredicateKind<'db>>) -> Self { + pub fn new(_interner: DbInterner<'db>, kind: Binder<'db, PredicateKind<'db>>) -> Self { + let kind = unsafe { + std::mem::transmute::< + Binder<'db, PredicateKind<'db>>, + Binder<'static, PredicateKind<'static>>, + >(kind) + }; let flags = FlagComputation::for_predicate(kind); let cached = WithCachedTypeInfo { internee: kind, flags: flags.flags, outer_exclusive_binder: flags.outer_exclusive_binder, }; - Predicate::new_(interner.db(), InternedWrapperNoDebug(cached)) + Self { interned: Interned::new_gc(PredicateInterned(cached)) } } pub fn inner(&self) -> &WithCachedTypeInfo>> { - crate::with_attached_db(|db| { - let inner = &self.kind_(db).0; - // SAFETY: The caller already has access to a `Predicate<'db>`, so borrowchecking will - // make sure that our returned value is valid for the lifetime `'db`. - unsafe { std::mem::transmute(inner) } - }) + let inner = &self.interned.0; + unsafe { + std::mem::transmute::< + &WithCachedTypeInfo>>, + &WithCachedTypeInfo>>, + >(inner) + } } /// Flips the polarity of a Predicate. 
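`ExternalConstraints` and `Predicate` above both follow the same conversion: the payload is transmuted to `'static` so it can live in the global interner, the handle is a `Copy` `InternedRef`, and accessors transmute back down to `'db` so borrows stay tied to the database session. The sketch below shows only the shape of that pattern with made-up `Payload`/`Handle` types; it leaks allocations to keep the toy version trivially sound, whereas the real code relies on the garbage collector above to keep interned data alive.

    use std::marker::PhantomData;

    // Toy payload with a lifetime parameter, standing in for `PredicateKind<'db>`.
    struct Payload<'a>(&'a str);

    // A `Copy` handle that stores the data with an erased (`'static`) lifetime
    // but re-attaches the session lifetime `'db` through `PhantomData`.
    #[derive(Clone, Copy)]
    struct Handle<'db> {
        interned: &'static Payload<'static>,
        _session: PhantomData<&'db ()>,
    }

    impl<'db> Handle<'db> {
        fn new(data: Payload<'db>) -> Self {
            // Erase the lifetime on the way in, as `Predicate::new` does. The toy
            // version leaks the allocation so the `'static` claim is literally true.
            let data: Payload<'static> = unsafe { std::mem::transmute(data) };
            Handle { interned: Box::leak(Box::new(data)), _session: PhantomData }
        }

        fn get(&self) -> &Payload<'db> {
            // Shrink `'static` back to `'db` on the way out, as `inner()` does.
            // (With these toy types covariance alone would suffice; the real
            // payload sits behind wrappers that are not simply covariant.)
            unsafe { std::mem::transmute::<&Payload<'static>, &Payload<'db>>(self.interned) }
        }
    }

The `is_copy` const assertions in the hunks above guard exactly this property: the new handles are plain `Copy` pointers rather than salsa-interned ids that need an attached database to read.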
@@ -259,110 +248,135 @@ impl<'db> Predicate<'db> { } } -// FIXME: should make a "header" in interned_vec - -#[derive(Debug, Clone)] -pub struct InternedClausesWrapper<'db>(SmallVec<[Clause<'db>; 2]>, TypeFlags, DebruijnIndex); - -impl<'db> PartialEq for InternedClausesWrapper<'db> { - fn eq(&self, other: &Self) -> bool { - self.0.eq(&other.0) +impl<'db> std::fmt::Debug for Predicate<'db> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.kind().fmt(f) } } -impl<'db> Eq for InternedClausesWrapper<'db> {} +#[derive(Clone, Copy, PartialEq, Eq, Hash, GenericTypeVisitable)] +pub struct ClausesCachedTypeInfo(WithCachedTypeInfo<()>); -impl<'db> std::hash::Hash for InternedClausesWrapper<'db> { - fn hash(&self, state: &mut H) { - self.0.hash(state) - } -} +impl_slice_internable!(gc; ClausesStorage, ClausesCachedTypeInfo, Clause<'static>); +impl_stored_interned_slice!(ClausesStorage, Clauses, StoredClauses); -#[salsa::interned(constructor = new_)] +#[derive(Clone, Copy, PartialEq, Eq, Hash)] pub struct Clauses<'db> { - #[returns(ref)] - inner_: InternedClausesWrapper<'db>, + interned: InternedSliceRef<'db, ClausesStorage>, } -impl<'db> Clauses<'db> { - pub fn new_from_iter( - interner: DbInterner<'db>, - data: impl IntoIterator>, - ) -> Self { - let clauses: SmallVec<_> = data.into_iter().collect(); - let flags = FlagComputation::>::for_clauses(&clauses); - let wrapper = InternedClausesWrapper(clauses, flags.flags, flags.outer_exclusive_binder); - Clauses::new_(interner.db(), wrapper) - } - - pub fn inner(&self) -> &InternedClausesWrapper<'db> { - crate::with_attached_db(|db| { - let inner = self.inner_(db); - // SAFETY: The caller already has access to a `Clauses<'db>`, so borrowchecking will - // make sure that our returned value is valid for the lifetime `'db`. - unsafe { std::mem::transmute(inner) } - }) +impl<'db> std::fmt::Debug for Clauses<'db> { + fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.as_slice().fmt(fmt) } } -impl<'db> std::fmt::Debug for Clauses<'db> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.inner().0.fmt(f) +impl<'db> Clauses<'db> { + #[inline] + pub fn empty(_interner: DbInterner<'db>) -> Self { + // FIXME: Get from a static. 
+ Self::new_from_slice(&[]) + } + + #[inline] + pub fn new_from_slice(slice: &[Clause<'db>]) -> Self { + let slice = unsafe { ::std::mem::transmute::<&[Clause<'db>], &[Clause<'static>]>(slice) }; + let flags = FlagComputation::>::for_clauses(slice); + let flags = ClausesCachedTypeInfo(WithCachedTypeInfo { + internee: (), + flags: flags.flags, + outer_exclusive_binder: flags.outer_exclusive_binder, + }); + Self { interned: InternedSlice::from_header_and_slice(flags, slice) } } -} -impl<'db> rustc_type_ir::inherent::Clauses> for Clauses<'db> {} + #[inline] + pub fn new_from_iter(_interner: DbInterner<'db>, args: I) -> T::Output + where + I: IntoIterator, + T: CollectAndApply, Self>, + { + CollectAndApply::collect_and_apply(args.into_iter(), Self::new_from_slice) + } -impl<'db> rustc_type_ir::inherent::SliceLike for Clauses<'db> { - type Item = Clause<'db>; + #[inline] + pub fn as_slice(self) -> &'db [Clause<'db>] { + let slice = &self.interned.get().slice; + unsafe { ::std::mem::transmute::<&[Clause<'static>], &[Clause<'db>]>(slice) } + } - type IntoIter = ; 2]> as IntoIterator>::IntoIter; + #[inline] + pub fn iter(self) -> ::std::iter::Copied<::std::slice::Iter<'db, Clause<'db>>> { + self.as_slice().iter().copied() + } - fn iter(self) -> Self::IntoIter { - self.inner().0.clone().into_iter() + #[inline] + pub fn len(self) -> usize { + self.as_slice().len() } - fn as_slice(&self) -> &[Self::Item] { - self.inner().0.as_slice() + #[inline] + pub fn is_empty(self) -> bool { + self.as_slice().is_empty() } } impl<'db> IntoIterator for Clauses<'db> { + type IntoIter = ::std::iter::Copied<::std::slice::Iter<'db, Clause<'db>>>; type Item = Clause<'db>; - type IntoIter = ::IntoIter; - + #[inline] fn into_iter(self) -> Self::IntoIter { - rustc_type_ir::inherent::SliceLike::iter(self) + self.iter() + } +} + +impl<'db> std::ops::Deref for Clauses<'db> { + type Target = [Clause<'db>]; + + #[inline] + fn deref(&self) -> &Self::Target { + (*self).as_slice() + } +} + +impl<'db> rustc_type_ir::inherent::SliceLike for Clauses<'db> { + type Item = Clause<'db>; + + type IntoIter = ::std::iter::Copied<::std::slice::Iter<'db, Clause<'db>>>; + + #[inline] + fn iter(self) -> Self::IntoIter { + self.iter() + } + + #[inline] + fn as_slice(&self) -> &[Self::Item] { + (*self).as_slice() } } impl<'db> Default for Clauses<'db> { + #[inline] fn default() -> Self { - Clauses::new_from_iter(DbInterner::conjure(), []) + Clauses::empty(DbInterner::conjure()) } } +impl<'db> rustc_type_ir::inherent::Clauses> for Clauses<'db> {} + impl<'db> rustc_type_ir::TypeSuperFoldable> for Clauses<'db> { fn try_super_fold_with>>( self, folder: &mut F, ) -> Result { - let mut clauses: SmallVec<[_; 2]> = SmallVec::with_capacity(self.inner().0.len()); - for c in self { - clauses.push(c.try_fold_with(folder)?); - } - Ok(Clauses::new_from_iter(folder.cx(), clauses)) + Clauses::new_from_iter(folder.cx(), self.iter().map(|clause| clause.try_fold_with(folder))) } fn super_fold_with>>( self, folder: &mut F, ) -> Self { - let mut clauses: SmallVec<[_; 2]> = SmallVec::with_capacity(self.inner().0.len()); - for c in self { - clauses.push(c.fold_with(folder)); - } - Clauses::new_from_iter(folder.cx(), clauses) + Clauses::new_from_iter(folder.cx(), self.iter().map(|clause| clause.fold_with(folder))) } } @@ -371,15 +385,10 @@ impl<'db> rustc_type_ir::TypeFoldable> for Clauses<'db> { self, folder: &mut F, ) -> Result { - use rustc_type_ir::inherent::SliceLike as _; - let inner: smallvec::SmallVec<[_; 2]> = - self.iter().map(|v| 
v.try_fold_with(folder)).collect::>()?; - Ok(Clauses::new_from_iter(folder.cx(), inner)) + self.try_super_fold_with(folder) } fn fold_with>>(self, folder: &mut F) -> Self { - use rustc_type_ir::inherent::SliceLike as _; - let inner: smallvec::SmallVec<[_; 2]> = self.iter().map(|v| v.fold_with(folder)).collect(); - Clauses::new_from_iter(folder.cx(), inner) + self.super_fold_with(folder) } } @@ -389,19 +398,28 @@ impl<'db> rustc_type_ir::TypeVisitable> for Clauses<'db> { visitor: &mut V, ) -> V::Result { use rustc_ast_ir::visit::VisitorResult; - use rustc_type_ir::inherent::SliceLike as _; - rustc_ast_ir::walk_visitable_list!(visitor, self.as_slice().iter()); + rustc_ast_ir::walk_visitable_list!(visitor, self.iter()); V::Result::output() } } +impl<'db, V: super::WorldExposer> rustc_type_ir::GenericTypeVisitable for Clauses<'db> { + fn generic_visit_with(&self, visitor: &mut V) { + if visitor.on_interned_slice(self.interned).is_continue() { + self.as_slice().iter().for_each(|it| it.generic_visit_with(visitor)); + } + } +} + impl<'db> rustc_type_ir::Flags for Clauses<'db> { + #[inline] fn flags(&self) -> rustc_type_ir::TypeFlags { - self.inner().1 + self.interned.header.header.0.flags } + #[inline] fn outer_exclusive_binder(&self) -> rustc_type_ir::DebruijnIndex { - self.inner().2 + self.interned.header.header.0.outer_exclusive_binder } } @@ -414,18 +432,20 @@ impl<'db> rustc_type_ir::TypeSuperVisitable> for Clauses<'db> { } } -#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] // TODO implement Debug by hand +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, GenericTypeVisitable)] // TODO implement Debug by hand pub struct Clause<'db>(pub(crate) Predicate<'db>); // We could cram the reveal into the clauses like rustc does, probably -#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, TypeVisitable, TypeFoldable)] +#[derive( + Copy, Clone, Debug, Hash, PartialEq, Eq, TypeVisitable, TypeFoldable, GenericTypeVisitable, +)] pub struct ParamEnv<'db> { pub(crate) clauses: Clauses<'db>, } impl<'db> ParamEnv<'db> { pub fn empty() -> Self { - ParamEnv { clauses: Clauses::new_from_iter(DbInterner::conjure(), []) } + ParamEnv { clauses: Clauses::empty(DbInterner::conjure()) } } pub fn clauses(self) -> Clauses<'db> { @@ -460,6 +480,14 @@ impl<'db> TypeVisitable> for Predicate<'db> { } } +impl<'db, V: super::WorldExposer> GenericTypeVisitable for Predicate<'db> { + fn generic_visit_with(&self, visitor: &mut V) { + if visitor.on_interned(self.interned).is_continue() { + self.kind().generic_visit_with(visitor); + } + } +} + impl<'db> TypeSuperVisitable> for Predicate<'db> { fn super_visit_with>>( &self, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/region.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/region.rs index 19f3c38b673d9..dc2441f76e3ae 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/region.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/region.rs @@ -1,47 +1,53 @@ //! Things related to regions. 
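The `Clauses` rework above stores the clause list as a header plus slice: `new_from_slice` runs `FlagComputation::for_clauses` once and caches the resulting flags and outer binder in the header, so the `Flags` impl becomes a field read instead of a walk over the clauses. A rough sketch of that layout with toy types (the real storage comes from `impl_slice_internable!`/`InternedSlice::from_header_and_slice`):

    // Toy flag bits; the real code caches `TypeFlags` and a `DebruijnIndex`.
    #[derive(Clone, Copy, PartialEq, Eq, Hash)]
    struct Flags(u32);

    #[derive(Clone, Copy, PartialEq, Eq, Hash)]
    struct Clause {
        flags: Flags,
    }

    // Header-and-slice storage: per-collection metadata computed once at
    // construction time and kept next to the elements.
    struct Clauses {
        header: Flags,
        slice: Box<[Clause]>,
    }

    impl Clauses {
        fn new_from_slice(clauses: &[Clause]) -> Self {
            // Mirrors `FlagComputation::for_clauses`: fold per-element flags once.
            let combined = clauses
                .iter()
                .fold(Flags(0), |acc, c| Flags(acc.0 | c.flags.0));
            Clauses { header: combined, slice: clauses.into() }
        }

        // O(1), like the new `Flags for Clauses<'db>` impl above.
        fn flags(&self) -> Flags {
            self.header
        }
    }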
use hir_def::LifetimeParamId; -use intern::Symbol; +use intern::{Interned, InternedRef, Symbol, impl_internable}; +use macros::GenericTypeVisitable; use rustc_type_ir::{ - BoundVar, BoundVarIndexKind, DebruijnIndex, Flags, INNERMOST, RegionVid, TypeFlags, - TypeFoldable, TypeVisitable, + BoundVar, BoundVarIndexKind, DebruijnIndex, Flags, GenericTypeVisitable, INNERMOST, RegionVid, + TypeFlags, TypeFoldable, TypeVisitable, inherent::{IntoKind, PlaceholderLike, SliceLike}, relate::Relate, }; -use crate::next_solver::{GenericArg, OutlivesPredicate}; +use crate::next_solver::{ + GenericArg, OutlivesPredicate, impl_foldable_for_interned_slice, impl_stored_interned, + interned_slice, +}; use super::{ - ErrorGuaranteed, SolverDefId, interned_vec_db, + SolverDefId, interner::{BoundVarKind, DbInterner, Placeholder}, }; pub type RegionKind<'db> = rustc_type_ir::RegionKind>; -#[salsa::interned(constructor = new_)] +#[derive(Clone, Copy, PartialEq, Eq, Hash)] pub struct Region<'db> { - #[returns(ref)] - kind_: RegionKind<'db>, + pub(super) interned: InternedRef<'db, RegionInterned>, } -impl std::fmt::Debug for Region<'_> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.kind().fmt(f) - } -} +#[derive(PartialEq, Eq, Hash, GenericTypeVisitable)] +#[repr(align(4))] // Required for `GenericArg` bit-tagging. +pub(super) struct RegionInterned(RegionKind<'static>); + +impl_internable!(gc; RegionInterned); +impl_stored_interned!(RegionInterned, Region, StoredRegion); + +const _: () = { + const fn is_copy() {} + is_copy::>(); +}; impl<'db> Region<'db> { - pub fn new(interner: DbInterner<'db>, kind: RegionKind<'db>) -> Self { - Region::new_(interner.db(), kind) + pub fn new(_interner: DbInterner<'db>, kind: RegionKind<'db>) -> Self { + let kind = unsafe { std::mem::transmute::, RegionKind<'static>>(kind) }; + Self { interned: Interned::new_gc(RegionInterned(kind)) } } pub fn inner(&self) -> &RegionKind<'db> { - crate::with_attached_db(|db| { - let inner = self.kind_(db); - // SAFETY: The caller already has access to a `Region<'db>`, so borrowchecking will - // make sure that our returned value is valid for the lifetime `'db`. 
- unsafe { std::mem::transmute::<&RegionKind<'_>, &RegionKind<'db>>(inner) } - }) + let inner = &self.interned.0; + unsafe { std::mem::transmute::<&RegionKind<'static>, &RegionKind<'db>>(inner) } } pub fn new_early_param( @@ -60,7 +66,7 @@ impl<'db> Region<'db> { } pub fn new_erased(interner: DbInterner<'db>) -> Region<'db> { - Region::new(interner, RegionKind::ReErased) + interner.default_types().regions.erased } pub fn new_bound( @@ -92,7 +98,7 @@ impl<'db> Region<'db> { } pub fn error(interner: DbInterner<'db>) -> Self { - Region::new(interner, RegionKind::ReError(ErrorGuaranteed)) + interner.default_types().regions.error } pub fn type_flags(&self) -> TypeFlags { @@ -256,6 +262,12 @@ impl BoundRegionKind { } } +impl std::fmt::Debug for Region<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.kind().fmt(f) + } +} + impl<'db> IntoKind for Region<'db> { type Kind = RegionKind<'db>; @@ -342,7 +354,7 @@ impl<'db> rustc_type_ir::inherent::Region> for Region<'db> { } fn new_static(interner: DbInterner<'db>) -> Self { - Region::new(interner, RegionKind::ReStatic) + interner.default_types().regions.statik } fn new_placeholder( @@ -377,6 +389,22 @@ impl<'db> PlaceholderLike> for PlaceholderRegion { } } +impl<'db, V: super::WorldExposer> GenericTypeVisitable for Region<'db> { + fn generic_visit_with(&self, visitor: &mut V) { + if visitor.on_interned(self.interned).is_continue() { + self.kind().generic_visit_with(visitor); + } + } +} + type GenericArgOutlivesPredicate<'db> = OutlivesPredicate<'db, GenericArg<'db>>; -interned_vec_db!(RegionAssumptions, GenericArgOutlivesPredicate); +interned_slice!( + RegionAssumptionsStorage, + RegionAssumptions, + StoredRegionAssumptions, + region_assumptions, + GenericArgOutlivesPredicate<'db>, + GenericArgOutlivesPredicate<'static>, +); +impl_foldable_for_interned_slice!(RegionAssumptions); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/solver.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/solver.rs index 40a3f17cf169e..d800925ba4e9a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/solver.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/solver.rs @@ -5,7 +5,7 @@ use rustc_next_trait_solver::delegate::SolverDelegate; use rustc_type_ir::{ AliasTyKind, GenericArgKind, InferCtxtLike, Interner, PredicatePolarity, TypeFlags, TypeVisitableExt, - inherent::{IntoKind, SliceLike, Term as _, Ty as _}, + inherent::{IntoKind, Term as _, Ty as _}, lang_items::SolverTraitLangItem, solve::{Certainty, NoSolution}, }; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs index ff89f8e059a5a..c89831bd40779 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs @@ -7,13 +7,15 @@ use hir_def::{ hir::generics::{TypeOrConstParamData, TypeParamProvenance}, }; use hir_def::{TraitId, type_ref::Rawness}; +use intern::{Interned, InternedRef, impl_internable}; +use macros::GenericTypeVisitable; use rustc_abi::{Float, Integer, Size}; use rustc_ast_ir::{Mutability, try_visit, visit::VisitorResult}; use rustc_type_ir::{ AliasTyKind, BoundVar, BoundVarIndexKind, ClosureKind, CoroutineArgs, CoroutineArgsParts, - DebruijnIndex, FlagComputation, Flags, FloatTy, FloatVid, InferTy, IntTy, IntVid, Interner, - TyVid, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, - TypeVisitor, UintTy, Upcast, 
WithCachedTypeInfo, + DebruijnIndex, FlagComputation, Flags, FloatTy, FloatVid, GenericTypeVisitable, InferTy, IntTy, + IntVid, Interner, TyVid, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, + TypeVisitableExt, TypeVisitor, UintTy, Upcast, WithCachedTypeInfo, inherent::{ AdtDef as _, BoundExistentialPredicates, BoundVarLike, Const as _, GenericArgs as _, IntoKind, ParamLike, PlaceholderLike, Safety as _, SliceLike, Ty as _, @@ -28,15 +30,15 @@ use crate::{ lower::GenericPredicates, next_solver::{ AdtDef, AliasTy, Binder, CallableIdWrapper, Clause, ClauseKind, ClosureIdWrapper, Const, - CoroutineIdWrapper, FnSig, GenericArg, PolyFnSig, Region, TraitRef, TypeAliasIdWrapper, + CoroutineIdWrapper, FnSig, GenericArgKind, PolyFnSig, Region, TraitRef, TypeAliasIdWrapper, abi::Safety, - interner::InternedWrapperNoDebug, + impl_foldable_for_interned_slice, impl_stored_interned, interned_slice, util::{CoroutineArgsExt, IntegerTypeExt}, }, }; use super::{ - BoundVarKind, DbInterner, GenericArgs, Placeholder, SolverDefId, interned_vec_db, + BoundVarKind, DbInterner, GenericArgs, Placeholder, SolverDefId, util::{FloatExt, IntegerExt}, }; @@ -44,35 +46,45 @@ pub type SimplifiedType = rustc_type_ir::fast_reject::SimplifiedType = rustc_type_ir::TyKind>; pub type FnHeader<'db> = rustc_type_ir::FnHeader>; -#[salsa::interned(constructor = new_)] +#[derive(Clone, Copy, PartialEq, Eq, Hash)] pub struct Ty<'db> { - #[returns(ref)] - kind_: InternedWrapperNoDebug>>, + pub(super) interned: InternedRef<'db, TyInterned>, } +#[derive(PartialEq, Eq, Hash, GenericTypeVisitable)] +#[repr(align(4))] // Required for `GenericArg` bit-tagging. +pub(super) struct TyInterned(WithCachedTypeInfo>); + +impl_internable!(gc; TyInterned); +impl_stored_interned!(TyInterned, Ty, StoredTy); + const _: () = { const fn is_copy() {} is_copy::>(); }; impl<'db> Ty<'db> { - pub fn new(interner: DbInterner<'db>, kind: TyKind<'db>) -> Self { + #[inline] + pub fn new(_interner: DbInterner<'db>, kind: TyKind<'db>) -> Self { + let kind = unsafe { std::mem::transmute::, TyKind<'static>>(kind) }; let flags = FlagComputation::for_kind(&kind); let cached = WithCachedTypeInfo { internee: kind, flags: flags.flags, outer_exclusive_binder: flags.outer_exclusive_binder, }; - Ty::new_(interner.db(), InternedWrapperNoDebug(cached)) + Self { interned: Interned::new_gc(TyInterned(cached)) } } + #[inline] pub fn inner(&self) -> &WithCachedTypeInfo> { - crate::with_attached_db(|db| { - let inner = &self.kind_(db).0; - // SAFETY: The caller already has access to a `Ty<'db>`, so borrowchecking will - // make sure that our returned value is valid for the lifetime `'db`. 
- unsafe { std::mem::transmute(inner) } - }) + let inner = &self.interned.0; + unsafe { + std::mem::transmute::< + &WithCachedTypeInfo>, + &WithCachedTypeInfo>, + >(inner) + } } pub fn new_adt(interner: DbInterner<'db>, adt_id: AdtId, args: GenericArgs<'db>) -> Self { @@ -99,16 +111,39 @@ impl<'db> Ty<'db> { Ty::new_infer(interner, InferTy::FloatVar(v)) } + #[inline] pub fn new_int(interner: DbInterner<'db>, i: IntTy) -> Self { - Ty::new(interner, TyKind::Int(i)) + let types = interner.default_types(); + match i { + IntTy::Isize => types.types.isize, + IntTy::I8 => types.types.i8, + IntTy::I16 => types.types.i16, + IntTy::I32 => types.types.i32, + IntTy::I64 => types.types.i64, + IntTy::I128 => types.types.i128, + } } pub fn new_uint(interner: DbInterner<'db>, ui: UintTy) -> Self { - Ty::new(interner, TyKind::Uint(ui)) + let types = interner.default_types(); + match ui { + UintTy::Usize => types.types.usize, + UintTy::U8 => types.types.u8, + UintTy::U16 => types.types.u16, + UintTy::U32 => types.types.u32, + UintTy::U64 => types.types.u64, + UintTy::U128 => types.types.u128, + } } pub fn new_float(interner: DbInterner<'db>, f: FloatTy) -> Self { - Ty::new(interner, TyKind::Float(f)) + let types = interner.default_types(); + match f { + FloatTy::F16 => types.types.f16, + FloatTy::F32 => types.types.f32, + FloatTy::F64 => types.types.f64, + FloatTy::F128 => types.types.f128, + } } pub fn new_fresh(interner: DbInterner<'db>, n: u32) -> Self { @@ -124,7 +159,7 @@ impl<'db> Ty<'db> { } pub fn new_empty_tuple(interner: DbInterner<'db>) -> Self { - Ty::new_tup(interner, &[]) + interner.default_types().types.unit } pub fn new_imm_ptr(interner: DbInterner<'db>, ty: Ty<'db>) -> Self { @@ -383,7 +418,7 @@ impl<'db> Ty<'db> { #[inline] pub fn is_unit(self) -> bool { - matches!(self.kind(), TyKind::Tuple(tys) if tys.inner().is_empty()) + matches!(self.kind(), TyKind::Tuple(tys) if tys.is_empty()) } #[inline] @@ -555,34 +590,34 @@ impl<'db> Ty<'db> { interner: DbInterner<'db>, ty: hir_def::builtin_type::BuiltinType, ) -> Ty<'db> { - let kind = match ty { - hir_def::builtin_type::BuiltinType::Char => TyKind::Char, - hir_def::builtin_type::BuiltinType::Bool => TyKind::Bool, - hir_def::builtin_type::BuiltinType::Str => TyKind::Str, - hir_def::builtin_type::BuiltinType::Int(int) => TyKind::Int(match int { - hir_def::builtin_type::BuiltinInt::Isize => rustc_type_ir::IntTy::Isize, - hir_def::builtin_type::BuiltinInt::I8 => rustc_type_ir::IntTy::I8, - hir_def::builtin_type::BuiltinInt::I16 => rustc_type_ir::IntTy::I16, - hir_def::builtin_type::BuiltinInt::I32 => rustc_type_ir::IntTy::I32, - hir_def::builtin_type::BuiltinInt::I64 => rustc_type_ir::IntTy::I64, - hir_def::builtin_type::BuiltinInt::I128 => rustc_type_ir::IntTy::I128, - }), - hir_def::builtin_type::BuiltinType::Uint(uint) => TyKind::Uint(match uint { - hir_def::builtin_type::BuiltinUint::Usize => rustc_type_ir::UintTy::Usize, - hir_def::builtin_type::BuiltinUint::U8 => rustc_type_ir::UintTy::U8, - hir_def::builtin_type::BuiltinUint::U16 => rustc_type_ir::UintTy::U16, - hir_def::builtin_type::BuiltinUint::U32 => rustc_type_ir::UintTy::U32, - hir_def::builtin_type::BuiltinUint::U64 => rustc_type_ir::UintTy::U64, - hir_def::builtin_type::BuiltinUint::U128 => rustc_type_ir::UintTy::U128, - }), - hir_def::builtin_type::BuiltinType::Float(float) => TyKind::Float(match float { - hir_def::builtin_type::BuiltinFloat::F16 => rustc_type_ir::FloatTy::F16, - hir_def::builtin_type::BuiltinFloat::F32 => rustc_type_ir::FloatTy::F32, - 
hir_def::builtin_type::BuiltinFloat::F64 => rustc_type_ir::FloatTy::F64, - hir_def::builtin_type::BuiltinFloat::F128 => rustc_type_ir::FloatTy::F128, - }), - }; - Ty::new(interner, kind) + let types = interner.default_types(); + match ty { + hir_def::builtin_type::BuiltinType::Char => types.types.char, + hir_def::builtin_type::BuiltinType::Bool => types.types.bool, + hir_def::builtin_type::BuiltinType::Str => types.types.str, + hir_def::builtin_type::BuiltinType::Int(int) => match int { + hir_def::builtin_type::BuiltinInt::Isize => types.types.isize, + hir_def::builtin_type::BuiltinInt::I8 => types.types.i8, + hir_def::builtin_type::BuiltinInt::I16 => types.types.i16, + hir_def::builtin_type::BuiltinInt::I32 => types.types.i32, + hir_def::builtin_type::BuiltinInt::I64 => types.types.i64, + hir_def::builtin_type::BuiltinInt::I128 => types.types.i128, + }, + hir_def::builtin_type::BuiltinType::Uint(uint) => match uint { + hir_def::builtin_type::BuiltinUint::Usize => types.types.usize, + hir_def::builtin_type::BuiltinUint::U8 => types.types.u8, + hir_def::builtin_type::BuiltinUint::U16 => types.types.u16, + hir_def::builtin_type::BuiltinUint::U32 => types.types.u32, + hir_def::builtin_type::BuiltinUint::U64 => types.types.u64, + hir_def::builtin_type::BuiltinUint::U128 => types.types.u128, + }, + hir_def::builtin_type::BuiltinType::Float(float) => match float { + hir_def::builtin_type::BuiltinFloat::F16 => types.types.f16, + hir_def::builtin_type::BuiltinFloat::F32 => types.types.f32, + hir_def::builtin_type::BuiltinFloat::F64 => types.types.f64, + hir_def::builtin_type::BuiltinFloat::F128 => types.types.f128, + }, + } } pub fn as_builtin(self) -> Option { @@ -661,10 +696,10 @@ impl<'db> Ty<'db> { // This is only used by type walking. // Parameters will be walked outside, and projection predicate is not used. // So just provide the Future trait. 
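The hunks above (and the matching `util.rs` hunks below) stop building primitive types with `Ty::new(interner, TyKind::Int(..))` on every call and instead copy them out of `interner.default_types()`, a table of pre-interned singletons; `region.rs` gets the same treatment for `ReErased`, `ReStatic`, and `ReError`. A minimal sketch of the idea, assuming nothing about the real table beyond what the hunks show (the `CommonTypes` layout below is invented for illustration):

    use std::sync::OnceLock;

    // Toy interned type: `&'static TyKind` stands in for an interned handle.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
    enum TyKind {
        Bool,
        U8,
        I32,
        Unit,
    }

    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
    struct Ty(&'static TyKind);

    // Built once; afterwards every "constructor" is just a copy out of the table.
    struct CommonTypes {
        bool: Ty,
        u8: Ty,
        i32: Ty,
        unit: Ty,
    }

    fn default_types() -> &'static CommonTypes {
        static COMMON: OnceLock<CommonTypes> = OnceLock::new();
        COMMON.get_or_init(|| CommonTypes {
            bool: Ty(&TyKind::Bool),
            u8: Ty(&TyKind::U8),
            i32: Ty(&TyKind::I32),
            unit: Ty(&TyKind::Unit),
        })
    }

    fn new_u8() -> Ty {
        // No hashing, no interner lookup: the hot constructors become field reads.
        default_types().u8
    }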
- let impl_bound = TraitRef::new( + let impl_bound = TraitRef::new_from_args( interner, future_trait.into(), - GenericArgs::new_from_iter(interner, []), + GenericArgs::empty(interner), ) .upcast(interner); Some(vec![impl_bound]) @@ -730,20 +765,23 @@ impl<'db> std::fmt::Debug for Ty<'db> { } } -impl<'db> std::fmt::Debug for InternedWrapperNoDebug>> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.0.internee.fmt(f) - } -} - impl<'db> IntoKind for Ty<'db> { type Kind = TyKind<'db>; + #[inline] fn kind(self) -> Self::Kind { self.inner().internee } } +impl<'db, V: super::WorldExposer> GenericTypeVisitable for Ty<'db> { + fn generic_visit_with(&self, visitor: &mut V) { + if visitor.on_interned(self.interned).is_continue() { + self.kind().generic_visit_with(visitor); + } + } +} + impl<'db> TypeVisitable> for Ty<'db> { fn visit_with>>( &self, @@ -942,19 +980,19 @@ impl<'db> Flags for Ty<'db> { impl<'db> rustc_type_ir::inherent::Ty> for Ty<'db> { fn new_unit(interner: DbInterner<'db>) -> Self { - Ty::new(interner, TyKind::Tuple(Default::default())) + interner.default_types().types.unit } fn new_bool(interner: DbInterner<'db>) -> Self { - Ty::new(interner, TyKind::Bool) + interner.default_types().types.bool } fn new_u8(interner: DbInterner<'db>) -> Self { - Ty::new(interner, TyKind::Uint(rustc_type_ir::UintTy::U8)) + interner.default_types().types.u8 } fn new_usize(interner: DbInterner<'db>) -> Self { - Ty::new(interner, TyKind::Uint(rustc_type_ir::UintTy::Usize)) + interner.default_types().types.usize } fn new_infer(interner: DbInterner<'db>, var: rustc_type_ir::InferTy) -> Self { @@ -1068,9 +1106,9 @@ impl<'db> rustc_type_ir::inherent::Ty> for Ty<'db> { // to unnecessary overflows in async code. See the issue: // . let coroutine_args = interner.mk_args_from_iter(coroutine_args.iter().map(|arg| { - match arg { - GenericArg::Ty(_) | GenericArg::Const(_) => arg, - GenericArg::Lifetime(_) => { + match arg.kind() { + GenericArgKind::Type(_) | GenericArgKind::Const(_) => arg, + GenericArgKind::Lifetime(_) => { crate::next_solver::Region::new(interner, rustc_type_ir::RegionKind::ReErased) .into() } @@ -1105,7 +1143,7 @@ impl<'db> rustc_type_ir::inherent::Ty> for Ty<'db> { } fn new_tup(interner: DbInterner<'db>, tys: &[ as Interner>::Ty]) -> Self { - Ty::new(interner, TyKind::Tuple(Tys::new_from_iter(interner, tys.iter().cloned()))) + Ty::new(interner, TyKind::Tuple(Tys::new_from_slice(tys))) } fn new_tup_from_iter(interner: DbInterner<'db>, iter: It) -> T::Output @@ -1177,10 +1215,11 @@ impl<'db> rustc_type_ir::inherent::Ty> for Ty<'db> { } fn from_closure_kind(interner: DbInterner<'db>, kind: rustc_type_ir::ClosureKind) -> Self { + let types = interner.default_types(); match kind { - ClosureKind::Fn => Ty::new(interner, TyKind::Int(IntTy::I8)), - ClosureKind::FnMut => Ty::new(interner, TyKind::Int(IntTy::I16)), - ClosureKind::FnOnce => Ty::new(interner, TyKind::Int(IntTy::I32)), + ClosureKind::Fn => types.types.i8, + ClosureKind::FnMut => types.types.i16, + ClosureKind::FnOnce => types.types.i32, } } @@ -1188,9 +1227,10 @@ impl<'db> rustc_type_ir::inherent::Ty> for Ty<'db> { interner: DbInterner<'db>, kind: rustc_type_ir::ClosureKind, ) -> Self { + let types = interner.default_types(); match kind { - ClosureKind::Fn | ClosureKind::FnMut => Ty::new(interner, TyKind::Int(IntTy::I16)), - ClosureKind::FnOnce => Ty::new(interner, TyKind::Int(IntTy::I32)), + ClosureKind::Fn | ClosureKind::FnMut => types.types.i16, + ClosureKind::FnOnce => types.types.i32, } } @@ -1237,7 +1277,7 
@@ impl<'db> rustc_type_ir::inherent::Ty> for Ty<'db> { | TyKind::Tuple(_) | TyKind::Error(_) | TyKind::Infer(InferTy::IntVar(_) | InferTy::FloatVar(_)) => { - Ty::new(interner, TyKind::Uint(UintTy::U8)) + interner.default_types().types.u8 } TyKind::Bound(..) @@ -1254,20 +1294,19 @@ impl<'db> rustc_type_ir::inherent::Ty> for Ty<'db> { } } -interned_vec_db!(Tys, Ty); +interned_slice!(TysStorage, Tys, StoredTys, tys, Ty<'db>, Ty<'static>); +impl_foldable_for_interned_slice!(Tys); impl<'db> Tys<'db> { - pub fn inputs(&self) -> &[Ty<'db>] { + #[inline] + pub fn inputs(self) -> &'db [Ty<'db>] { self.as_slice().split_last().unwrap().1 } } impl<'db> rustc_type_ir::inherent::Tys> for Tys<'db> { fn inputs(self) -> as Interner>::FnInputTys { - Tys::new_from_iter( - DbInterner::conjure(), - self.as_slice().split_last().unwrap().1.iter().copied(), - ) + self.as_slice().split_last().unwrap().1 } fn output(self) -> as Interner>::Ty { @@ -1323,6 +1362,10 @@ pub enum BoundTyKind { #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] pub struct ErrorGuaranteed; +impl GenericTypeVisitable for ErrorGuaranteed { + fn generic_visit_with(&self, _visitor: &mut V) {} +} + impl<'db> TypeVisitable> for ErrorGuaranteed { fn visit_with>>( &self, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/util.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/util.rs index bc4b5fdbfc520..34ecfed08f29d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/util.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/util.rs @@ -77,9 +77,10 @@ pub trait IntegerTypeExt { impl IntegerTypeExt for IntegerType { fn to_ty<'db>(&self, interner: DbInterner<'db>) -> Ty<'db> { + let types = interner.default_types(); match self { - IntegerType::Pointer(true) => Ty::new(interner, TyKind::Int(IntTy::Isize)), - IntegerType::Pointer(false) => Ty::new(interner, TyKind::Uint(UintTy::Usize)), + IntegerType::Pointer(true) => types.types.isize, + IntegerType::Pointer(false) => types.types.usize, IntegerType::Fixed(i, s) => i.to_ty(interner, *s), } } @@ -120,17 +121,18 @@ impl IntegerExt for Integer { #[inline] fn to_ty<'db>(&self, interner: DbInterner<'db>, signed: bool) -> Ty<'db> { use Integer::*; + let types = interner.default_types(); match (*self, signed) { - (I8, false) => Ty::new(interner, TyKind::Uint(UintTy::U8)), - (I16, false) => Ty::new(interner, TyKind::Uint(UintTy::U16)), - (I32, false) => Ty::new(interner, TyKind::Uint(UintTy::U32)), - (I64, false) => Ty::new(interner, TyKind::Uint(UintTy::U64)), - (I128, false) => Ty::new(interner, TyKind::Uint(UintTy::U128)), - (I8, true) => Ty::new(interner, TyKind::Int(IntTy::I8)), - (I16, true) => Ty::new(interner, TyKind::Int(IntTy::I16)), - (I32, true) => Ty::new(interner, TyKind::Int(IntTy::I32)), - (I64, true) => Ty::new(interner, TyKind::Int(IntTy::I64)), - (I128, true) => Ty::new(interner, TyKind::Int(IntTy::I128)), + (I8, false) => types.types.u8, + (I16, false) => types.types.u16, + (I32, false) => types.types.u32, + (I64, false) => types.types.u64, + (I128, false) => types.types.u128, + (I8, true) => types.types.i8, + (I16, true) => types.types.i16, + (I32, true) => types.types.i32, + (I64, true) => types.types.i64, + (I128, true) => types.types.i128, } } @@ -214,11 +216,12 @@ impl FloatExt for Float { #[inline] fn to_ty<'db>(&self, interner: DbInterner<'db>) -> Ty<'db> { use Float::*; + let types = interner.default_types(); match *self { - F16 => Ty::new(interner, TyKind::Float(FloatTy::F16)), - F32 => Ty::new(interner, 
TyKind::Float(FloatTy::F32)), - F64 => Ty::new(interner, TyKind::Float(FloatTy::F64)), - F128 => Ty::new(interner, TyKind::Float(FloatTy::F128)), + F16 => types.types.f16, + F32 => types.types.f32, + F64 => types.types.f64, + F128 => types.types.f128, } } @@ -244,13 +247,7 @@ impl PrimitiveExt for Primitive { match *self { Primitive::Int(i, signed) => i.to_ty(interner, signed), Primitive::Float(f) => f.to_ty(interner), - Primitive::Pointer(_) => Ty::new( - interner, - TyKind::RawPtr( - Ty::new(interner, TyKind::Tuple(Default::default())), - rustc_ast_ir::Mutability::Mut, - ), - ), + Primitive::Pointer(_) => interner.default_types().types.mut_unit_ptr, } } @@ -283,7 +280,7 @@ impl<'db> CoroutineArgsExt<'db> for CoroutineArgs> { /// The type of the state discriminant used in the coroutine type. #[inline] fn discr_ty(&self, interner: DbInterner<'db>) -> Ty<'db> { - Ty::new(interner, TyKind::Uint(UintTy::U32)) + interner.default_types().types.u32 } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/opaques.rs b/src/tools/rust-analyzer/crates/hir-ty/src/opaques.rs index 4c6b585016fad..27ae5e39d55bf 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/opaques.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/opaques.rs @@ -13,7 +13,7 @@ use crate::{ db::{HirDatabase, InternedOpaqueTyId}, lower::{ImplTraitIdx, ImplTraits}, next_solver::{ - DbInterner, EarlyBinder, ErrorGuaranteed, SolverDefId, Ty, TypingMode, + DbInterner, ErrorGuaranteed, SolverDefId, StoredEarlyBinder, StoredTy, Ty, TypingMode, infer::{DbInternerInferExt, traits::ObligationCause}, obligation_ctxt::ObligationCtxt, }, @@ -72,10 +72,10 @@ pub(crate) fn opaque_types_defined_by( // FIXME: Collect opaques from `#[define_opaque]`. - fn extend_with_opaques<'db>( - db: &'db dyn HirDatabase, - opaques: &Option>>>, - mut make_impl_trait: impl FnMut(ImplTraitIdx<'db>) -> ImplTraitId<'db>, + fn extend_with_opaques( + db: &dyn HirDatabase, + opaques: &Option>>, + mut make_impl_trait: impl FnMut(ImplTraitIdx) -> ImplTraitId, result: &mut Vec, ) { if let Some(opaques) = opaques { @@ -89,25 +89,25 @@ pub(crate) fn opaque_types_defined_by( // These are firewall queries to prevent drawing dependencies between infers: -#[salsa::tracked(returns(ref), unsafe(non_update_return_type))] +#[salsa::tracked(returns(ref))] pub(crate) fn rpit_hidden_types<'db>( db: &'db dyn HirDatabase, function: FunctionId, -) -> ArenaMap, EarlyBinder<'db, Ty<'db>>> { +) -> ArenaMap> { let infer = InferenceResult::for_body(db, function.into()); let mut result = ArenaMap::new(); for (opaque, hidden_type) in infer.return_position_impl_trait_types(db) { - result.insert(opaque, EarlyBinder::bind(hidden_type)); + result.insert(opaque, StoredEarlyBinder::bind(hidden_type.store())); } result.shrink_to_fit(); result } -#[salsa::tracked(returns(ref), unsafe(non_update_return_type))] +#[salsa::tracked(returns(ref))] pub(crate) fn tait_hidden_types<'db>( db: &'db dyn HirDatabase, type_alias: TypeAliasId, -) -> ArenaMap, EarlyBinder<'db, Ty<'db>>> { +) -> ArenaMap> { // Call this first, to not perform redundant work if there are no TAITs. 
let Some(taits_count) = ImplTraits::type_alias_impl_traits(db, type_alias) .as_deref() @@ -129,7 +129,7 @@ pub(crate) fn tait_hidden_types<'db>( let mut result = ArenaMap::with_capacity(taits_count); for defining_body in defining_bodies { let infer = InferenceResult::for_body(db, defining_body); - for (&opaque, &hidden_type) in &infer.type_of_opaque { + for (&opaque, hidden_type) in &infer.type_of_opaque { let ImplTraitId::TypeAliasImplTrait(opaque_owner, opaque_idx) = opaque.loc(db) else { continue; }; @@ -138,13 +138,18 @@ pub(crate) fn tait_hidden_types<'db>( } // In the presence of errors, we attempt to create a unified type from all // types. rustc doesn't do that, but this should improve the experience. - let hidden_type = infcx.insert_type_vars(hidden_type); + let hidden_type = infcx.insert_type_vars(hidden_type.as_ref()); match result.entry(opaque_idx) { la_arena::Entry::Vacant(entry) => { - entry.insert(EarlyBinder::bind(hidden_type)); + entry.insert(StoredEarlyBinder::bind(hidden_type.store())); } la_arena::Entry::Occupied(entry) => { - _ = ocx.eq(&cause, param_env, entry.get().instantiate_identity(), hidden_type); + _ = ocx.eq( + &cause, + param_env, + entry.get().get().instantiate_identity(), + hidden_type, + ); } } } @@ -157,12 +162,15 @@ pub(crate) fn tait_hidden_types<'db>( let idx = la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(idx as u32)); match result.entry(idx) { la_arena::Entry::Vacant(entry) => { - entry.insert(EarlyBinder::bind(Ty::new_error(interner, ErrorGuaranteed))); + entry.insert(StoredEarlyBinder::bind( + Ty::new_error(interner, ErrorGuaranteed).store(), + )); } la_arena::Entry::Occupied(mut entry) => { - *entry.get_mut() = entry.get().map_bound(|hidden_type| { - infcx.resolve_vars_if_possible(hidden_type).replace_infer_with_error(interner) - }); + let hidden_type = entry.get().get().skip_binder(); + let hidden_type = + infcx.resolve_vars_if_possible(hidden_type).replace_infer_with_error(interner); + *entry.get_mut() = StoredEarlyBinder::bind(hidden_type.store()); } } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/specialization.rs b/src/tools/rust-analyzer/crates/hir-ty/src/specialization.rs index 9ae9a8e2a993a..d97a35549ca4d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/specialization.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/specialization.rs @@ -2,7 +2,6 @@ use hir_def::{HasModule, ImplId, nameres::crate_def_map}; use intern::sym; -use rustc_type_ir::inherent::SliceLike; use tracing::debug; use crate::{ @@ -22,6 +21,7 @@ use crate::{ // cannot create a cycle, but a cycle handler is required nevertheless. fn specializes_query_cycle( _db: &dyn HirDatabase, + _: salsa::Id, _specializing_impl_def_id: ImplId, _parent_impl_def_id: ImplId, ) -> bool { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs index a31353f1e3367..67ab89f5ec8fe 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs @@ -149,9 +149,10 @@ fn check_impl( let (body, body_source_map) = db.body_with_source_map(def); let inference_result = InferenceResult::for_body(&db, def); - for (pat, mut ty) in inference_result.type_of_pat.iter() { + for (pat, ty) in inference_result.type_of_pat.iter() { + let mut ty = ty.as_ref(); if let Pat::Bind { id, .. 
} = body[pat] { - ty = &inference_result.type_of_binding[id]; + ty = inference_result.type_of_binding[id].as_ref(); } let node = match pat_node(&body_source_map, pat, &db) { Some(value) => value, @@ -169,6 +170,7 @@ fn check_impl( } for (expr, ty) in inference_result.type_of_expr.iter() { + let ty = ty.as_ref(); let node = match expr_node(&body_source_map, expr, &db) { Some(value) => value, None => continue, @@ -209,8 +211,8 @@ fn check_impl( let range = node.as_ref().original_file_range_rooted(&db); let actual = format!( "expected {}, got {}", - mismatch.expected.display_test(&db, display_target), - mismatch.actual.display_test(&db, display_target) + mismatch.expected.as_ref().display_test(&db, display_target), + mismatch.actual.as_ref().display_test(&db, display_target) ); match mismatches.remove(&range) { Some(annotation) => assert_eq!(actual, annotation), @@ -318,20 +320,20 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String { crate::attach_db(&db, || { let mut buf = String::new(); - let mut infer_def = |inference_result: &InferenceResult<'_>, + let mut infer_def = |inference_result: &InferenceResult, body: Arc, body_source_map: Arc, krate: Crate| { let display_target = DisplayTarget::from_crate(&db, krate); - let mut types: Vec<(InFile, &Ty<'_>)> = Vec::new(); - let mut mismatches: Vec<(InFile, &TypeMismatch<'_>)> = Vec::new(); + let mut types: Vec<(InFile, Ty<'_>)> = Vec::new(); + let mut mismatches: Vec<(InFile, &TypeMismatch)> = Vec::new(); if let Some(self_param) = body.self_param { let ty = &inference_result.type_of_binding[self_param]; if let Some(syntax_ptr) = body_source_map.self_param_syntax() { let root = db.parse_or_expand(syntax_ptr.file_id); let node = syntax_ptr.map(|ptr| ptr.to_node(&root).syntax().clone()); - types.push((node, ty)); + types.push((node, ty.as_ref())); } } @@ -346,7 +348,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String { } Err(SyntheticSyntax) => continue, }; - types.push((node.clone(), ty)); + types.push((node.clone(), ty.as_ref())); if let Some(mismatch) = inference_result.type_mismatch_for_pat(pat) { mismatches.push((node, mismatch)); } @@ -360,7 +362,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String { } Err(SyntheticSyntax) => continue, }; - types.push((node.clone(), ty)); + types.push((node.clone(), ty.as_ref())); if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr) { mismatches.push((node, mismatch)); } @@ -401,8 +403,8 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String { "{}{:?}: expected {}, got {}\n", macro_prefix, range, - mismatch.expected.display_test(&db, display_target), - mismatch.actual.display_test(&db, display_target), + mismatch.expected.as_ref().display_test(&db, display_target), + mismatch.actual.as_ref().display_test(&db, display_target), ); } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs index ff55ff54cefbc..3bdc72d015006 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs @@ -74,6 +74,7 @@ fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expec let place = capture.display_place(closure.0, db); let capture_ty = capture .ty + .get() .skip_binder() .display_test(db, DisplayTarget::from_crate(db, module.krate(db))) .to_string(); diff --git 
a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs index 7b0c1d35442ec..6558d2179fba6 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs @@ -28,7 +28,7 @@ fn foo() -> i32 { } }); }, - &[("InferenceResult < 'db >::for_body_", 1)], + &[("InferenceResult::for_body_", 1)], expect_test::expect![[r#" [ "crate_local_def_map", @@ -36,17 +36,17 @@ fn foo() -> i32 { "ast_id_map_shim", "parse_shim", "real_span_map_shim", - "InferenceResult < 'db >::for_body_", + "InferenceResult::for_body_", "function_signature_shim", "function_signature_with_source_map_shim", "AttrFlags::query_", "body_shim", "body_with_source_map_shim", - "trait_environment_shim", + "trait_environment_query", "lang_items", "crate_lang_items", - "GenericPredicates < 'db >::query_with_diagnostics_", - "ImplTraits < 'db >::return_type_impl_traits_", + "GenericPredicates::query_with_diagnostics_", + "ImplTraits::return_type_impl_traits_", "expr_scopes_shim", ] "#]], @@ -72,7 +72,7 @@ fn foo() -> i32 { } }); }, - &[("InferenceResult < 'db >::for_body_", 0)], + &[("InferenceResult::for_body_", 0)], expect_test::expect![[r#" [ "parse_shim", @@ -115,7 +115,7 @@ fn baz() -> i32 { } }); }, - &[("InferenceResult < 'db >::for_body_", 3)], + &[("InferenceResult::for_body_", 3)], expect_test::expect![[r#" [ "crate_local_def_map", @@ -123,37 +123,37 @@ fn baz() -> i32 { "ast_id_map_shim", "parse_shim", "real_span_map_shim", - "InferenceResult < 'db >::for_body_", + "InferenceResult::for_body_", "function_signature_shim", "function_signature_with_source_map_shim", "AttrFlags::query_", "body_shim", "body_with_source_map_shim", - "trait_environment_shim", + "trait_environment_query", "lang_items", "crate_lang_items", "AttrFlags::query_", "AttrFlags::query_", - "GenericPredicates < 'db >::query_with_diagnostics_", - "ImplTraits < 'db >::return_type_impl_traits_", + "GenericPredicates::query_with_diagnostics_", + "ImplTraits::return_type_impl_traits_", "expr_scopes_shim", - "InferenceResult < 'db >::for_body_", + "InferenceResult::for_body_", "function_signature_shim", "function_signature_with_source_map_shim", "body_shim", "body_with_source_map_shim", - "trait_environment_shim", - "GenericPredicates < 'db >::query_with_diagnostics_", - "ImplTraits < 'db >::return_type_impl_traits_", + "trait_environment_query", + "GenericPredicates::query_with_diagnostics_", + "ImplTraits::return_type_impl_traits_", "expr_scopes_shim", - "InferenceResult < 'db >::for_body_", + "InferenceResult::for_body_", "function_signature_shim", "function_signature_with_source_map_shim", "body_shim", "body_with_source_map_shim", - "trait_environment_shim", - "GenericPredicates < 'db >::query_with_diagnostics_", - "ImplTraits < 'db >::return_type_impl_traits_", + "trait_environment_query", + "GenericPredicates::query_with_diagnostics_", + "ImplTraits::return_type_impl_traits_", "expr_scopes_shim", ] "#]], @@ -184,7 +184,7 @@ fn baz() -> i32 { } }); }, - &[("InferenceResult < 'db >::for_body_", 1)], + &[("InferenceResult::for_body_", 1)], expect_test::expect![[r#" [ "parse_shim", @@ -202,7 +202,7 @@ fn baz() -> i32 { "function_signature_shim", "body_with_source_map_shim", "body_shim", - "InferenceResult < 'db >::for_body_", + "InferenceResult::for_body_", "expr_scopes_shim", "function_signature_with_source_map_shim", "function_signature_shim", @@ -502,7 +502,7 @@ impl SomeStruct { "crate_local_def_map", 
"TraitImpls::for_crate_", "AttrFlags::query_", - "impl_trait_with_diagnostics_shim", + "impl_trait_with_diagnostics_query", "impl_signature_shim", "impl_signature_with_source_map_shim", "lang_items", @@ -512,7 +512,7 @@ impl SomeStruct { "AttrFlags::query_", "AttrFlags::query_", "AttrFlags::query_", - "impl_self_ty_with_diagnostics_shim", + "impl_self_ty_with_diagnostics_query", "struct_signature_shim", "struct_signature_with_source_map_shim", ] @@ -574,7 +574,7 @@ fn main() { "body_with_source_map_shim", "AttrFlags::query_", "ImplItems::of_", - "InferenceResult < 'db >::for_body_", + "InferenceResult::for_body_", "trait_signature_shim", "trait_signature_with_source_map_shim", "AttrFlags::query_", @@ -583,36 +583,36 @@ fn main() { "AttrFlags::query_", "body_shim", "body_with_source_map_shim", - "trait_environment_shim", + "trait_environment_query", "lang_items", "crate_lang_items", "AttrFlags::query_", "AttrFlags::query_", - "GenericPredicates < 'db >::query_with_diagnostics_", - "GenericPredicates < 'db >::query_with_diagnostics_", - "ImplTraits < 'db >::return_type_impl_traits_", - "InferenceResult < 'db >::for_body_", + "GenericPredicates::query_with_diagnostics_", + "GenericPredicates::query_with_diagnostics_", + "ImplTraits::return_type_impl_traits_", + "InferenceResult::for_body_", "function_signature_shim", "function_signature_with_source_map_shim", - "trait_environment_shim", - "GenericPredicates < 'db >::query_with_diagnostics_", - "ImplTraits < 'db >::return_type_impl_traits_", + "trait_environment_query", + "GenericPredicates::query_with_diagnostics_", + "ImplTraits::return_type_impl_traits_", "expr_scopes_shim", "struct_signature_shim", "struct_signature_with_source_map_shim", - "GenericPredicates < 'db >::query_with_diagnostics_", - "value_ty_shim", + "GenericPredicates::query_with_diagnostics_", + "value_ty_query", "VariantFields::firewall_", "VariantFields::query_", "InherentImpls::for_crate_", "impl_signature_shim", "impl_signature_with_source_map_shim", - "callable_item_signature_shim", + "callable_item_signature_query", "TraitImpls::for_crate_and_deps_", "TraitImpls::for_crate_", - "impl_trait_with_diagnostics_shim", - "impl_self_ty_with_diagnostics_shim", - "GenericPredicates < 'db >::query_with_diagnostics_", + "impl_trait_with_diagnostics_query", + "impl_self_ty_with_diagnostics_query", + "GenericPredicates::query_with_diagnostics_", ] "#]], ); @@ -671,7 +671,7 @@ fn main() { "AttrFlags::query_", "body_shim", "ImplItems::of_", - "InferenceResult < 'db >::for_body_", + "InferenceResult::for_body_", "AttrFlags::query_", "trait_signature_with_source_map_shim", "AttrFlags::query_", @@ -683,25 +683,25 @@ fn main() { "AttrFlags::query_", "AttrFlags::query_", "AttrFlags::query_", - "GenericPredicates < 'db >::query_with_diagnostics_", - "GenericPredicates < 'db >::query_with_diagnostics_", - "ImplTraits < 'db >::return_type_impl_traits_", - "InferenceResult < 'db >::for_body_", + "GenericPredicates::query_with_diagnostics_", + "GenericPredicates::query_with_diagnostics_", + "ImplTraits::return_type_impl_traits_", + "InferenceResult::for_body_", "function_signature_with_source_map_shim", - "GenericPredicates < 'db >::query_with_diagnostics_", - "ImplTraits < 'db >::return_type_impl_traits_", + "GenericPredicates::query_with_diagnostics_", + "ImplTraits::return_type_impl_traits_", "expr_scopes_shim", "struct_signature_with_source_map_shim", - "GenericPredicates < 'db >::query_with_diagnostics_", + "GenericPredicates::query_with_diagnostics_", "VariantFields::query_", 
"InherentImpls::for_crate_", "impl_signature_with_source_map_shim", "impl_signature_shim", - "callable_item_signature_shim", + "callable_item_signature_query", "TraitImpls::for_crate_", - "impl_trait_with_diagnostics_shim", - "impl_self_ty_with_diagnostics_shim", - "GenericPredicates < 'db >::query_with_diagnostics_", + "impl_trait_with_diagnostics_query", + "impl_self_ty_with_diagnostics_query", + "GenericPredicates::query_with_diagnostics_", ] "#]], ); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs index 2f8c31ec60744..fb598fe5acb0d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs @@ -21,9 +21,12 @@ use rustc_type_ir::{ use crate::{ db::HirDatabase, next_solver::{ - Canonical, DbInterner, GenericArgs, Goal, ParamEnv, Predicate, SolverContext, Span, Ty, - TyKind, - infer::{DbInternerInferExt, InferCtxt, traits::ObligationCause}, + Canonical, DbInterner, GenericArgs, Goal, ParamEnv, Predicate, SolverContext, Span, + StoredClauses, Ty, TyKind, + infer::{ + DbInternerInferExt, InferCtxt, + traits::{Obligation, ObligationCause}, + }, obligation_ctxt::ObligationCtxt, }, }; @@ -35,6 +38,31 @@ pub struct ParamEnvAndCrate<'db> { pub krate: Crate, } +impl<'db> ParamEnvAndCrate<'db> { + #[inline] + pub fn store(self) -> StoredParamEnvAndCrate { + StoredParamEnvAndCrate { param_env: self.param_env.clauses.store(), krate: self.krate } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct StoredParamEnvAndCrate { + param_env: StoredClauses, + pub krate: Crate, +} + +impl StoredParamEnvAndCrate { + #[inline] + pub fn param_env(&self) -> ParamEnv<'_> { + ParamEnv { clauses: self.param_env.as_ref() } + } + + #[inline] + pub fn as_ref(&self) -> ParamEnvAndCrate<'_> { + ParamEnvAndCrate { param_env: self.param_env(), krate: self.krate } + } +} + /// This should be used in `hir` only. 
pub fn structurally_normalize_ty<'db>( infcx: &InferCtxt<'db>, @@ -82,6 +110,16 @@ pub fn next_trait_solve_canonical_in_ctxt<'db>( let res = context.evaluate_root_goal(goal, Span::dummy(), None); + let obligation = Obligation { + cause: ObligationCause::dummy(), + param_env: goal.param_env, + recursion_depth: 0, + predicate: goal.predicate, + }; + infer_ctxt.inspect_evaluated_obligation(&obligation, &res, || { + Some(context.evaluate_root_goal_for_proof_tree(goal, Span::dummy()).1) + }); + let res = res.map(|r| (r.has_changed, r.certainty)); tracing::debug!("solve_nextsolver({:?}) => {:?}", goal, res); @@ -105,6 +143,16 @@ pub fn next_trait_solve_in_ctxt<'db, 'a>( let res = context.evaluate_root_goal(goal, Span::dummy(), None); + let obligation = Obligation { + cause: ObligationCause::dummy(), + param_env: goal.param_env, + recursion_depth: 0, + predicate: goal.predicate, + }; + infer_ctxt.inspect_evaluated_obligation(&obligation, &res, || { + Some(context.evaluate_root_goal_for_proof_tree(goal, Span::dummy()).1) + }); + let res = res.map(|r| (r.has_changed, r.certainty)); tracing::debug!("solve_nextsolver({:?}) => {:?}", goal, res); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs b/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs index 5c0af6dafb526..5b8122a0a5dfc 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs @@ -17,7 +17,7 @@ use hir_def::{AdtId, GenericDefId, GenericParamId, VariantId, signatures::Struct use rustc_ast_ir::Mutability; use rustc_type_ir::{ Variance, - inherent::{AdtDef, IntoKind, SliceLike}, + inherent::{AdtDef, IntoKind}, }; use stdx::never; @@ -25,12 +25,22 @@ use crate::{ db::HirDatabase, generics::{Generics, generics}, next_solver::{ - Const, ConstKind, DbInterner, ExistentialPredicate, GenericArg, GenericArgs, Region, - RegionKind, Term, Ty, TyKind, VariancesOf, + Const, ConstKind, DbInterner, ExistentialPredicate, GenericArgKind, GenericArgs, Region, + RegionKind, StoredVariancesOf, TermKind, Ty, TyKind, VariancesOf, }, }; pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> VariancesOf<'_> { + variances_of_query(db, def).as_ref() +} + +#[salsa::tracked( + returns(ref), + // cycle_fn = crate::variance::variances_of_cycle_fn, + // cycle_initial = crate::variance::variances_of_cycle_initial, + cycle_result = crate::variance::variances_of_cycle_initial, +)] +fn variances_of_query(db: &dyn HirDatabase, def: GenericDefId) -> StoredVariancesOf { tracing::debug!("variances_of(def={:?})", def); let interner = DbInterner::new_no_crate(db); match def { @@ -38,20 +48,21 @@ pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> Variances GenericDefId::AdtId(adt) => { if let AdtId::StructId(id) = adt { let flags = &db.struct_signature(id).flags; + let types = || crate::next_solver::default_types(db); if flags.contains(StructFlags::IS_UNSAFE_CELL) { - return VariancesOf::new_from_iter(interner, [Variance::Invariant]); + return types().one_invariant.store(); } else if flags.contains(StructFlags::IS_PHANTOM_DATA) { - return VariancesOf::new_from_iter(interner, [Variance::Covariant]); + return types().one_covariant.store(); } } } - _ => return VariancesOf::new_from_iter(interner, []), + _ => return VariancesOf::empty(interner).store(), } let generics = generics(db, def); let count = generics.len(); if count == 0 { - return VariancesOf::new_from_iter(interner, []); + return VariancesOf::empty(interner).store(); } let mut variances = Context { 
generics, variances: vec![Variance::Bivariant; count], db }.solve(); @@ -71,7 +82,7 @@ pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> Variances } } - VariancesOf::new_from_iter(interner, variances) + VariancesOf::new_from_slice(&variances).store() } // pub(crate) fn variances_of_cycle_fn( @@ -105,14 +116,15 @@ fn glb(v1: Variance, v2: Variance) -> Variance { pub(crate) fn variances_of_cycle_initial( db: &dyn HirDatabase, + _: salsa::Id, def: GenericDefId, -) -> VariancesOf<'_> { +) -> StoredVariancesOf { let interner = DbInterner::new_no_crate(db); let generics = generics(db, def); let count = generics.len(); // FIXME(next-solver): Returns `Invariance` and not `Bivariance` here, see the comment in the main query. - VariancesOf::new_from_iter(interner, std::iter::repeat_n(Variance::Invariant, count)) + VariancesOf::new_from_iter(interner, std::iter::repeat_n(Variance::Invariant, count)).store() } struct Context<'db> { @@ -130,7 +142,7 @@ impl<'db> Context<'db> { let mut add_constraints_from_variant = |variant| { for (_, field) in db.field_types(variant).iter() { self.add_constraints_from_ty( - field.instantiate_identity(), + field.get().instantiate_identity(), Variance::Covariant, ); } @@ -232,11 +244,11 @@ impl<'db> Context<'db> { } ExistentialPredicate::Projection(projection) => { self.add_constraints_from_invariant_args(projection.args); - match projection.term { - Term::Ty(ty) => { + match projection.term.kind() { + TermKind::Ty(ty) => { self.add_constraints_from_ty(ty, Variance::Invariant) } - Term::Const(konst) => self.add_constraints_from_const(konst), + TermKind::Const(konst) => self.add_constraints_from_const(konst), } } ExistentialPredicate::AutoTrait(_) => {} @@ -266,12 +278,12 @@ impl<'db> Context<'db> { fn add_constraints_from_invariant_args(&mut self, args: GenericArgs<'db>) { for k in args.iter() { - match k { - GenericArg::Lifetime(lt) => { + match k.kind() { + GenericArgKind::Lifetime(lt) => { self.add_constraints_from_region(lt, Variance::Invariant) } - GenericArg::Ty(ty) => self.add_constraints_from_ty(ty, Variance::Invariant), - GenericArg::Const(val) => self.add_constraints_from_const(val), + GenericArgKind::Type(ty) => self.add_constraints_from_ty(ty, Variance::Invariant), + GenericArgKind::Const(val) => self.add_constraints_from_const(val), } } } @@ -290,10 +302,12 @@ impl<'db> Context<'db> { let variances = self.db.variances_of(def_id); for (k, v) in args.iter().zip(variances) { - match k { - GenericArg::Lifetime(lt) => self.add_constraints_from_region(lt, variance.xform(v)), - GenericArg::Ty(ty) => self.add_constraints_from_ty(ty, variance.xform(v)), - GenericArg::Const(val) => self.add_constraints_from_const(val), + match k.kind() { + GenericArgKind::Lifetime(lt) => { + self.add_constraints_from_region(lt, variance.xform(v)) + } + GenericArgKind::Type(ty) => self.add_constraints_from_ty(ty, variance.xform(v)), + GenericArgKind::Const(val) => self.add_constraints_from_const(val), } } } @@ -387,7 +401,7 @@ mod tests { AdtId, GenericDefId, ModuleDefId, hir::generics::GenericParamDataRef, src::HasSource, }; use itertools::Itertools; - use rustc_type_ir::{Variance, inherent::SliceLike}; + use rustc_type_ir::Variance; use stdx::format_to; use syntax::{AstNode, ast::HasName}; use test_fixture::WithFixture; diff --git a/src/tools/rust-analyzer/crates/hir/Cargo.toml b/src/tools/rust-analyzer/crates/hir/Cargo.toml index 24b2bd9150ee3..d20ee1546fa48 100644 --- a/src/tools/rust-analyzer/crates/hir/Cargo.toml +++ 
b/src/tools/rust-analyzer/crates/hir/Cargo.toml @@ -17,6 +17,7 @@ rustc-hash.workspace = true either.workspace = true arrayvec.workspace = true itertools.workspace = true +serde_json.workspace = true smallvec.workspace = true tracing = { workspace = true, features = ["attributes"] } triomphe.workspace = true diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs index 6ef6ea272e58c..050777a4806da 100644 --- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs @@ -617,7 +617,7 @@ impl<'db> AnyDiagnostic<'db> { pub(crate) fn inference_diagnostic( db: &'db dyn HirDatabase, def: DefWithBodyId, - d: &InferenceDiagnostic<'db>, + d: &InferenceDiagnostic, source_map: &hir_def::expr_store::BodySourceMap, sig_map: &hir_def::expr_store::ExpressionStoreSourceMap, ) -> Option> { @@ -663,7 +663,8 @@ impl<'db> AnyDiagnostic<'db> { } InferenceDiagnostic::ExpectedFunction { call_expr, found } => { let call_expr = expr_syntax(*call_expr)?; - ExpectedFunction { call: call_expr, found: Type::new(db, def, *found) }.into() + ExpectedFunction { call: call_expr, found: Type::new(db, def, found.as_ref()) } + .into() } InferenceDiagnostic::UnresolvedField { expr, @@ -675,7 +676,7 @@ impl<'db> AnyDiagnostic<'db> { UnresolvedField { expr, name: name.clone(), - receiver: Type::new(db, def, *receiver), + receiver: Type::new(db, def, receiver.as_ref()), method_with_same_name_exists: *method_with_same_name_exists, } .into() @@ -691,8 +692,10 @@ impl<'db> AnyDiagnostic<'db> { UnresolvedMethodCall { expr, name: name.clone(), - receiver: Type::new(db, def, *receiver), - field_with_same_name: (*field_with_same_name).map(|ty| Type::new(db, def, ty)), + receiver: Type::new(db, def, receiver.as_ref()), + field_with_same_name: field_with_same_name + .as_ref() + .map(|ty| Type::new(db, def, ty.as_ref())), assoc_func_with_same_name: assoc_func_with_same_name.map(Into::into), } .into() @@ -719,7 +722,7 @@ impl<'db> AnyDiagnostic<'db> { } InferenceDiagnostic::TypedHole { expr, expected } => { let expr = expr_syntax(*expr)?; - TypedHole { expr, expected: Type::new(db, def, *expected) }.into() + TypedHole { expr, expected: Type::new(db, def, expected.as_ref()) }.into() } &InferenceDiagnostic::MismatchedTupleStructPatArgCount { pat, expected, found } => { let expr_or_pat = match pat { @@ -736,12 +739,12 @@ impl<'db> AnyDiagnostic<'db> { } InferenceDiagnostic::CastToUnsized { expr, cast_ty } => { let expr = expr_syntax(*expr)?; - CastToUnsized { expr, cast_ty: Type::new(db, def, *cast_ty) }.into() + CastToUnsized { expr, cast_ty: Type::new(db, def, cast_ty.as_ref()) }.into() } InferenceDiagnostic::InvalidCast { expr, error, expr_ty, cast_ty } => { let expr = expr_syntax(*expr)?; - let expr_ty = Type::new(db, def, *expr_ty); - let cast_ty = Type::new(db, def, *cast_ty); + let expr_ty = Type::new(db, def, expr_ty.as_ref()); + let cast_ty = Type::new(db, def, cast_ty.as_ref()); InvalidCast { expr, error: *error, expr_ty, cast_ty }.into() } InferenceDiagnostic::TyDiagnostic { source, diag } => { diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index a50a736ccd0eb..9fc29de4a11c4 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -102,7 +102,7 @@ use rustc_type_ir::{ }; use smallvec::SmallVec; use span::{AstIdNode, Edition, FileId}; -use stdx::{format_to, impl_from, never}; +use 
stdx::{format_to, impl_from, never, variance::PhantomCovariantLifetime}; use syntax::{ AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange, ToSmolStr, ast::{self, HasName, HasVisibility as _}, @@ -175,7 +175,7 @@ pub use { layout::LayoutError, mir::{MirEvalError, MirLowerError}, next_solver::abi::Safety, - next_solver::clear_tls_solver_cache, + next_solver::{clear_tls_solver_cache, collect_ty_garbage}, }, // FIXME: These are needed for import assets, properly encapsulate them. hir_ty::{method_resolution::TraitImpls, next_solver::SimplifiedType}, @@ -697,7 +697,7 @@ impl Module { push_ty_diagnostics( db, acc, - db.field_types_with_diagnostics(s.id.into()).1, + db.field_types_with_diagnostics(s.id.into()).1.clone(), source_map, ); } @@ -709,7 +709,7 @@ impl Module { push_ty_diagnostics( db, acc, - db.field_types_with_diagnostics(u.id.into()).1, + db.field_types_with_diagnostics(u.id.into()).1.clone(), source_map, ); } @@ -739,7 +739,7 @@ impl Module { push_ty_diagnostics( db, acc, - db.field_types_with_diagnostics(v.into()).1, + db.field_types_with_diagnostics(v.into()).1.clone(), source_map, ); expr_store_diagnostics(db, acc, source_map); @@ -1219,7 +1219,7 @@ impl<'db> InstantiatedField<'db> { let interner = DbInterner::new_no_crate(db); let var_id = self.inner.parent.into(); - let field = db.field_types(var_id)[self.inner.id]; + let field = db.field_types(var_id)[self.inner.id].get(); let ty = field.instantiate(interner, self.args); TypeNs::new(db, var_id, ty) } @@ -1297,7 +1297,7 @@ impl Field { /// context of the field definition. pub fn ty<'db>(&self, db: &'db dyn HirDatabase) -> TypeNs<'db> { let var_id = self.parent.into(); - let ty = db.field_types(var_id)[self.id].skip_binder(); + let ty = db.field_types(var_id)[self.id].get().skip_binder(); TypeNs::new(db, var_id, ty) } @@ -1315,13 +1315,13 @@ impl Field { }; let interner = DbInterner::new_no_crate(db); let args = generic_args_from_tys(interner, def_id.into(), generics.map(|ty| ty.ty)); - let ty = db.field_types(var_id)[self.id].instantiate(interner, args); + let ty = db.field_types(var_id)[self.id].get().instantiate(interner, args); Type::new(db, var_id, ty) } pub fn layout(&self, db: &dyn HirDatabase) -> Result { db.layout_of_ty( - self.ty(db).ty, + self.ty(db).ty.store(), param_env_from_has_crate( db, match hir_def::VariantId::from(self.parent) { @@ -1331,7 +1331,8 @@ impl Field { hir_def::VariantId::StructId(id) => GenericDefId::AdtId(id.into()), hir_def::VariantId::UnionId(id) => GenericDefId::AdtId(id.into()), }, - ), + ) + .store(), ) .map(|layout| Layout(layout, db.target_data_layout(self.krate(db).into()).unwrap())) } @@ -1662,7 +1663,7 @@ impl Variant { self.source(db)?.value.expr() } - pub fn eval(self, db: &dyn HirDatabase) -> Result> { + pub fn eval(self, db: &dyn HirDatabase) -> Result { db.const_eval_discriminant(self.into()) } @@ -1753,7 +1754,7 @@ impl Adt { let args = GenericArgs::for_item_with_defaults(interner, adt_id.into(), |_, id, _| { GenericArg::error_from_id(interner, id) }); - db.layout_of_adt(adt_id, args, param_env_from_has_crate(db, adt_id)) + db.layout_of_adt(adt_id, args.store(), param_env_from_has_crate(db, adt_id).store()) .map(|layout| Layout(layout, db.target_data_layout(self.krate(db).id).unwrap())) } @@ -1988,8 +1989,8 @@ impl DefWithBody { acc.push( TypeMismatch { expr_or_pat, - expected: Type::new(db, DefWithBodyId::from(self), mismatch.expected), - actual: Type::new(db, DefWithBodyId::from(self), mismatch.actual), + expected: Type::new(db, DefWithBodyId::from(self), 
mismatch.expected.as_ref()), + actual: Type::new(db, DefWithBodyId::from(self), mismatch.actual.as_ref()), } .into(), ); @@ -2059,7 +2060,10 @@ impl DefWithBody { } mir::MirSpan::Unknown => continue, }; - acc.push(MovedOutOfRef { ty: Type::new_for_crate(krate, moof.ty), span }.into()) + acc.push( + MovedOutOfRef { ty: Type::new_for_crate(krate, moof.ty.as_ref()), span } + .into(), + ) } let mol = &borrowck_result.mutability_of_locals; for (binding_id, binding_data) in body.bindings() { @@ -2286,7 +2290,7 @@ impl Function { .inputs() .iter() .enumerate() - .map(|(idx, ty)| { + .map(|(idx, &ty)| { let ty = Type { env: environment, ty }; Param { func: Callee::Def(CallableDefId::FunctionId(self.id)), ty, idx } }) @@ -2313,7 +2317,7 @@ impl Function { .iter() .enumerate() .skip(skip) - .map(|(idx, ty)| { + .map(|(idx, &ty)| { let ty = Type { env: environment, ty }; Param { func: Callee::Def(CallableDefId::FunctionId(self.id)), ty, idx } }) @@ -2337,7 +2341,7 @@ impl Function { .iter() .enumerate() .skip(skip) - .map(|(idx, ty)| { + .map(|(idx, &ty)| { let ty = Type { env: environment, ty }; Param { func: Callee::Def(CallableDefId::FunctionId(self.id)), ty, idx } }) @@ -2468,15 +2472,16 @@ impl Function { self, db: &dyn HirDatabase, span_formatter: impl Fn(FileId, TextRange) -> String, - ) -> Result> { + ) -> Result { let interner = DbInterner::new_no_crate(db); let body = db.monomorphized_mir_body( self.id.into(), - GenericArgs::new_from_iter(interner, []), + GenericArgs::empty(interner).store(), ParamEnvAndCrate { param_env: db.trait_environment(self.id.into()), krate: self.id.module(db).krate(db), - }, + } + .store(), )?; let (result, output) = interpret_mir(db, body, false, None)?; let mut text = match result { @@ -2619,7 +2624,7 @@ impl SelfParam { let callable_sig = db.callable_item_signature(self.func.into()).instantiate_identity().skip_binder(); let environment = param_env_from_has_crate(db, self.func); - let ty = callable_sig.inputs().as_slice()[0]; + let ty = rustc_type_ir::inherent::SliceLike::as_slice(&callable_sig.inputs())[0]; Type { env: environment, ty } } @@ -2634,7 +2639,7 @@ impl SelfParam { let callable_sig = db.callable_item_signature(self.func.into()).instantiate(interner, args).skip_binder(); let environment = param_env_from_has_crate(db, self.func); - let ty = callable_sig.inputs().as_slice()[0]; + let ty = rustc_type_ir::inherent::SliceLike::as_slice(&callable_sig.inputs())[0]; Type { env: environment, ty } } } @@ -2728,11 +2733,14 @@ impl Const { } /// Evaluate the constant. - pub fn eval(self, db: &dyn HirDatabase) -> Result, ConstEvalError<'_>> { + pub fn eval(self, db: &dyn HirDatabase) -> Result, ConstEvalError> { let interner = DbInterner::new_no_crate(db); let ty = db.value_ty(self.id.into()).unwrap().instantiate_identity(); - db.const_eval(self.id, GenericArgs::new_from_iter(interner, []), None) - .map(|it| EvaluatedConst { const_: it, def: self.id.into(), ty }) + db.const_eval(self.id, GenericArgs::empty(interner), None).map(|it| EvaluatedConst { + const_: it, + def: self.id.into(), + ty, + }) } } @@ -2753,7 +2761,7 @@ impl<'db> EvaluatedConst<'db> { format!("{}", self.const_.display(db, display_target)) } - pub fn render_debug(&self, db: &'db dyn HirDatabase) -> Result> { + pub fn render_debug(&self, db: &'db dyn HirDatabase) -> Result { let kind = self.const_.kind(); if let ConstKind::Value(c) = kind && let ty = c.ty.kind() @@ -2809,7 +2817,7 @@ impl Static { } /// Evaluate the static initializer. 
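The `SelfParam::ty` changes above swap `callable_sig.inputs().as_slice()[0]` for a fully qualified `rustc_type_ir::inherent::SliceLike::as_slice(&callable_sig.inputs())[0]`, presumably because `as_slice` is now only reachable through the `SliceLike` trait (its import is also dropped elsewhere in this patch). Fully qualified syntax names the trait explicitly, which is the standard way to call a trait method that is not in scope or whose name would be ambiguous. A small self-contained illustration with hypothetical traits, not rust-analyzer's real ones:

```rust
// Two traits providing a method with the same name (hypothetical example).
trait SliceLike {
    fn as_slice(&self) -> &[u32];
}

trait RawBytes {
    fn as_slice(&self) -> &[u8];
}

struct Inputs(Vec<u32>);

impl SliceLike for Inputs {
    fn as_slice(&self) -> &[u32] {
        &self.0
    }
}

impl RawBytes for Inputs {
    fn as_slice(&self) -> &[u8] {
        &[]
    }
}

fn main() {
    let inputs = Inputs(vec![1, 2, 3]);
    // `inputs.as_slice()` would be ambiguous here (E0034); naming the trait
    // resolves it, just like the fully qualified call in `SelfParam::ty`.
    let first = SliceLike::as_slice(&inputs)[0];
    assert_eq!(first, 1);
}
```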
- pub fn eval(self, db: &dyn HirDatabase) -> Result, ConstEvalError<'_>> { + pub fn eval(self, db: &dyn HirDatabase) -> Result, ConstEvalError> { let ty = db.value_ty(self.id.into()).unwrap().instantiate_identity(); db.const_eval_static(self.id).map(|it| EvaluatedConst { const_: it, @@ -3847,7 +3855,7 @@ impl Local { pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> { let def = self.parent; let infer = InferenceResult::for_body(db, def); - let ty = infer[self.binding_id]; + let ty = infer.binding_ty(self.binding_id); Type::new(db, def, ty) } @@ -4152,8 +4160,8 @@ impl TypeParam { pub fn default(self, db: &dyn HirDatabase) -> Option> { let ty = generic_arg_from_param(db, self.id.into())?; let resolver = self.id.parent().resolver(db); - match ty { - GenericArg::Ty(it) if !it.is_ty_error() => { + match ty.kind() { + rustc_type_ir::GenericArgKind::Type(it) if !it.is_ty_error() => { Some(Type::new_with_resolver_inner(db, &resolver, it)) } _ => None, @@ -4331,10 +4339,7 @@ impl Impl { /// blanket impls, and only does a shallow type constructor check. In fact, this should've probably been on `Adt` /// etc., and not on `Type`. If you would want to create a precise list of all impls applying to a type, /// you would need to include blanket impls, and try to prove to predicates for each candidate. - pub fn all_for_type<'db>( - db: &'db dyn HirDatabase, - Type { ty, env: _ }: Type<'db>, - ) -> Vec { + pub fn all_for_type<'db>(db: &'db dyn HirDatabase, Type { ty, env }: Type<'db>) -> Vec { let mut result = Vec::new(); let interner = DbInterner::new_no_crate(db); let Some(simplified_ty) = @@ -4344,7 +4349,12 @@ impl Impl { }; let mut extend_with_impls = |impls: &[ImplId]| result.extend(impls.iter().copied().map(Impl::from)); - extend_with_impls(method_resolution::incoherent_inherent_impls(db, simplified_ty)); + method_resolution::with_incoherent_inherent_impls( + db, + env.krate, + &simplified_ty, + &mut extend_with_impls, + ); if let Some(module) = method_resolution::simplified_type_module(db, &simplified_ty) { InherentImpls::for_each_crate_and_block( db, @@ -4543,7 +4553,12 @@ impl<'db> Closure<'db> { info.0 .iter() .cloned() - .map(|capture| ClosureCapture { owner, closure: id, capture }) + .map(|capture| ClosureCapture { + owner, + closure: id, + capture, + _marker: PhantomCovariantLifetime::new(), + }) .collect() } @@ -4648,7 +4663,8 @@ impl FnTrait { pub struct ClosureCapture<'db> { owner: DefWithBodyId, closure: InternedClosureId, - capture: hir_ty::CapturedItem<'db>, + capture: hir_ty::CapturedItem, + _marker: PhantomCovariantLifetime<'db>, } impl<'db> ClosureCapture<'db> { @@ -4915,7 +4931,7 @@ impl<'db> Type<'db> { .fields() .iter() .map(|(idx, _)| { - field_types[idx].instantiate(self.interner, args) + field_types[idx].get().instantiate(self.interner, args) }) .filter(|it| !it.references_non_lt_error()) .collect() @@ -5132,7 +5148,7 @@ impl<'db> Type<'db> { let projection = Ty::new_alias( interner, AliasTyKind::Projection, - AliasTy::new(interner, alias.id.into(), args), + AliasTy::new_from_args(interner, alias.id.into(), args), ); let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis); @@ -5239,7 +5255,7 @@ impl<'db> Type<'db> { .iter() .map(|(local_id, ty)| { let def = Field { parent: variant_id.into(), id: local_id }; - let ty = ty.instantiate(interner, substs); + let ty = ty.get().instantiate(interner, substs); (def, self.derived(ty)) }) .collect() @@ -5322,7 +5338,12 @@ impl<'db> Type<'db> { return; }; - handle_impls(method_resolution::incoherent_inherent_impls(db, 
simplified_type)); + method_resolution::with_incoherent_inherent_impls( + db, + self.env.krate, + &simplified_type, + &mut handle_impls, + ); if let Some(module) = method_resolution::simplified_type_module(db, &simplified_type) { InherentImpls::for_each_crate_and_block( @@ -5392,12 +5413,14 @@ impl<'db> Type<'db> { .as_adt() .into_iter() .flat_map(|(_, substs)| substs.iter()) - .filter_map(move |arg| match arg { - GenericArg::Ty(ty) => Some(format_smolstr!("{}", ty.display(db, display_target))), - GenericArg::Const(const_) => { + .filter_map(move |arg| match arg.kind() { + rustc_type_ir::GenericArgKind::Type(ty) => { + Some(format_smolstr!("{}", ty.display(db, display_target))) + } + rustc_type_ir::GenericArgKind::Const(const_) => { Some(format_smolstr!("{}", const_.display(db, display_target))) } - GenericArg::Lifetime(_) => None, + rustc_type_ir::GenericArgKind::Lifetime(_) => None, }) } @@ -5801,7 +5824,7 @@ impl<'db> Type<'db> { } pub fn layout(&self, db: &'db dyn HirDatabase) -> Result { - db.layout_of_ty(self.ty, self.env) + db.layout_of_ty(self.ty.store(), self.env.store()) .map(|layout| Layout(layout, db.target_data_layout(self.env.krate).unwrap())) } @@ -5833,9 +5856,10 @@ impl<'db> TypeNs<'db> { pub fn impls_trait(&self, infcx: InferCtxt<'db>, trait_: Trait, args: &[TypeNs<'db>]) -> bool { let args = GenericArgs::new_from_iter( infcx.interner, - [self.ty].into_iter().chain(args.iter().map(|t| t.ty)).map(|t| t.into()), + [self.ty].into_iter().chain(args.iter().map(|t| t.ty)).map(GenericArg::from), ); - let trait_ref = hir_ty::next_solver::TraitRef::new(infcx.interner, trait_.id.into(), args); + let trait_ref = + hir_ty::next_solver::TraitRef::new_from_args(infcx.interner, trait_.id.into(), args); let pred_kind = rustc_type_ir::Binder::dummy(rustc_type_ir::PredicateKind::Clause( rustc_type_ir::ClauseKind::Trait(rustc_type_ir::TraitPredicate { diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index b15e642daae76..fcb97ab34e93f 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -10,13 +10,15 @@ use std::{ ops::{self, ControlFlow, Not}, }; +use base_db::FxIndexSet; use either::Either; use hir_def::{ DefWithBodyId, FunctionId, MacroId, StructId, TraitId, VariantId, + attrs::parse_extra_crate_attrs, expr_store::{Body, ExprOrPatSource, HygieneId, path::Path}, hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat}, nameres::{ModuleOrigin, crate_def_map}, - resolver::{self, HasResolver, Resolver, TypeNs}, + resolver::{self, HasResolver, Resolver, TypeNs, ValueNs}, type_ref::Mutability, }; use hir_expand::{ @@ -30,11 +32,16 @@ use hir_expand::{ use hir_ty::{ InferenceResult, diagnostics::{unsafe_operations, unsafe_operations_for_body}, - next_solver::DbInterner, + infer_query_with_inspect, + next_solver::{ + DbInterner, Span, + format_proof_tree::{ProofTreeData, dump_proof_tree_structured}, + }, }; use intern::{Interned, Symbol, sym}; use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; +use rustc_type_ir::inherent::Span as _; use smallvec::{SmallVec, smallvec}; use span::{FileId, SyntaxContext}; use stdx::{TupleExt, always}; @@ -265,14 +272,27 @@ impl Semantics<'_, DB> { pub fn lint_attrs( &self, + file_id: FileId, krate: Crate, item: ast::AnyHasAttrs, ) -> impl DoubleEndedIterator { let mut cfg_options = None; let cfg_options = || *cfg_options.get_or_insert_with(|| krate.id.cfg_options(self.db)); + + let is_crate_root = file_id == 
krate.root_file(self.imp.db); + let is_source_file = ast::SourceFile::can_cast(item.syntax().kind()); + let extra_crate_attrs = (is_crate_root && is_source_file) + .then(|| { + parse_extra_crate_attrs(self.imp.db, krate.id) + .into_iter() + .flat_map(|src| src.attrs()) + }) + .into_iter() + .flatten(); + let mut result = Vec::new(); hir_expand::attrs::expand_cfg_attr::( - ast::attrs_including_inner(&item), + extra_crate_attrs.chain(ast::attrs_including_inner(&item)), cfg_options, |attr, _, _, _| { let hir_expand::attrs::Meta::TokenTree { path, tt } = attr else { @@ -1638,8 +1658,11 @@ impl<'db> SemanticsImpl<'db> { analyzer.expr_adjustments(expr).map(|it| { it.iter() .map(|adjust| { - let target = - Type::new_with_resolver(self.db, &analyzer.resolver, adjust.target); + let target = Type::new_with_resolver( + self.db, + &analyzer.resolver, + adjust.target.as_ref(), + ); let kind = match adjust.kind { hir_ty::Adjust::NeverToAny => Adjust::NeverToAny, hir_ty::Adjust::Deref(Some(hir_ty::OverloadedDeref(m))) => { @@ -2192,6 +2215,138 @@ impl<'db> SemanticsImpl<'db> { self.cache(adt_source.value.syntax().ancestors().last().unwrap(), adt_source.file_id); ToDef::to_def(self, adt_source.as_ref()) } + + pub fn locals_used( + &self, + element: Either<&ast::Expr, &ast::StmtList>, + text_range: TextRange, + ) -> Option> { + let sa = self.analyze(element.either(|e| e.syntax(), |s| s.syntax()))?; + let store = sa.store()?; + let mut resolver = sa.resolver.clone(); + let def = resolver.body_owner()?; + + let is_not_generated = |path: &Path| { + !path.mod_path().and_then(|path| path.as_ident()).is_some_and(Name::is_generated) + }; + + let exprs = element.either( + |e| vec![e.clone()], + |stmts| { + let mut exprs: Vec<_> = stmts + .statements() + .filter(|stmt| text_range.contains_range(stmt.syntax().text_range())) + .filter_map(|stmt| match stmt { + ast::Stmt::ExprStmt(expr_stmt) => expr_stmt.expr().map(|e| vec![e]), + ast::Stmt::Item(_) => None, + ast::Stmt::LetStmt(let_stmt) => { + let init = let_stmt.initializer(); + let let_else = let_stmt + .let_else() + .and_then(|le| le.block_expr()) + .map(ast::Expr::BlockExpr); + + match (init, let_else) { + (Some(i), Some(le)) => Some(vec![i, le]), + (Some(i), _) => Some(vec![i]), + (_, Some(le)) => Some(vec![le]), + _ => None, + } + } + }) + .flatten() + .collect(); + + if let Some(tail_expr) = stmts.tail_expr() + && text_range.contains_range(tail_expr.syntax().text_range()) + { + exprs.push(tail_expr); + } + exprs + }, + ); + let mut exprs: Vec<_> = + exprs.into_iter().filter_map(|e| sa.expr_id(e).and_then(|e| e.as_expr())).collect(); + + let mut locals: FxIndexSet = FxIndexSet::default(); + let mut add_to_locals_used = |id, parent_expr| { + let path = match id { + ExprOrPatId::ExprId(expr_id) => { + if let Expr::Path(path) = &store[expr_id] { + Some(path) + } else { + None + } + } + ExprOrPatId::PatId(pat_id) => { + if let Pat::Path(path) = &store[pat_id] { + Some(path) + } else { + None + } + } + }; + + if let Some(path) = path + && is_not_generated(path) + { + let _ = resolver.update_to_inner_scope(self.db, def, parent_expr); + let hygiene = store.expr_or_pat_path_hygiene(id); + resolver.resolve_path_in_value_ns_fully(self.db, path, hygiene).inspect(|value| { + if let ValueNs::LocalBinding(id) = value { + locals.insert((def, *id).into()); + } + }); + } + }; + + while let Some(expr_id) = exprs.pop() { + if let Expr::Assignment { target, .. 
} = store[expr_id] { + store.walk_pats(target, &mut |id| { + add_to_locals_used(ExprOrPatId::PatId(id), expr_id) + }); + }; + store.walk_child_exprs(expr_id, |id| { + exprs.push(id); + }); + + add_to_locals_used(ExprOrPatId::ExprId(expr_id), expr_id) + } + + Some(locals) + } + + pub fn get_failed_obligations(&self, token: SyntaxToken) -> Option { + let node = token.parent()?; + let node = self.find_file(&node); + + let container = self.with_ctx(|ctx| ctx.find_container(node))?; + + match container { + ChildContainer::DefWithBodyId(def) => { + thread_local! { + static RESULT: RefCell> = const { RefCell::new(Vec::new()) }; + } + infer_query_with_inspect( + self.db, + def, + Some(|infer_ctxt, _obligation, result, proof_tree| { + if result.is_err() + && let Some(tree) = proof_tree + { + let data = dump_proof_tree_structured(tree, Span::dummy(), infer_ctxt); + RESULT.with(|ctx| ctx.borrow_mut().push(data)); + } + }), + ); + let data: Vec = + RESULT.with(|data| data.borrow_mut().drain(..).collect()); + let data = serde_json::to_string_pretty(&data).unwrap_or_else(|_| "[]".to_owned()); + Some(data) + } + _ => None, + } + } } // FIXME This can't be the best way to do this diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs index 901c9e1575b26..848ad33801330 100644 --- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs +++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs @@ -79,7 +79,7 @@ pub(crate) enum BodyOrSig<'db> { def: DefWithBodyId, body: Arc, source_map: Arc, - infer: Option<&'db InferenceResult<'db>>, + infer: Option<&'db InferenceResult>, }, // To be folded into body once it is considered one VariantFields { @@ -119,7 +119,7 @@ impl<'db> SourceAnalyzer<'db> { def: DefWithBodyId, node @ InFile { file_id, .. }: InFile<&SyntaxNode>, offset: Option, - infer: Option<&'db InferenceResult<'db>>, + infer: Option<&'db InferenceResult>, ) -> SourceAnalyzer<'db> { let (body, source_map) = db.body_with_source_map(def); let scopes = db.expr_scopes(def); @@ -185,9 +185,7 @@ impl<'db> SourceAnalyzer<'db> { } // FIXME: Remove this - fn body_( - &self, - ) -> Option<(DefWithBodyId, &Body, &BodySourceMap, Option<&InferenceResult<'db>>)> { + fn body_(&self) -> Option<(DefWithBodyId, &Body, &BodySourceMap, Option<&InferenceResult>)> { self.body_or_sig.as_ref().and_then(|it| match it { BodyOrSig::Body { def, body, source_map, infer } => { Some((*def, &**body, &**source_map, infer.as_deref())) @@ -196,7 +194,7 @@ impl<'db> SourceAnalyzer<'db> { }) } - fn infer(&self) -> Option<&InferenceResult<'db>> { + fn infer(&self) -> Option<&InferenceResult> { self.body_or_sig.as_ref().and_then(|it| match it { BodyOrSig::Sig { .. } => None, BodyOrSig::VariantFields { .. } => None, @@ -240,7 +238,7 @@ impl<'db> SourceAnalyzer<'db> { ) } - fn expr_id(&self, expr: ast::Expr) -> Option { + pub(crate) fn expr_id(&self, expr: ast::Expr) -> Option { let src = InFile { file_id: self.file_id, value: expr }; self.store_sm()?.node_expr(src.as_ref()) } @@ -260,7 +258,7 @@ impl<'db> SourceAnalyzer<'db> { if let Pat::Bind { id, .. } = self.store()?[pat_id.as_pat()?] { Some(id) } else { None } } - pub(crate) fn expr_adjustments(&self, expr: &ast::Expr) -> Option<&[Adjustment<'db>]> { + pub(crate) fn expr_adjustments(&self, expr: &ast::Expr) -> Option<&[Adjustment]> { // It is safe to omit destructuring assignments here because they have no adjustments (neither // expressions nor patterns). 
let expr_id = self.expr_id(expr.clone())?.as_expr()?; @@ -326,8 +324,8 @@ impl<'db> SourceAnalyzer<'db> { let coerced = expr_id .as_expr() .and_then(|expr_id| infer.expr_adjustment(expr_id)) - .and_then(|adjusts| adjusts.last().map(|adjust| adjust.target)); - let ty = infer[expr_id]; + .and_then(|adjusts| adjusts.last().map(|adjust| adjust.target.as_ref())); + let ty = infer.expr_or_pat_ty(expr_id); let mk_ty = |ty: Ty<'db>| Type::new_with_resolver(db, &self.resolver, ty); Some((mk_ty(ty), coerced.map(mk_ty))) } @@ -342,14 +340,15 @@ impl<'db> SourceAnalyzer<'db> { let coerced = match expr_or_pat_id { ExprOrPatId::ExprId(idx) => infer .expr_adjustment(idx) - .and_then(|adjusts| adjusts.last().cloned()) - .map(|adjust| adjust.target), - ExprOrPatId::PatId(idx) => { - infer.pat_adjustment(idx).and_then(|adjusts| adjusts.last().cloned()) - } + .and_then(|adjusts| adjusts.last()) + .map(|adjust| adjust.target.as_ref()), + ExprOrPatId::PatId(idx) => infer + .pat_adjustment(idx) + .and_then(|adjusts| adjusts.last()) + .map(|adjust| adjust.as_ref()), }; - let ty = infer[expr_or_pat_id]; + let ty = infer.expr_or_pat_ty(expr_or_pat_id); let mk_ty = |ty: Ty<'db>| Type::new_with_resolver(db, &self.resolver, ty); Some((mk_ty(ty), coerced.map(mk_ty))) } @@ -361,7 +360,7 @@ impl<'db> SourceAnalyzer<'db> { ) -> Option> { let binding_id = self.binding_id_of_pat(pat)?; let infer = self.infer()?; - let ty = infer[binding_id]; + let ty = infer.binding_ty(binding_id); let mk_ty = |ty: Ty<'db>| Type::new_with_resolver(db, &self.resolver, ty); Some(mk_ty(ty)) } @@ -372,7 +371,7 @@ impl<'db> SourceAnalyzer<'db> { _param: &ast::SelfParam, ) -> Option> { let binding = self.body()?.self_param?; - let ty = self.infer()?[binding]; + let ty = self.infer()?.binding_ty(binding); Some(Type::new_with_resolver(db, &self.resolver, ty)) } @@ -404,7 +403,7 @@ impl<'db> SourceAnalyzer<'db> { infer .pat_adjustment(pat_id.as_pat()?)? .iter() - .map(|ty| Type::new_with_resolver(db, &self.resolver, *ty)) + .map(|ty| Type::new_with_resolver(db, &self.resolver, ty.as_ref())) .collect(), ) } @@ -482,7 +481,7 @@ impl<'db> SourceAnalyzer<'db> { fn field_subst( &self, field_expr: ExprId, - infer: &InferenceResult<'db>, + infer: &InferenceResult, db: &'db dyn HirDatabase, ) -> Option> { let body = self.store()?; @@ -598,8 +597,7 @@ impl<'db> SourceAnalyzer<'db> { let poll_fn = self.lang_items(db).FuturePoll?; // HACK: subst for `poll()` coincides with that for `Future` because `poll()` itself // doesn't have any generic parameters, so we skip building another subst for `poll()`. - let interner = DbInterner::new_no_crate(db); - let substs = GenericArgs::new_from_iter(interner, [ty.into()]); + let substs = GenericArgs::new_from_slice(&[ty.into()]); Some(self.resolve_impl_method_or_trait_def(db, poll_fn, substs)) } @@ -641,10 +639,9 @@ impl<'db> SourceAnalyzer<'db> { let ty = self.ty_of_expr(prefix_expr.expr()?)?; - let interner = DbInterner::new_no_crate(db); // HACK: subst for all methods coincides with that for their trait because the methods // don't have any generic parameters, so we skip building another subst for the methods. 
- let substs = GenericArgs::new_from_iter(interner, [ty.into()]); + let substs = GenericArgs::new_from_slice(&[ty.into()]); Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs)) } @@ -674,8 +671,7 @@ impl<'db> SourceAnalyzer<'db> { .unwrap_or(index_fn); // HACK: subst for all methods coincides with that for their trait because the methods // don't have any generic parameters, so we skip building another subst for the methods. - let interner = DbInterner::new_no_crate(db); - let substs = GenericArgs::new_from_iter(interner, [base_ty.into(), index_ty.into()]); + let substs = GenericArgs::new_from_slice(&[base_ty.into(), index_ty.into()]); Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs)) } @@ -694,8 +690,7 @@ impl<'db> SourceAnalyzer<'db> { })?; // HACK: subst for `index()` coincides with that for `Index` because `index()` itself // doesn't have any generic parameters, so we skip building another subst for `index()`. - let interner = DbInterner::new_no_crate(db); - let substs = GenericArgs::new_from_iter(interner, [lhs.into(), rhs.into()]); + let substs = GenericArgs::new_from_slice(&[lhs.into(), rhs.into()]); Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs)) } @@ -710,8 +705,7 @@ impl<'db> SourceAnalyzer<'db> { let op_fn = self.lang_items(db).TryTraitBranch?; // HACK: subst for `branch()` coincides with that for `Try` because `branch()` itself // doesn't have any generic parameters, so we skip building another subst for `branch()`. - let interner = DbInterner::new_no_crate(db); - let substs = GenericArgs::new_from_iter(interner, [ty.into()]); + let substs = GenericArgs::new_from_slice(&[ty.into()]); Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs)) } @@ -751,7 +745,8 @@ impl<'db> SourceAnalyzer<'db> { let variant = self.infer()?.variant_resolution_for_expr_or_pat(expr_id)?; let variant_data = variant.fields(db); let field = FieldId { parent: variant, local_id: variant_data.field(&local_name)? }; - let field_ty = (*db.field_types(variant).get(field.local_id)?).instantiate(interner, subst); + let field_ty = + (*db.field_types(variant).get(field.local_id)?).get().instantiate(interner, subst); Some(( field.into(), local, @@ -772,8 +767,9 @@ impl<'db> SourceAnalyzer<'db> { let variant = self.infer()?.variant_resolution_for_pat(pat_id.as_pat()?)?; let variant_data = variant.fields(db); let field = FieldId { parent: variant, local_id: variant_data.field(&field_name)? 
}; - let (adt, subst) = self.infer()?[pat_id.as_pat()?].as_adt()?; - let field_ty = (*db.field_types(variant).get(field.local_id)?).instantiate(interner, subst); + let (adt, subst) = self.infer()?.pat_ty(pat_id.as_pat()?).as_adt()?; + let field_ty = + (*db.field_types(variant).get(field.local_id)?).get().instantiate(interner, subst); Some(( field.into(), Type::new_with_resolver(db, &self.resolver, field_ty), @@ -835,23 +831,24 @@ impl<'db> SourceAnalyzer<'db> { if let Either::Right(container) = &mut container { *container = structurally_normalize_ty(&infcx, *container, trait_env.param_env); } - let handle_variants = - |variant: VariantId, subst: GenericArgs<'db>, container: &mut _| { - let fields = variant.fields(db); - let field = fields.field(&field_name.as_name())?; - let field_types = db.field_types(variant); - *container = Either::Right(field_types[field].instantiate(interner, subst)); - let generic_def = match variant { - VariantId::EnumVariantId(it) => it.loc(db).parent.into(), - VariantId::StructId(it) => it.into(), - VariantId::UnionId(it) => it.into(), - }; - Some(( - Either::Right(Field { parent: variant.into(), id: field }), - generic_def, - subst, - )) + let handle_variants = |variant: VariantId, + subst: GenericArgs<'db>, + container: &mut _| { + let fields = variant.fields(db); + let field = fields.field(&field_name.as_name())?; + let field_types = db.field_types(variant); + *container = Either::Right(field_types[field].get().instantiate(interner, subst)); + let generic_def = match variant { + VariantId::EnumVariantId(it) => it.loc(db).parent.into(), + VariantId::StructId(it) => it.into(), + VariantId::UnionId(it) => it.into(), }; + Some(( + Either::Right(Field { parent: variant.into(), id: field }), + generic_def, + subst, + )) + }; let temp_ty = Ty::new_error(interner, ErrorGuaranteed); let (field_def, generic_def, subst) = match std::mem::replace(&mut container, Either::Right(temp_ty)) { @@ -1173,7 +1170,7 @@ impl<'db> SourceAnalyzer<'db> { self.infer()?.type_of_expr_or_pat(expr_id)? 
} else if let Some(pat) = ast::Pat::cast(parent) { let pat_id = self.pat_id(&pat)?; - self.infer()?[pat_id] + self.infer()?.expr_or_pat_ty(pat_id) } else { return None; }; @@ -1245,7 +1242,7 @@ impl<'db> SourceAnalyzer<'db> { let infer = self.infer()?; let expr_id = self.expr_id(literal.clone().into())?; - let substs = infer[expr_id].as_adt()?.1; + let substs = infer.expr_or_pat_ty(expr_id).as_adt()?.1; let (variant, missing_fields, _exhaustive) = match expr_id { ExprOrPatId::ExprId(expr_id) => { @@ -1268,7 +1265,7 @@ impl<'db> SourceAnalyzer<'db> { let infer = self.infer()?; let pat_id = self.pat_id(&pattern.clone().into())?.as_pat()?; - let substs = infer[pat_id].as_adt()?.1; + let substs = infer.pat_ty(pat_id).as_adt()?.1; let (variant, missing_fields, _exhaustive) = record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?; @@ -1290,7 +1287,7 @@ impl<'db> SourceAnalyzer<'db> { .into_iter() .map(|local_id| { let field = FieldId { parent: variant, local_id }; - let ty = field_types[local_id].instantiate(interner, substs); + let ty = field_types[local_id].get().instantiate(interner, substs); (field.into(), Type::new_with_resolver_inner(db, &self.resolver, ty)) }) .collect() diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_dot_deref.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_dot_deref.rs new file mode 100644 index 0000000000000..d27a6b4ce7709 --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_explicit_dot_deref.rs @@ -0,0 +1,214 @@ +use hir::{Adjust, Mutability}; +use ide_db::assists::AssistId; +use itertools::Itertools; +use syntax::{ + AstNode, T, + ast::{self, syntax_factory::SyntaxFactory}, +}; + +use crate::{AssistContext, Assists}; + +// Assist: add_explicit_method_call_deref +// +// Insert explicit method call reference and dereferences. 
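The new `add_explicit_method_call_deref` assist defined below takes the receiver adjustments reported by `Semantics::expr_adjustments` and spells them out in the source: each `Deref` becomes a `*`, each borrow becomes `&`/`&mut` (or a raw borrow), and the rewritten receiver is parenthesized. A minimal string-level sketch of that folding step, under the assumption that the first adjustment in the list ends up innermost, as in `AdjustKind::wrap_expr` below (the real assist builds syntax nodes with `SyntaxFactory` rather than strings):

```rust
// Hypothetical, string-based rendering of a receiver adjustment chain.
#[derive(Clone, Copy)]
enum AdjustKind {
    Deref,
    Ref { mutable: bool },
}

fn render(receiver: &str, adjustments: &[AdjustKind]) -> String {
    let mut expr = receiver.to_owned();
    for adjust in adjustments {
        expr = match adjust {
            AdjustKind::Deref => format!("*{expr}"),
            AdjustKind::Ref { mutable: false } => format!("&{expr}"),
            AdjustKind::Ref { mutable: true } => format!("&mut {expr}"),
        };
    }
    // Parenthesize so the prefix operators bind to the whole receiver.
    format!("({expr})")
}

fn main() {
    // `foo: &mut Foo` calling `fn foo(&self)` is adjusted by a deref followed
    // by a shared reborrow, i.e. `(&*foo)`; compare the `works_reborrow` test below.
    let rendered = render("foo", &[AdjustKind::Deref, AdjustKind::Ref { mutable: false }]);
    assert_eq!(rendered, "(&*foo)");
}
```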
+// +// ``` +// struct Foo; +// impl Foo { fn foo(&self) {} } +// fn test() { +// Foo$0.$0foo(); +// } +// ``` +// -> +// ``` +// struct Foo; +// impl Foo { fn foo(&self) {} } +// fn test() { +// (&Foo).foo(); +// } +// ``` +pub(crate) fn add_explicit_method_call_deref( + acc: &mut Assists, + ctx: &AssistContext<'_>, +) -> Option<()> { + if ctx.has_empty_selection() { + return None; + } + let dot_token = ctx.find_token_syntax_at_offset(T![.])?; + if ctx.selection_trimmed() != dot_token.text_range() { + return None; + } + let method_call_expr = dot_token.parent().and_then(ast::MethodCallExpr::cast)?; + let receiver = method_call_expr.receiver()?; + + let adjustments = ctx.sema.expr_adjustments(&receiver)?; + let adjustments = + adjustments.into_iter().filter_map(|adjust| simple_adjust_kind(adjust.kind)).collect_vec(); + if adjustments.is_empty() { + return None; + } + + acc.add( + AssistId::refactor_rewrite("add_explicit_method_call_deref"), + "Insert explicit method call derefs", + dot_token.text_range(), + |builder| { + let mut edit = builder.make_editor(method_call_expr.syntax()); + let make = SyntaxFactory::without_mappings(); + let mut expr = receiver.clone(); + + for adjust_kind in adjustments { + expr = adjust_kind.wrap_expr(expr, &make); + } + + expr = make.expr_paren(expr).into(); + edit.replace(receiver.syntax(), expr.syntax()); + + builder.add_file_edits(ctx.vfs_file_id(), edit); + }, + ) +} + +fn simple_adjust_kind(adjust: Adjust) -> Option { + match adjust { + Adjust::NeverToAny | Adjust::Pointer(_) => None, + Adjust::Deref(_) => Some(AdjustKind::Deref), + Adjust::Borrow(hir::AutoBorrow::Ref(mutability)) => Some(AdjustKind::Ref(mutability)), + Adjust::Borrow(hir::AutoBorrow::RawPtr(mutability)) => Some(AdjustKind::RefRaw(mutability)), + } +} + +enum AdjustKind { + Deref, + Ref(Mutability), + RefRaw(Mutability), +} + +impl AdjustKind { + fn wrap_expr(self, expr: ast::Expr, make: &SyntaxFactory) -> ast::Expr { + match self { + AdjustKind::Deref => make.expr_prefix(T![*], expr).into(), + AdjustKind::Ref(mutability) => make.expr_ref(expr, mutability.is_mut()), + AdjustKind::RefRaw(mutability) => make.expr_raw_ref(expr, mutability.is_mut()), + } + } +} + +#[cfg(test)] +mod tests { + use crate::tests::check_assist; + + use super::*; + + #[test] + fn works_ref() { + check_assist( + add_explicit_method_call_deref, + r#" + struct Foo; + impl Foo { fn foo(&self) {} } + fn test() { + Foo$0.$0foo(); + }"#, + r#" + struct Foo; + impl Foo { fn foo(&self) {} } + fn test() { + (&Foo).foo(); + }"#, + ); + } + + #[test] + fn works_ref_mut() { + check_assist( + add_explicit_method_call_deref, + r#" + struct Foo; + impl Foo { fn foo(&mut self) {} } + fn test() { + Foo$0.$0foo(); + }"#, + r#" + struct Foo; + impl Foo { fn foo(&mut self) {} } + fn test() { + (&mut Foo).foo(); + }"#, + ); + } + + #[test] + fn works_deref() { + check_assist( + add_explicit_method_call_deref, + r#" + struct Foo; + impl Foo { fn foo(self) {} } + fn test() { + let foo = &Foo; + foo$0.$0foo(); + }"#, + r#" + struct Foo; + impl Foo { fn foo(self) {} } + fn test() { + let foo = &Foo; + (*foo).foo(); + }"#, + ); + } + + #[test] + fn works_reborrow() { + check_assist( + add_explicit_method_call_deref, + r#" + struct Foo; + impl Foo { fn foo(&self) {} } + fn test() { + let foo = &mut Foo; + foo$0.$0foo(); + }"#, + r#" + struct Foo; + impl Foo { fn foo(&self) {} } + fn test() { + let foo = &mut Foo; + (&*foo).foo(); + }"#, + ); + } + + #[test] + fn works_deref_reborrow() { + check_assist( + add_explicit_method_call_deref, + r#" 
+ //- minicore: deref + struct Foo; + struct Bar; + impl core::ops::Deref for Foo { + type Target = Bar; + fn deref(&self) -> &Self::Target {} + } + impl Bar { fn bar(&self) {} } + fn test() { + let foo = &mut Foo; + foo$0.$0bar(); + }"#, + r#" + struct Foo; + struct Bar; + impl core::ops::Deref for Foo { + type Target = Bar; + fn deref(&self) -> &Self::Target {} + } + impl Bar { fn bar(&self) {} } + fn test() { + let foo = &mut Foo; + (&**foo).bar(); + }"#, + ); + } +} diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_return_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_return_type.rs index c9022f66d1e20..7934a80bfabbc 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_return_type.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_return_type.rs @@ -1,3 +1,4 @@ +use either::Either; use hir::HirDisplay; use syntax::{AstNode, SyntaxKind, SyntaxToken, TextRange, TextSize, ast, match_ast}; @@ -133,8 +134,9 @@ fn peel_blocks(mut expr: ast::Expr) -> ast::Expr { } fn extract_tail(ctx: &AssistContext<'_>) -> Option<(FnType, ast::Expr, InsertOrReplace)> { - let (fn_type, tail_expr, return_type_range, action) = - if let Some(closure) = ctx.find_node_at_offset::() { + let node = ctx.find_node_at_offset::>()?; + let (fn_type, tail_expr, return_type_range, action) = match node { + Either::Left(closure) => { let rpipe = closure.param_list()?.syntax().last_token()?; let rpipe_pos = rpipe.text_range().end(); @@ -149,9 +151,8 @@ fn extract_tail(ctx: &AssistContext<'_>) -> Option<(FnType, ast::Expr, InsertOrR let ret_range = TextRange::new(rpipe_pos, body_start); (FnType::Closure { wrap_expr }, tail_expr, ret_range, action) - } else { - let func = ctx.find_node_at_offset::()?; - + } + Either::Right(func) => { let rparen = func.param_list()?.r_paren_token()?; let rparen_pos = rparen.text_range().end(); let action = ret_ty_to_action(func.ret_type(), rparen)?; @@ -163,7 +164,8 @@ fn extract_tail(ctx: &AssistContext<'_>) -> Option<(FnType, ast::Expr, InsertOrR let ret_range_end = stmt_list.l_curly_token()?.text_range().start(); let ret_range = TextRange::new(rparen_pos, ret_range_end); (FnType::Function, tail_expr, ret_range, action) - }; + } + }; let range = ctx.selection_trimmed(); if return_type_range.contains_range(range) { cov_mark::hit!(cursor_in_ret_position); @@ -239,6 +241,24 @@ mod tests { ); } + #[test] + fn infer_return_type_cursor_at_return_type_pos_fn_inside_closure() { + cov_mark::check!(cursor_in_ret_position); + check_assist( + add_return_type, + r#"const _: fn() = || { + fn foo() $0{ + 45 + } +};"#, + r#"const _: fn() = || { + fn foo() -> i32 { + 45 + } +};"#, + ); + } + #[test] fn infer_return_type() { cov_mark::check!(cursor_on_tail); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs index 2eea4f71ed34f..63b1a0193bd6d 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs @@ -3,7 +3,7 @@ use ide_db::famous_defs::FamousDefs; use stdx::format_to; use syntax::{ AstNode, - ast::{self, HasArgList, HasLoopBody, edit_in_place::Indent, syntax_factory::SyntaxFactory}, + ast::{self, HasArgList, HasLoopBody, edit::AstNodeEdit, syntax_factory::SyntaxFactory}, }; use crate::{AssistContext, AssistId, Assists}; @@ -62,10 +62,10 @@ pub(crate) fn 
convert_iter_for_each_to_for( stmt.as_ref().map_or_else(|| method.indent_level(), ast::ExprStmt::indent_level); let block = match body { - ast::Expr::BlockExpr(block) => block.clone_for_update(), - _ => make.block_expr(Vec::new(), Some(body)), - }; - block.reindent_to(indent); + ast::Expr::BlockExpr(block) => block.reset_indent(), + _ => make.block_expr(Vec::new(), Some(body.reset_indent().indent(1.into()))), + } + .indent(indent); let expr_for_loop = make.expr_for_loop(param, receiver, block); @@ -285,15 +285,23 @@ fn main() { r#" //- minicore: iterators fn main() { - let it = core::iter::repeat(92); - it.$0for_each(|(x, y)| println!("x: {}, y: {}", x, y)); + { + let it = core::iter::repeat(92); + it.$0for_each(|param| match param { + (x, y) => println!("x: {}, y: {}", x, y), + }); + } } "#, r#" fn main() { - let it = core::iter::repeat(92); - for (x, y) in it { - println!("x: {}, y: {}", x, y) + { + let it = core::iter::repeat(92); + for param in it { + match param { + (x, y) => println!("x: {}, y: {}", x, y), + } + } } } "#, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs index 7f4fb4c694d77..05ccd5b9bff69 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs @@ -42,6 +42,21 @@ use crate::{ // bar(); // } // ``` +// --- +// ``` +// //- minicore: option +// fn foo() -> Option { None } +// fn main() { +// $0let x = foo(); +// } +// ``` +// -> +// ``` +// fn foo() -> Option { None } +// fn main() { +// let Some(x) = foo() else { return }; +// } +// ``` pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { match ctx.find_node_at_offset::>()? { Either::Left(let_stmt) => let_stmt_to_guarded_return(let_stmt, acc, ctx), diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs index 4b7314be46093..231df9b5b3e15 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs @@ -9,14 +9,14 @@ use hir::{ use ide_db::{ FxIndexSet, RootDatabase, assists::GroupLabel, - defs::{Definition, NameRefClass}, + defs::Definition, famous_defs::FamousDefs, helpers::mod_path_to_ast, imports::insert_use::{ImportScope, insert_use}, search::{FileReference, ReferenceCategory, SearchScope}, source_change::SourceChangeBuilder, syntax_helpers::node_ext::{ - for_each_tail_expr, preorder_expr, walk_expr, walk_pat, walk_patterns_in_expr, + for_each_tail_expr, preorder_expr, walk_pat, walk_patterns_in_expr, }, }; use itertools::Itertools; @@ -687,29 +687,6 @@ impl FunctionBody { } } - fn walk_expr(&self, cb: &mut dyn FnMut(ast::Expr)) { - match self { - FunctionBody::Expr(expr) => walk_expr(expr, cb), - FunctionBody::Span { parent, text_range, .. 
} => { - parent - .statements() - .filter(|stmt| text_range.contains_range(stmt.syntax().text_range())) - .filter_map(|stmt| match stmt { - ast::Stmt::ExprStmt(expr_stmt) => expr_stmt.expr(), - ast::Stmt::Item(_) => None, - ast::Stmt::LetStmt(stmt) => stmt.initializer(), - }) - .for_each(|expr| walk_expr(&expr, cb)); - if let Some(expr) = parent - .tail_expr() - .filter(|it| text_range.contains_range(it.syntax().text_range())) - { - walk_expr(&expr, cb); - } - } - } - } - fn preorder_expr(&self, cb: &mut dyn FnMut(WalkEvent) -> bool) { match self { FunctionBody::Expr(expr) => preorder_expr(expr, cb), @@ -718,10 +695,24 @@ impl FunctionBody { .statements() .filter(|stmt| text_range.contains_range(stmt.syntax().text_range())) .filter_map(|stmt| match stmt { - ast::Stmt::ExprStmt(expr_stmt) => expr_stmt.expr(), + ast::Stmt::ExprStmt(expr_stmt) => expr_stmt.expr().map(|e| vec![e]), ast::Stmt::Item(_) => None, - ast::Stmt::LetStmt(stmt) => stmt.initializer(), + ast::Stmt::LetStmt(stmt) => { + let init = stmt.initializer(); + let let_else = stmt + .let_else() + .and_then(|le| le.block_expr()) + .map(ast::Expr::BlockExpr); + + match (init, let_else) { + (Some(i), Some(le)) => Some(vec![i, le]), + (Some(i), _) => Some(vec![i]), + (_, Some(le)) => Some(vec![le]), + _ => None, + } + } }) + .flatten() .for_each(|expr| preorder_expr(&expr, cb)); if let Some(expr) = parent .tail_expr() @@ -799,22 +790,14 @@ impl FunctionBody { let mut self_param = None; let mut res = FxIndexSet::default(); - fn local_from_name_ref( - sema: &Semantics<'_, RootDatabase>, - name_ref: ast::NameRef, - ) -> Option { - match NameRefClass::classify(sema, &name_ref) { - Some( - NameRefClass::Definition(Definition::Local(local_ref), _) - | NameRefClass::FieldShorthand { local_ref, field_ref: _, adt_subst: _ }, - ) => Some(local_ref), - _ => None, - } - } + let (text_range, element) = match self { + FunctionBody::Expr(expr) => (expr.syntax().text_range(), Either::Left(expr)), + FunctionBody::Span { parent, text_range, .. 
} => (*text_range, Either::Right(parent)), + }; let mut add_name_if_local = |local_ref: Local| { - let InFile { file_id, value } = local_ref.primary_source(sema.db).source; // locals defined inside macros are not relevant to us + let InFile { file_id, value } = local_ref.primary_source(sema.db).source; if !file_id.is_macro() { match value { Either::Right(it) => { @@ -826,59 +809,11 @@ impl FunctionBody { } } }; - self.walk_expr(&mut |expr| match expr { - ast::Expr::PathExpr(path_expr) => { - if let Some(local) = path_expr - .path() - .and_then(|it| it.as_single_name_ref()) - .and_then(|name_ref| local_from_name_ref(sema, name_ref)) - { - add_name_if_local(local); - } - } - ast::Expr::ClosureExpr(closure_expr) => { - if let Some(body) = closure_expr.body() { - body.syntax() - .descendants() - .filter_map(ast::NameRef::cast) - .filter_map(|name_ref| local_from_name_ref(sema, name_ref)) - .for_each(&mut add_name_if_local); - } - } - ast::Expr::MacroExpr(expr) => { - if let Some(tt) = expr.macro_call().and_then(|call| call.token_tree()) { - tt.syntax() - .descendants_with_tokens() - .filter_map(SyntaxElement::into_token) - .filter(|it| { - matches!(it.kind(), SyntaxKind::STRING | SyntaxKind::IDENT | T![self]) - }) - .for_each(|t| { - if ast::String::can_cast(t.kind()) { - if let Some(parts) = - ast::String::cast(t).and_then(|s| sema.as_format_args_parts(&s)) - { - parts - .into_iter() - .filter_map(|(_, value)| value.and_then(|it| it.left())) - .filter_map(|path| match path { - PathResolution::Local(local) => Some(local), - _ => None, - }) - .for_each(&mut add_name_if_local); - } - } else { - sema.descend_into_macros_exact(t) - .into_iter() - .filter_map(|t| t.parent().and_then(ast::NameRef::cast)) - .filter_map(|name_ref| local_from_name_ref(sema, name_ref)) - .for_each(&mut add_name_if_local); - } - }); - } - } - _ => (), - }); + + if let Some(locals) = sema.locals_used(element, text_range) { + locals.into_iter().for_each(&mut add_name_if_local); + } + (res, self_param) } @@ -6294,4 +6229,150 @@ fn $0fun_name(v: i32) { }"#, ); } + + #[test] + fn no_parameter_for_variable_used_only_let_else() { + check_assist( + extract_function, + r#" +fn foo() -> u32 { + let x = 5; + + $0let Some(y) = Some(1) else { + return x * 2; + };$0 + + y +}"#, + r#" +fn foo() -> u32 { + let x = 5; + + let y = match fun_name(x) { + Ok(value) => value, + Err(value) => return value, + }; + + y +} + +fn $0fun_name(x: u32) -> Result<_, u32> { + let Some(y) = Some(1) else { + return Err(x * 2); + }; + Ok(y) +}"#, + ); + } + + #[test] + fn deeply_nested_macros() { + check_assist( + extract_function, + r#" +macro_rules! m { + ($val:ident) => { $val }; +} + +macro_rules! n { + ($v1:ident, $v2:ident) => { m!($v1) + $v2 }; +} + +macro_rules! o { + ($v1:ident, $v2:ident, $v3:ident) => { n!($v1, $v2) + $v3 }; +} + +fn foo() -> u32 { + let v1 = 1; + let v2 = 2; + $0let v3 = 3; + o!(v1, v2, v3)$0 +}"#, + r#" +macro_rules! m { + ($val:ident) => { $val }; +} + +macro_rules! n { + ($v1:ident, $v2:ident) => { m!($v1) + $v2 }; +} + +macro_rules! 
o { + ($v1:ident, $v2:ident, $v3:ident) => { n!($v1, $v2) + $v3 }; +} + +fn foo() -> u32 { + let v1 = 1; + let v2 = 2; + fun_name(v1, v2) +} + +fn $0fun_name(v1: u32, v2: u32) -> u32 { + let v3 = 3; + o!(v1, v2, v3) +}"#, + ); + } + + #[test] + fn pattern_assignment() { + check_assist( + extract_function, + r#" +struct Point {x: u32, y: u32}; + +fn point() -> Point { + Point { x: 45, y: 50 }; +} + +fn foo() { + let mut a = 1; + let mut b = 3; + $0Point { x: a, y: b } = point();$0 +} +"#, + r#" +struct Point {x: u32, y: u32}; + +fn point() -> Point { + Point { x: 45, y: 50 }; +} + +fn foo() { + let mut a = 1; + let mut b = 3; + fun_name(a, b); +} + +fn $0fun_name(mut a: u32, mut b: u32) { + Point { x: a, y: b } = point(); +} +"#, + ); + } + + #[test] + fn tuple_assignment() { + check_assist( + extract_function, + r#" +fn foo() { + let mut a = 3; + let mut b = 4; + $0(a, b) = (b, a);$0 +} +"#, + r#" +fn foo() { + let mut a = 3; + let mut b = 4; + fun_name(a, b); +} + +fn $0fun_name(mut a: i32, mut b: i32) { + (a, b) = (b, a); +} +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_nested_if.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_nested_if.rs index 73cb8204f2096..e491c043e1c11 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_nested_if.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_nested_if.rs @@ -1,7 +1,6 @@ -use ide_db::syntax_helpers::node_ext::is_pattern_cond; use syntax::{ T, - ast::{self, AstNode, BinaryOp}, + ast::{self, AstNode, BinaryOp, edit::AstNodeEdit}, }; use crate::{ @@ -39,10 +38,6 @@ pub(crate) fn merge_nested_if(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt } let cond = expr.condition()?; - //should not apply for if-let - if is_pattern_cond(cond.clone()) { - return None; - } let cond_range = cond.syntax().text_range(); @@ -62,12 +57,8 @@ pub(crate) fn merge_nested_if(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt return None; } let nested_if_cond = nested_if_to_merge.condition()?; - if is_pattern_cond(nested_if_cond.clone()) { - return None; - } let nested_if_then_branch = nested_if_to_merge.then_branch()?; - let then_branch_range = then_branch.syntax().text_range(); acc.add(AssistId::refactor_rewrite("merge_nested_if"), "Merge nested if", if_range, |edit| { let cond_text = if has_logic_op_or(&cond) { @@ -85,7 +76,7 @@ pub(crate) fn merge_nested_if(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt let replace_cond = format!("{cond_text} && {nested_if_cond_text}"); edit.replace(cond_range, replace_cond); - edit.replace(then_branch_range, nested_if_then_branch.syntax().text()); + edit.replace_ast(then_branch, nested_if_then_branch.dedent(1.into())); }) } @@ -112,8 +103,20 @@ mod tests { fn merge_nested_if_test1() { check_assist( merge_nested_if, - "fn f() { i$0f x == 3 { if y == 4 { 1 } } }", - "fn f() { if x == 3 && y == 4 { 1 } }", + " + fn f() { + i$0f x == 3 { + if y == 4 { + 1 + } + } + }", + " + fn f() { + if x == 3 && y == 4 { + 1 + } + }", ) } @@ -172,34 +175,36 @@ mod tests { } #[test] - fn merge_nested_if_do_not_apply_to_if_with_else_branch() { - check_assist_not_applicable( + fn merge_nested_if_test8() { + check_assist( merge_nested_if, - "fn f() { i$0f x == 3 { if y == 4 { 1 } } else { 2 } }", + "fn f() { i$0f let Some(x) = y { if x == 4 { 1 } } }", + "fn f() { if let Some(x) = y && x == 4 { 1 } }", ) } #[test] - fn merge_nested_if_do_not_apply_to_nested_if_with_else_branch() { - check_assist_not_applicable( + fn merge_nested_if_test9() { + 
check_assist( merge_nested_if, - "fn f() { i$0f x == 3 { if y == 4 { 1 } else { 2 } } }", + "fn f() { i$0f y == 0 { if let Some(x) = y { 1 } } }", + "fn f() { if y == 0 && let Some(x) = y { 1 } }", ) } #[test] - fn merge_nested_if_do_not_apply_to_if_let() { + fn merge_nested_if_do_not_apply_to_if_with_else_branch() { check_assist_not_applicable( merge_nested_if, - "fn f() { i$0f let Some(x) = y { if x == 4 { 1 } } }", + "fn f() { i$0f x == 3 { if y == 4 { 1 } } else { 2 } }", ) } #[test] - fn merge_nested_if_do_not_apply_to_nested_if_let() { + fn merge_nested_if_do_not_apply_to_nested_if_with_else_branch() { check_assist_not_applicable( merge_nested_if, - "fn f() { i$0f y == 0 { if let Some(x) = y { 1 } } }", + "fn f() { i$0f x == 3 { if y == 4 { 1 } else { 2 } } }", ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs index 6b50718424c72..8daf86923d921 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_guard.rs @@ -108,6 +108,10 @@ pub(crate) fn move_arm_cond_to_match_guard( let mut replace_node = None; let if_expr: IfExpr = IfExpr::cast(arm_body.syntax().clone()).or_else(|| { let block_expr = BlockExpr::cast(arm_body.syntax().clone())?; + if block_expr.statements().next().is_some() { + cov_mark::hit!(move_guard_non_naked_if); + return None; + } if let Expr::IfExpr(e) = block_expr.tail_expr()? { replace_node = Some(block_expr.syntax().clone()); Some(e) @@ -238,6 +242,46 @@ fn main() { "#, ); } + + #[test] + fn move_non_naked_arm_cond_to_guard() { + cov_mark::check!(move_guard_non_naked_if); + check_assist_not_applicable( + move_arm_cond_to_match_guard, + r#" +fn main() { + match 92 { + _ => { + let cond = true; + $0if cond { + foo() + } + }, + _ => true + } +} +"#, + ); + check_assist_not_applicable( + move_arm_cond_to_match_guard, + r#" +fn main() { + match 92 { + _ => { + let cond = true; + $0if cond { + foo() + } else { + bar() + } + }, + _ => true + } +} +"#, + ); + } + #[test] fn move_guard_to_arm_body_target() { check_assist_target( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_let_with_if_let.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_let_with_if_let.rs index 90f4ff7ad2511..b95e9b52b0538 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_let_with_if_let.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_let_with_if_let.rs @@ -60,11 +60,13 @@ pub(crate) fn replace_let_with_if_let(acc: &mut Assists, ctx: &AssistContext<'_> } } }; + let init_expr = + if let_expr_needs_paren(&init) { make.expr_paren(init).into() } else { init }; let block = make.block_expr([], None); block.indent(IndentLevel::from_node(let_stmt.syntax())); let if_expr = make.expr_if( - make.expr_let(pat, init).into(), + make.expr_let(pat, init_expr).into(), block, let_stmt .let_else() @@ -79,6 +81,16 @@ pub(crate) fn replace_let_with_if_let(acc: &mut Assists, ctx: &AssistContext<'_> ) } +fn let_expr_needs_paren(expr: &ast::Expr) -> bool { + let fake_expr_let = + ast::make::expr_let(ast::make::tuple_pat(None).into(), ast::make::ext::expr_unit()); + let Some(fake_expr) = fake_expr_let.expr() else { + stdx::never!(); + return false; + }; + expr.needs_parens_in_place_of(fake_expr_let.syntax(), fake_expr.syntax()) +} + #[cfg(test)] mod tests { use crate::tests::check_assist; @@ -107,6 +119,42 @@ fn main() { ) } + 
#[test] + fn replace_let_logic_and() { + check_assist( + replace_let_with_if_let, + r" +fn main() { + $0let x = true && false; +} + ", + r" +fn main() { + if let x = (true && false) { + } +} + ", + ) + } + + #[test] + fn replace_let_logic_or() { + check_assist( + replace_let_with_if_let, + r" +fn main() { + $0let x = true || false; +} + ", + r" +fn main() { + if let x = (true || false) { + } +} + ", + ) + } + #[test] fn replace_let_else() { check_assist( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs index 47cb4c8e74cb0..80f05caf4e0dc 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs @@ -105,6 +105,7 @@ mod handlers { pub(crate) type Handler = fn(&mut Assists, &AssistContext<'_>) -> Option<()>; mod add_braces; + mod add_explicit_dot_deref; mod add_explicit_enum_discriminant; mod add_explicit_type; mod add_label_to_loop; @@ -242,6 +243,7 @@ mod handlers { &[ // These are alphabetic for the foolish consistency add_braces::add_braces, + add_explicit_dot_deref::add_explicit_method_call_deref, add_explicit_enum_discriminant::add_explicit_enum_discriminant, add_explicit_type::add_explicit_type, add_label_to_loop::add_label_to_loop, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs index 7eef257b95f16..30405090002ab 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs @@ -69,6 +69,27 @@ enum TheEnum { ) } +#[test] +fn doctest_add_explicit_method_call_deref() { + check_doc_test( + "add_explicit_method_call_deref", + r#####" +struct Foo; +impl Foo { fn foo(&self) {} } +fn test() { + Foo$0.$0foo(); +} +"#####, + r#####" +struct Foo; +impl Foo { fn foo(&self) {} } +fn test() { + (&Foo).foo(); +} +"#####, + ) +} + #[test] fn doctest_add_explicit_type() { check_doc_test( @@ -791,6 +812,26 @@ fn main() { ) } +#[test] +fn doctest_convert_to_guarded_return_1() { + check_doc_test( + "convert_to_guarded_return", + r#####" +//- minicore: option +fn foo() -> Option { None } +fn main() { + $0let x = foo(); +} +"#####, + r#####" +fn foo() -> Option { None } +fn main() { + let Some(x) = foo() else { return }; +} +"#####, + ) +} + #[test] fn doctest_convert_tuple_return_type_to_struct() { check_doc_test( diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs index 9c2e0dcf1c625..18cfa53f8e960 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs @@ -4,6 +4,7 @@ use std::ops::ControlFlow; use hir::{Complete, Function, HasContainer, ItemContainer, MethodCandidateCallback}; use ide_db::FxHashSet; +use itertools::Either; use syntax::SmolStr; use crate::{ @@ -146,11 +147,14 @@ pub(crate) fn complete_undotted_self( _ => return, }; - let ty = self_param.ty(ctx.db); + let (param_name, ty) = match self_param { + Either::Left(self_param) => ("self", &self_param.ty(ctx.db)), + Either::Right(this_param) => ("this", this_param.ty()), + }; complete_fields( acc, ctx, - &ty, + ty, |acc, field, ty| { acc.add_field( ctx, @@ -163,15 +167,17 @@ pub(crate) fn complete_undotted_self( in_breakable: expr_ctx.in_breakable, }, }, - Some(SmolStr::new_static("self")), + 
Some(SmolStr::new_static(param_name)), field, &ty, ) }, - |acc, field, ty| acc.add_tuple_field(ctx, Some(SmolStr::new_static("self")), field, &ty), + |acc, field, ty| { + acc.add_tuple_field(ctx, Some(SmolStr::new_static(param_name)), field, &ty) + }, false, ); - complete_methods(ctx, &ty, &ctx.traits_in_scope(), |func| { + complete_methods(ctx, ty, &ctx.traits_in_scope(), |func| { acc.add_method( ctx, &DotAccess { @@ -184,7 +190,7 @@ pub(crate) fn complete_undotted_self( }, }, func, - Some(SmolStr::new_static("self")), + Some(SmolStr::new_static(param_name)), None, ) }); @@ -1073,6 +1079,96 @@ impl Foo { fn foo(&mut self) { $0 } }"#, ); } + #[test] + fn completes_bare_fields_and_methods_in_this_closure() { + check_no_kw( + r#" +//- minicore: fn +struct Foo { field: i32 } + +impl Foo { fn foo(&mut self) { let _: fn(&mut Self) = |this| { $0 } } }"#, + expect![[r#" + fd this.field i32 + me this.foo() fn(&mut self) + lc self &mut Foo + lc this &mut Foo + md core + sp Self Foo + st Foo Foo + tt Fn + tt FnMut + tt FnOnce + bt u32 u32 + "#]], + ); + } + + #[test] + fn completes_bare_fields_and_methods_in_other_closure() { + check_no_kw( + r#" +//- minicore: fn +struct Foo { field: i32 } + +impl Foo { fn foo(&self) { let _: fn(&Self) = |foo| { $0 } } }"#, + expect![[r#" + fd self.field i32 + me self.foo() fn(&self) + lc foo &Foo + lc self &Foo + md core + sp Self Foo + st Foo Foo + tt Fn + tt FnMut + tt FnOnce + bt u32 u32 + "#]], + ); + + check_no_kw( + r#" +//- minicore: fn +struct Foo { field: i32 } + +impl Foo { fn foo(&self) { let _: fn(&Self) = || { $0 } } }"#, + expect![[r#" + fd self.field i32 + me self.foo() fn(&self) + lc self &Foo + md core + sp Self Foo + st Foo Foo + tt Fn + tt FnMut + tt FnOnce + bt u32 u32 + "#]], + ); + + check_no_kw( + r#" +//- minicore: fn +struct Foo { field: i32 } + +impl Foo { fn foo(&self) { let _: fn(&Self, &Self) = |foo, other| { $0 } } }"#, + expect![[r#" + fd self.field i32 + me self.foo() fn(&self) + lc foo &Foo + lc other &Foo + lc self &Foo + md core + sp Self Foo + st Foo Foo + tt Fn + tt FnMut + tt FnOnce + bt u32 u32 + "#]], + ); + } + #[test] fn macro_completion_after_dot() { check_no_kw( diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs index 4dd84daf06494..7f67ef848ecec 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs @@ -8,16 +8,18 @@ use ide_db::{ RootDatabase, SnippetCap, documentation::{Documentation, HasDocs}, imports::insert_use::ImportScope, + syntax_helpers::suggest_name::NameGenerator, text_edit::TextEdit, ty_filter::TryEnum, }; use itertools::Itertools; use stdx::never; use syntax::{ + SmolStr, SyntaxKind::{EXPR_STMT, STMT_LIST}, T, TextRange, TextSize, ToSmolStr, ast::{self, AstNode, AstToken}, - match_ast, + format_smolstr, match_ast, }; use crate::{ @@ -117,15 +119,20 @@ pub(crate) fn complete_postfix( if let Some(parent_expr) = ast::Expr::cast(parent) { is_in_cond = is_in_condition(&parent_expr); } + let placeholder = suggest_receiver_name(dot_receiver, "0", &ctx.sema); match &try_enum { Some(try_enum) if is_in_cond => match try_enum { TryEnum::Result => { - postfix_snippet("let", "let Ok(_)", &format!("let Ok($0) = {receiver_text}")) - .add_to(acc, ctx.db); + postfix_snippet( + "let", + "let Ok(_)", + &format!("let Ok({placeholder}) = {receiver_text}"), + ) + .add_to(acc, ctx.db); postfix_snippet( "letm", 
"let Ok(mut _)", - &format!("let Ok(mut $0) = {receiver_text}"), + &format!("let Ok(mut {placeholder}) = {receiver_text}"), ) .add_to(acc, ctx.db); } @@ -133,13 +140,13 @@ pub(crate) fn complete_postfix( postfix_snippet( "let", "let Some(_)", - &format!("let Some($0) = {receiver_text}"), + &format!("let Some({placeholder}) = {receiver_text}"), ) .add_to(acc, ctx.db); postfix_snippet( "letm", "let Some(mut _)", - &format!("let Some(mut $0) = {receiver_text}"), + &format!("let Some(mut {placeholder}) = {receiver_text}"), ) .add_to(acc, ctx.db); } @@ -186,26 +193,29 @@ pub(crate) fn complete_postfix( } } if let Some(try_enum) = &try_enum { + let placeholder = suggest_receiver_name(dot_receiver, "1", &ctx.sema); match try_enum { TryEnum::Result => { postfix_snippet( "ifl", "if let Ok {}", - &format!("if let Ok($1) = {receiver_text} {{\n $0\n}}"), + &format!("if let Ok({placeholder}) = {receiver_text} {{\n $0\n}}"), ) .add_to(acc, ctx.db); postfix_snippet( "lete", "let Ok else {}", - &format!("let Ok($1) = {receiver_text} else {{\n $2\n}};\n$0"), + &format!( + "let Ok({placeholder}) = {receiver_text} else {{\n $2\n}};\n$0" + ), ) .add_to(acc, ctx.db); postfix_snippet( "while", "while let Ok {}", - &format!("while let Ok($1) = {receiver_text} {{\n $0\n}}"), + &format!("while let Ok({placeholder}) = {receiver_text} {{\n $0\n}}"), ) .add_to(acc, ctx.db); } @@ -213,21 +223,23 @@ pub(crate) fn complete_postfix( postfix_snippet( "ifl", "if let Some {}", - &format!("if let Some($1) = {receiver_text} {{\n $0\n}}"), + &format!("if let Some({placeholder}) = {receiver_text} {{\n $0\n}}"), ) .add_to(acc, ctx.db); postfix_snippet( "lete", "let Some else {}", - &format!("let Some($1) = {receiver_text} else {{\n $2\n}};\n$0"), + &format!( + "let Some({placeholder}) = {receiver_text} else {{\n $2\n}};\n$0" + ), ) .add_to(acc, ctx.db); postfix_snippet( "while", "while let Some {}", - &format!("while let Some($1) = {receiver_text} {{\n $0\n}}"), + &format!("while let Some({placeholder}) = {receiver_text} {{\n $0\n}}"), ) .add_to(acc, ctx.db); } @@ -302,6 +314,34 @@ pub(crate) fn complete_postfix( } } +fn suggest_receiver_name( + receiver: &ast::Expr, + n: &str, + sema: &Semantics<'_, RootDatabase>, +) -> SmolStr { + let placeholder = |name| format_smolstr!("${{{n}:{name}}}"); + + match receiver { + ast::Expr::PathExpr(path) => { + if let Some(name) = path.path().and_then(|it| it.as_single_name_ref()) { + return placeholder(name.text().as_str()); + } + } + ast::Expr::RefExpr(it) => { + if let Some(receiver) = it.expr() { + return suggest_receiver_name(&receiver, n, sema); + } + } + _ => {} + } + + let name = NameGenerator::new_with_names([].into_iter()).try_for_variable(receiver, sema); + match name { + Some(name) => placeholder(&name), + None => format_smolstr!("${n}"), + } +} + fn get_receiver_text( sema: &Semantics<'_, RootDatabase>, receiver: &ast::Expr, @@ -616,7 +656,7 @@ fn main() { r#" fn main() { let bar = Some(true); - if let Some($1) = bar { + if let Some(${1:bar}) = bar { $0 } } @@ -666,7 +706,7 @@ fn main() { r#" fn main() { let bar = Some(true); - if let Some($0) = bar + if let Some(${0:bar}) = bar } "#, ); @@ -682,7 +722,7 @@ fn main() { r#" fn main() { let bar = Some(true); - if true && let Some($0) = bar + if true && let Some(${0:bar}) = bar } "#, ); @@ -698,7 +738,7 @@ fn main() { r#" fn main() { let bar = Some(true); - if true && true && let Some($0) = bar + if true && true && let Some(${0:bar}) = bar } "#, ); @@ -718,7 +758,7 @@ fn main() { r#" fn main() { let bar = Some(true); - let Some($1) 
= bar else { + let Some(${1:bar}) = bar else { $2 }; $0 @@ -792,7 +832,7 @@ fn main() { r#" fn main() { let bar = &Some(true); - if let Some($1) = bar { + if let Some(${1:bar}) = bar { $0 } } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs index ffffc2635e03c..963e39670473d 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs @@ -4,7 +4,7 @@ mod analysis; #[cfg(test)] mod tests; -use std::{iter, ops::ControlFlow}; +use std::iter; use base_db::RootQueryDb as _; use hir::{ @@ -21,7 +21,6 @@ use syntax::{ SyntaxKind::{self, *}, SyntaxToken, T, TextRange, TextSize, ast::{self, AttrKind, NameOrNameRef}, - match_ast, }; use crate::{ @@ -157,7 +156,7 @@ pub(crate) struct PathExprCtx<'db> { pub(crate) after_amp: bool, /// The surrounding RecordExpression we are completing a functional update pub(crate) is_func_update: Option, - pub(crate) self_param: Option, + pub(crate) self_param: Option>>, pub(crate) innermost_ret_ty: Option>, pub(crate) innermost_breakable_ty: Option>, pub(crate) impl_: Option, @@ -817,48 +816,20 @@ impl<'db> CompletionContext<'db> { .extend(exclude_traits.iter().map(|&t| (t.into(), AutoImportExclusionType::Always))); // FIXME: This should be part of `CompletionAnalysis` / `expand_and_analyze` - let complete_semicolon = if config.add_semicolon_to_unit { - let inside_closure_ret = token.parent_ancestors().try_for_each(|ancestor| { - match_ast! { - match ancestor { - ast::BlockExpr(_) => ControlFlow::Break(false), - ast::ClosureExpr(_) => ControlFlow::Break(true), - _ => ControlFlow::Continue(()) - } - } - }); - - if inside_closure_ret == ControlFlow::Break(true) { - CompleteSemicolon::DoNotComplete - } else { - let next_non_trivia_token = - std::iter::successors(token.next_token(), |it| it.next_token()) - .find(|it| !it.kind().is_trivia()); - let in_match_arm = token.parent_ancestors().try_for_each(|ancestor| { - if ast::MatchArm::can_cast(ancestor.kind()) { - ControlFlow::Break(true) - } else if matches!( - ancestor.kind(), - SyntaxKind::EXPR_STMT | SyntaxKind::BLOCK_EXPR - ) { - ControlFlow::Break(false) - } else { - ControlFlow::Continue(()) - } - }); - // FIXME: This will assume expr macros are not inside match, we need to somehow go to the "parent" of the root node. 
- let in_match_arm = match in_match_arm { - ControlFlow::Continue(()) => false, - ControlFlow::Break(it) => it, - }; - let complete_token = if in_match_arm { T![,] } else { T![;] }; - if next_non_trivia_token.map(|it| it.kind()) == Some(complete_token) { - CompleteSemicolon::DoNotComplete - } else if in_match_arm { - CompleteSemicolon::CompleteComma - } else { - CompleteSemicolon::CompleteSemi - } + let complete_semicolon = if !config.add_semicolon_to_unit { + CompleteSemicolon::DoNotComplete + } else if let Some(term_node) = + sema.token_ancestors_with_macros(token.clone()).find(|node| { + matches!(node.kind(), BLOCK_EXPR | MATCH_ARM | CLOSURE_EXPR | ARG_LIST | PAREN_EXPR) + }) + { + let next_token = iter::successors(token.next_token(), |it| it.next_token()) + .map(|it| it.kind()) + .find(|kind| !kind.is_trivia()); + match term_node.kind() { + MATCH_ARM if next_token != Some(T![,]) => CompleteSemicolon::CompleteComma, + BLOCK_EXPR if next_token != Some(T![;]) => CompleteSemicolon::CompleteSemi, + _ => CompleteSemicolon::DoNotComplete, } } else { CompleteSemicolon::DoNotComplete diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs index add637a16fdf7..49fb36ad04f89 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs @@ -2,7 +2,9 @@ use std::iter; use hir::{ExpandResult, InFile, Semantics, Type, TypeInfo, Variant}; -use ide_db::{RootDatabase, active_parameter::ActiveParameter}; +use ide_db::{ + RootDatabase, active_parameter::ActiveParameter, syntax_helpers::node_ext::find_loops, +}; use itertools::Either; use stdx::always; use syntax::{ @@ -88,15 +90,9 @@ pub(super) fn expand_and_analyze<'db>( let original_offset = expansion.original_offset + relative_offset; let token = expansion.original_file.token_at_offset(original_offset).left_biased()?; - hir::attach_db(sema.db, || analyze(sema, expansion, original_token, &token)).map( - |(analysis, expected, qualifier_ctx)| AnalysisResult { - analysis, - expected, - qualifier_ctx, - token, - original_offset, - }, - ) + analyze(sema, expansion, original_token, &token).map(|(analysis, expected, qualifier_ctx)| { + AnalysisResult { analysis, expected, qualifier_ctx, token, original_offset } + }) } fn token_at_offset_ignore_whitespace(file: &SyntaxNode, offset: TextSize) -> Option { @@ -600,10 +596,26 @@ fn expected_type_and_name<'db>( Some(it) => it, None => return ty, }; - for _ in top_syn.ancestors().skip(1).map_while(ast::RefExpr::cast) { + let refs_level = top_syn + .ancestors() + .skip(1) + .map_while(Either::::cast) + .take_while(|it| match it { + Either::Left(_) => true, + Either::Right(prefix) => prefix.op_kind() == Some(ast::UnaryOp::Deref), + }) + .fold(0i32, |level, expr| match expr { + Either::Left(_) => level + 1, + Either::Right(_) => level - 1, + }); + for _ in 0..refs_level { cov_mark::hit!(expected_type_fn_param_ref); ty = ty.strip_reference(); } + for _ in refs_level..0 { + cov_mark::hit!(expected_type_fn_param_deref); + ty = ty.add_reference(hir::Mutability::Shared); + } ty } _ => ty, @@ -718,6 +730,23 @@ fn expected_type_and_name<'db>( }.map(TypeInfo::original); (ty, None) }, + ast::MatchArm(it) => { + let on_arrow = previous_non_trivia_token(token.clone()).is_some_and(|it| T![=>] == it.kind()); + let in_body = it.expr().is_some_and(|it| it.syntax().text_range().contains_range(token.text_range())); + let match_expr = 
it.parent_match(); + + let ty = if on_arrow || in_body { + // match foo { ..., pat => $0 } + cov_mark::hit!(expected_type_match_arm_body_without_leading_char); + cov_mark::hit!(expected_type_match_arm_body_with_leading_char); + sema.type_of_expr(&match_expr.into()) + } else { + // match foo { $0 } + cov_mark::hit!(expected_type_match_arm_without_leading_char); + match_expr.expr().and_then(|e| sema.type_of_expr(&e)) + }.map(TypeInfo::original); + (ty, None) + }, ast::IfExpr(it) => { let ty = if let Some(body) = it.then_branch() && token.text_range().end() > body.syntax().text_range().start() @@ -752,6 +781,12 @@ fn expected_type_and_name<'db>( }); (ty, None) }, + ast::BreakExpr(it) => { + let ty = it.break_token() + .and_then(|it| find_loops(sema, &it)?.next()) + .and_then(|expr| sema.type_of_expr(&expr)); + (ty.map(TypeInfo::original), None) + }, ast::ClosureExpr(it) => { let ty = sema.type_of_expr(&it.into()); ty.and_then(|ty| ty.original.as_callable(sema.db)) @@ -1286,10 +1321,26 @@ fn classify_name_ref<'db>( ) } }; - let find_fn_self_param = |it| match it { - ast::Item::Fn(fn_) => Some(sema.to_def(&fn_).and_then(|it| it.self_param(sema.db))), - ast::Item::MacroCall(_) => None, - _ => Some(None), + let fn_self_param = + |fn_: ast::Fn| sema.to_def(&fn_).and_then(|it| it.self_param(sema.db)); + let closure_this_param = |closure: ast::ClosureExpr| { + if closure.param_list()?.params().next()?.pat()?.syntax().text() != "this" { + return None; + } + sema.type_of_expr(&closure.into()) + .and_then(|it| it.original.as_callable(sema.db)) + .and_then(|it| it.params().into_iter().next()) + }; + let find_fn_self_param = |it: SyntaxNode| { + match_ast! { + match it { + ast::Fn(fn_) => Some(fn_self_param(fn_).map(Either::Left)), + ast::ClosureExpr(f) => closure_this_param(f).map(Either::Right).map(Some), + ast::MacroCall(_) => None, + ast::Item(_) => Some(None), + _ => None, + } + } }; match find_node_in_file_compensated(sema, original_file, &expr) { @@ -1302,7 +1353,6 @@ fn classify_name_ref<'db>( let self_param = sema .ancestors_with_macros(it.syntax().clone()) - .filter_map(ast::Item::cast) .find_map(find_fn_self_param) .flatten(); (innermost_ret_ty, self_param) @@ -2017,7 +2067,8 @@ fn prev_special_biased_token_at_trivia(mut token: SyntaxToken) -> SyntaxToken { | T![|] | T![return] | T![break] - | T![continue] = prev.kind() + | T![continue] + | T![lifetime_ident] = prev.kind() { token = prev } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs index 41f0db3c52823..e97d9720e3f34 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs @@ -146,6 +146,18 @@ fn bar(x: &u32) {} ); } +#[test] +fn expected_type_fn_param_deref() { + cov_mark::check!(expected_type_fn_param_deref); + check_expected_type_and_name( + r#" +fn foo() { bar(*$0); } +fn bar(x: &u32) {} +"#, + expect!["ty: &'_ &'_ u32, name: x"], + ); +} + #[test] fn expected_type_struct_field_without_leading_char() { cov_mark::check!(expected_type_struct_field_without_leading_char); @@ -244,6 +256,22 @@ fn foo() -> Foo { ); } +#[test] +fn expected_type_match_arm_block_body_without_leading_char() { + cov_mark::check!(expected_type_match_arm_body_without_leading_char); + cov_mark::check!(expected_type_match_arm_body_with_leading_char); + check_expected_type_and_name( + r#" +struct Foo; +enum E { X } +fn foo() -> Foo { + match E::X { Foo::X => { $0 } } +} +"#, 
+ expect![[r#"ty: Foo, name: ?"#]], + ); +} + #[test] fn expected_type_match_body_arm_with_leading_char() { cov_mark::check!(expected_type_match_arm_body_with_leading_char); @@ -607,6 +635,125 @@ fn foo() { ); } +#[test] +fn expected_type_break_expr_in_loop() { + check_expected_type_and_name( + r#" +enum State { Stop } +fn foo() { + let _x: State = loop { + { + break State::Stop; + break $0; + } + }; +} +"#, + expect![[r#"ty: State, name: ?"#]], + ); + + check_expected_type_and_name( + r#" +enum State { Stop } +fn foo() { + let _x: State = 'a: loop { + { + break State::Stop; + break $0; + } + }; +} +"#, + expect![[r#"ty: State, name: ?"#]], + ); + + check_expected_type_and_name( + r#" +enum State { Stop } +fn foo() { + let _x: State = 'a: loop { + while true { + break $0; + } + }; +} +"#, + expect![[r#"ty: (), name: ?"#]], + ); +} + +#[test] +fn expected_type_break_expr_in_labeled_loop() { + check_expected_type_and_name( + r#" +enum State { Stop } +fn foo() { + let _x: State = 'a: loop { + let _y: i32 = loop { + { + break 'a State::Stop; + break 'a $0; + } + }; + }; +} +"#, + expect![[r#"ty: State, name: ?"#]], + ); + + check_expected_type_and_name( + r#" +enum State { Stop } +fn foo() { + let _x: State = 'a: loop { + let _y: i32 = loop { + while true { + break 'a State::Stop; + break 'a $0; + } + }; + }; +} +"#, + expect![[r#"ty: State, name: ?"#]], + ); + + check_expected_type_and_name( + r#" +enum State { Stop } +fn foo() { + 'a: while true { + let _x: State = loop { + break State::Stop; + break 'a $0; + }; + } +} +"#, + expect![[r#"ty: (), name: ?"#]], + ); +} + +#[test] +fn expected_type_break_expr_in_labeled_block() { + check_expected_type_and_name( + r#" +enum State { Stop } +fn foo() { + let _x: State = 'a: { + let _y: i32 = 'b: { + { + break 'a State::Stop; + break 'a $0; + }; + }; + }; +} +"#, + expect![[r#"ty: State, name: ?"#]], + ); +} + #[test] fn expected_type_logic_op() { check_expected_type_and_name( diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs index bc5589a64550b..765304d8187de 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs @@ -604,6 +604,14 @@ fn compute_type_match( return None; } + // &mut ty -> &ty + if completion_ty.is_mutable_reference() + && let Some(expected_type) = expected_type.remove_ref() + && let Some(completion_ty) = completion_ty.remove_ref() + { + return match_types(ctx, &expected_type, &completion_ty); + } + match_types(ctx, expected_type, completion_ty) } @@ -622,6 +630,8 @@ fn compute_ref_match( return None; } if let Some(expected_without_ref) = &expected_without_ref + && (completion_without_ref.is_none() + || completion_ty.could_unify_with(ctx.db, expected_without_ref)) && completion_ty.autoderef(ctx.db).any(|ty| ty == *expected_without_ref) { cov_mark::hit!(suggest_ref); @@ -2049,6 +2059,17 @@ fn go(world: &WorldSnapshot) { go(w$0) } ); } + #[test] + fn prioritize_mutable_ref_as_immutable_ref_match() { + check_relevance( + r#"fn foo(r: &mut i32) -> &i32 { $0 }"#, + expect![[r#" + lc r &mut i32 [type+local] + fn foo(…) fn(&mut i32) -> &i32 [type] + "#]], + ); + } + #[test] fn too_many_arguments() { cov_mark::check!(too_many_arguments); @@ -2212,6 +2233,24 @@ fn main() { fn main() fn() [] "#]], ); + check_relevance( + r#" +struct S; +fn foo(s: &&S) {} +fn main() { + let mut ssss = &S; + foo($0); +} + "#, + expect![[r#" + st S S [] + lc ssss &S [local] + lc &ssss [type+local] 
+ st S S [] + fn foo(…) fn(&&S) [] + fn main() fn() [] + "#]], + ); } #[test] @@ -3240,6 +3279,48 @@ impl S { ) } + #[test] + fn field_access_includes_closure_this_param() { + check_edit( + "length", + r#" +//- minicore: fn +struct S { + length: i32 +} + +impl S { + fn pack(&mut self, f: impl FnOnce(&mut Self, i32)) { + self.length += 1; + f(self, 3); + self.length -= 1; + } + + fn some_fn(&mut self) { + self.pack(|this, n| len$0); + } +} +"#, + r#" +struct S { + length: i32 +} + +impl S { + fn pack(&mut self, f: impl FnOnce(&mut Self, i32)) { + self.length += 1; + f(self, 3); + self.length -= 1; + } + + fn some_fn(&mut self) { + self.pack(|this, n| this.length); + } +} +"#, + ) + } + #[test] fn notable_traits_method_relevance() { check_kinds( diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs index 3235323b3a590..4713b1f1afa77 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs @@ -884,6 +884,27 @@ fn baz(_: impl FnOnce()) {} fn bar() { baz(|| foo()$0); } +"#, + ); + } + + #[test] + fn no_semicolon_in_arg_list() { + check_edit( + r#"foo"#, + r#" +fn foo() {} +fn baz(_: impl FnOnce()) {} +fn bar() { + baz(fo$0); +} +"#, + r#" +fn foo() {} +fn baz(_: impl FnOnce()) {} +fn bar() { + baz(foo()$0); +} "#, ); } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs index 35fe407b2e68f..6efa8a84553e5 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs @@ -46,12 +46,17 @@ fn render( ctx.source_range() }; - let (name, escaped_name) = - (name.as_str(), name.display(ctx.db(), completion.edition).to_smolstr()); + let orig_name = macro_.name(ctx.db()); + let (name, orig_name, escaped_name) = ( + name.as_str(), + orig_name.as_str(), + name.display(ctx.db(), completion.edition).to_smolstr(), + ); let docs = ctx.docs(macro_); let docs_str = docs.as_ref().map(Documentation::as_str).unwrap_or_default(); let is_fn_like = macro_.is_fn_like(completion.db); - let (bra, ket) = if is_fn_like { guess_macro_braces(name, docs_str) } else { ("", "") }; + let (bra, ket) = + if is_fn_like { guess_macro_braces(name, orig_name, docs_str) } else { ("", "") }; let needs_bang = is_fn_like && !is_use_path && !has_macro_bang; @@ -109,9 +114,13 @@ fn banged_name(name: &str) -> SmolStr { SmolStr::from_iter([name, "!"]) } -fn guess_macro_braces(macro_name: &str, docs: &str) -> (&'static str, &'static str) { +fn guess_macro_braces( + macro_name: &str, + orig_name: &str, + docs: &str, +) -> (&'static str, &'static str) { let mut votes = [0, 0, 0]; - for (idx, s) in docs.match_indices(¯o_name) { + for (idx, s) in docs.match_indices(macro_name).chain(docs.match_indices(orig_name)) { let (before, after) = (&docs[..idx], &docs[idx + s.len()..]); // Ensure to match the full word if after.starts_with('!') @@ -240,7 +249,25 @@ fn main() { $0 } macro_rules! foo { () => {} } fn main() { foo! {$0} } "#, - ) + ); + + check_edit( + "bar!", + r#" +/// `foo![]` +#[macro_export] +macro_rules! foo { () => {} } +pub use crate::foo as bar; +fn main() { $0 } +"#, + r#" +/// `foo![]` +#[macro_export] +macro_rules! 
foo { () => {} } +pub use crate::foo as bar; +fn main() { bar![$0] } +"#, + ); } #[test] diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs index cf86618de69e8..c9755525a5ded 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs @@ -16,11 +16,11 @@ fn check_with_config( expect: Expect, ) { let (db, position) = crate::tests::position(ra_fixture); - let (ctx, analysis) = - crate::context::CompletionContext::new(&db, position, &config, None).unwrap(); + hir::attach_db(&db, || { + let (ctx, analysis) = + crate::context::CompletionContext::new(&db, position, &config, None).unwrap(); - let mut acc = crate::completions::Completions::default(); - hir::attach_db(ctx.db, || { + let mut acc = crate::completions::Completions::default(); if let CompletionAnalysis::Name(NameContext { kind: NameKind::IdentPat(pat_ctx), .. }) = &analysis { @@ -42,9 +42,9 @@ fn check_with_config( _ => (), } } - }); - expect.assert_eq(&super::render_completion_list(Vec::from(acc))); + expect.assert_eq(&super::render_completion_list(Vec::from(acc))); + }); } #[test] diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs index a48438cfa86fd..1d865892a22b5 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs @@ -1345,7 +1345,7 @@ impl ReferenceCategory { // If the variable or field ends on the LHS's end then it's a Write // (covers fields and locals). FIXME: This is not terribly accurate. if let Some(lhs) = expr.lhs() - && lhs.syntax().text_range().end() == r.syntax().text_range().end() { + && lhs.syntax().text_range().contains_range(r.syntax().text_range()) { return Some(ReferenceCategory::WRITE) } } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs index e1d140730edce..acce066b8323f 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs @@ -419,6 +419,48 @@ pub fn eq_label_lt(lt1: &Option, lt2: &Option) -> lt1.as_ref().zip(lt2.as_ref()).is_some_and(|(lt, lbl)| lt.text() == lbl.text()) } +/// Find the loop or block to break or continue, multiple results may be caused by macros. +pub fn find_loops( + sema: &hir::Semantics<'_, crate::RootDatabase>, + token: &syntax::SyntaxToken, +) -> Option> { + let parent = token.parent()?; + let lbl = syntax::match_ast! 
{ + match parent { + ast::BreakExpr(break_) => break_.lifetime(), + ast::ContinueExpr(continue_) => continue_.lifetime(), + _ => None, + } + }; + let label_matches = + move |it: Option| match (lbl.as_ref(), it.and_then(|it| it.lifetime())) { + (Some(lbl), Some(it)) => lbl.text() == it.text(), + (None, _) => true, + (Some(_), None) => false, + }; + + let find_ancestors = move |token| { + for anc in sema.token_ancestors_with_macros(token).filter_map(ast::Expr::cast) { + let node = match &anc { + ast::Expr::LoopExpr(loop_) if label_matches(loop_.label()) => anc, + ast::Expr::WhileExpr(while_) if label_matches(while_.label()) => anc, + ast::Expr::ForExpr(for_) if label_matches(for_.label()) => anc, + ast::Expr::BlockExpr(blk) + if blk.label().is_some() && label_matches(blk.label()) => + { + anc + } + _ => continue, + }; + + return Some(node); + } + None + }; + + sema.descend_into_macros(token.clone()).into_iter().filter_map(find_ancestors).into() +} + struct TreeWithDepthIterator { preorder: Preorder, depth: u32, diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/suggest_name.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/suggest_name.rs index 1a0ef55a8b259..273328a8d2700 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/suggest_name.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/suggest_name.rs @@ -197,10 +197,19 @@ impl NameGenerator { expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>, ) -> SmolStr { + self.try_for_variable(expr, sema).unwrap_or(SmolStr::new_static("var_name")) + } + + /// Similar to `for_variable`, but fallback returns `None` + pub fn try_for_variable( + &mut self, + expr: &ast::Expr, + sema: &Semantics<'_, RootDatabase>, + ) -> Option { // `from_param` does not benefit from stripping it need the largest // context possible so we check firstmost if let Some(name) = from_param(expr, sema) { - return self.suggest_name(&name); + return Some(self.suggest_name(&name)); } let mut next_expr = Some(expr.clone()); @@ -209,7 +218,7 @@ impl NameGenerator { .or_else(|| from_type(&expr, sema)) .or_else(|| from_field_name(&expr)); if let Some(name) = name { - return self.suggest_name(&name); + return Some(self.suggest_name(&name)); } match expr { @@ -229,7 +238,7 @@ impl NameGenerator { } } - self.suggest_name("var_name") + None } /// Insert a name into the pool diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs index b7ec8fa53fa72..52a2f44fd0f8c 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs @@ -387,6 +387,46 @@ struct S { field : u32 } fn f(S { field }: error) { // ^^^^^ 💡 warn: unused variable } +"#, + ); + } + + #[test] + fn crate_attrs_lint_smoke_test() { + check_diagnostics( + r#" +//- /lib.rs crate:foo crate-attr:deny(unused_variables) +fn main() { + let x = 2; + //^ 💡 error: unused variable +} +"#, + ); + } + + #[test] + fn crate_attrs_should_not_override_lints_in_source() { + check_diagnostics( + r#" +//- /lib.rs crate:foo crate-attr:allow(unused_variables) +#![deny(unused_variables)] +fn main() { + let x = 2; + //^ 💡 error: unused variable +} +"#, + ); + } + + #[test] + fn crate_attrs_should_preserve_lint_order() { + check_diagnostics( + r#" +//- /lib.rs crate:foo crate-attr:allow(unused_variables) crate-attr:warn(unused_variables) +fn main() { + let x 
= 2; + //^ 💡 warn: unused variable +} "#, ); } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs index 2b8474c3163b7..0b321442497bd 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs @@ -485,7 +485,7 @@ pub fn semantic_diagnostics( // The edition isn't accurate (each diagnostics may have its own edition due to macros), // but it's okay as it's only being used for error recovery. - handle_lints(&ctx.sema, krate, &mut lints, editioned_file_id.edition(db)); + handle_lints(&ctx.sema, file_id, krate, &mut lints, editioned_file_id.edition(db)); res.retain(|d| d.severity != Severity::Allow); @@ -593,6 +593,7 @@ fn build_lints_map( fn handle_lints( sema: &Semantics<'_, RootDatabase>, + file_id: FileId, krate: hir::Crate, diagnostics: &mut [(InFile, &mut Diagnostic)], edition: Edition, @@ -609,10 +610,10 @@ fn handle_lints( } let mut diag_severity = - lint_severity_at(sema, krate, node, &lint_groups(&diag.code, edition)); + lint_severity_at(sema, file_id, krate, node, &lint_groups(&diag.code, edition)); if let outline_diag_severity @ Some(_) = - find_outline_mod_lint_severity(sema, krate, node, diag, edition) + find_outline_mod_lint_severity(sema, file_id, krate, node, diag, edition) { diag_severity = outline_diag_severity; } @@ -635,6 +636,7 @@ fn default_lint_severity(lint: &Lint, edition: Edition) -> Severity { fn find_outline_mod_lint_severity( sema: &Semantics<'_, RootDatabase>, + file_id: FileId, krate: hir::Crate, node: &InFile, diag: &Diagnostic, @@ -651,6 +653,7 @@ fn find_outline_mod_lint_severity( let lint_groups = lint_groups(&diag.code, edition); lint_attrs( sema, + file_id, krate, ast::AnyHasAttrs::cast(module_source_file.value).expect("SourceFile always has attrs"), ) @@ -659,6 +662,7 @@ fn find_outline_mod_lint_severity( fn lint_severity_at( sema: &Semantics<'_, RootDatabase>, + file_id: FileId, krate: hir::Crate, node: &InFile, lint_groups: &LintGroups, @@ -667,21 +671,28 @@ fn lint_severity_at( .ancestors() .filter_map(ast::AnyHasAttrs::cast) .find_map(|ancestor| { - lint_attrs(sema, krate, ancestor) + lint_attrs(sema, file_id, krate, ancestor) .find_map(|(lint, severity)| lint_groups.contains(&lint).then_some(severity)) }) .or_else(|| { - lint_severity_at(sema, krate, &sema.find_parent_file(node.file_id)?, lint_groups) + lint_severity_at( + sema, + file_id, + krate, + &sema.find_parent_file(node.file_id)?, + lint_groups, + ) }) } // FIXME: Switch this to analysis' `expand_cfg_attr`. 
fn lint_attrs( sema: &Semantics<'_, RootDatabase>, + file_id: FileId, krate: hir::Crate, ancestor: ast::AnyHasAttrs, ) -> impl Iterator { - sema.lint_attrs(krate, ancestor).rev().map(|(lint_attr, lint)| { + sema.lint_attrs(file_id, krate, ancestor).rev().map(|(lint_attr, lint)| { let severity = match lint_attr { hir::LintAttr::Allow | hir::LintAttr::Expect => Severity::Allow, hir::LintAttr::Warn => Severity::Warning, diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs index 0ed91cf7f5885..d854c1c450449 100644 --- a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs +++ b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs @@ -8,7 +8,7 @@ mod intra_doc_links; use std::ops::Range; use pulldown_cmark::{BrokenLink, CowStr, Event, InlineStr, LinkType, Options, Parser, Tag}; -use pulldown_cmark_to_cmark::{Options as CMarkOptions, cmark_resume_with_options}; +use pulldown_cmark_to_cmark::{Options as CMarkOptions, cmark_with_options}; use stdx::format_to; use url::Url; @@ -89,10 +89,9 @@ pub(crate) fn rewrite_links( } }); let mut out = String::new(); - cmark_resume_with_options( + cmark_with_options( doc, &mut out, - None, CMarkOptions { code_block_token_count: 3, ..Default::default() }, ) .ok(); @@ -125,10 +124,9 @@ pub(crate) fn remove_links(markdown: &str) -> String { }); let mut out = String::new(); - cmark_resume_with_options( + cmark_with_options( doc, &mut out, - None, CMarkOptions { code_block_token_count: 3, ..Default::default() }, ) .ok(); diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs index 34ffc11c4b5f3..a61a6c677f655 100644 --- a/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs @@ -658,6 +658,21 @@ pub struct B$0ar ); } +#[test] +fn rewrite_html_root_url_using_crate_attr() { + check_rewrite( + r#" +//- /main.rs crate:foo crate-attr:doc(arbitrary_attribute="test",html_root_url="https:/example.com",arbitrary_attribute2) +pub mod foo { + pub struct Foo; +} +/// [Foo](foo::Foo) +pub struct B$0ar +"#, + expect![[r#"[Foo](https://example.com/foo/foo/struct.Foo.html)"#]], + ); +} + #[test] fn rewrite_on_field() { check_rewrite( @@ -724,7 +739,10 @@ pub struct $0Foo; /// [`foo`]: Foo pub struct $0Foo; "#, - expect![["[`foo`]"]], + expect![[r#" + [`foo`] + + [`foo`]: https://docs.rs/foo/*/foo/struct.Foo.html"#]], ); } diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs index b00aa4d0ca67f..c0a74380810b7 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs @@ -16,15 +16,12 @@ use ide_db::{ defs::{Definition, IdentClass}, famous_defs::FamousDefs, helpers::pick_best_token, + syntax_helpers::node_ext::find_loops, }; use itertools::Itertools; use span::FileId; use syntax::{ - AstNode, AstToken, - SyntaxKind::*, - SyntaxNode, SyntaxToken, T, TextRange, - ast::{self, HasLoopBody}, - match_ast, + AstNode, AstToken, SyntaxKind::*, SyntaxNode, SyntaxToken, T, TextRange, ast, match_ast, }; #[derive(Debug)] @@ -510,51 +507,6 @@ fn nav_for_branch_exit_points( Some(navs) } -pub(crate) fn find_loops( - sema: &Semantics<'_, RootDatabase>, - token: &SyntaxToken, -) -> Option> { - let parent = token.parent()?; - let lbl = match_ast! 
{ - match parent { - ast::BreakExpr(break_) => break_.lifetime(), - ast::ContinueExpr(continue_) => continue_.lifetime(), - _ => None, - } - }; - let label_matches = - |it: Option| match (lbl.as_ref(), it.and_then(|it| it.lifetime())) { - (Some(lbl), Some(it)) => lbl.text() == it.text(), - (None, _) => true, - (Some(_), None) => false, - }; - - let find_ancestors = |token: SyntaxToken| { - for anc in sema.token_ancestors_with_macros(token).filter_map(ast::Expr::cast) { - let node = match &anc { - ast::Expr::LoopExpr(loop_) if label_matches(loop_.label()) => anc, - ast::Expr::WhileExpr(while_) if label_matches(while_.label()) => anc, - ast::Expr::ForExpr(for_) if label_matches(for_.label()) => anc, - ast::Expr::BlockExpr(blk) - if blk.label().is_some() && label_matches(blk.label()) => - { - anc - } - _ => continue, - }; - - return Some(node); - } - None - }; - - sema.descend_into_macros(token.clone()) - .into_iter() - .filter_map(find_ancestors) - .collect_vec() - .into() -} - fn nav_for_break_points( sema: &Semantics<'_, RootDatabase>, token: &SyntaxToken, @@ -562,7 +514,6 @@ fn nav_for_break_points( let db = sema.db; let navs = find_loops(sema, token)? - .into_iter() .filter_map(|expr| { let file_id = sema.hir_file_for(expr.syntax()); let expr_in_file = InFile::new(file_id, expr.clone()); diff --git a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs index acba573cc00e7..fce033382b4bf 100644 --- a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs +++ b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs @@ -7,8 +7,8 @@ use ide_db::{ helpers::pick_best_token, search::{FileReference, ReferenceCategory, SearchScope}, syntax_helpers::node_ext::{ - eq_label_lt, for_each_tail_expr, full_path_of_name_ref, is_closure_or_blk_with_modif, - preorder_expr_with_ctx_checker, + eq_label_lt, find_loops, for_each_tail_expr, full_path_of_name_ref, + is_closure_or_blk_with_modif, preorder_expr_with_ctx_checker, }, }; use syntax::{ @@ -562,7 +562,7 @@ pub(crate) fn highlight_break_points( Some(highlights) } - let Some(loops) = goto_definition::find_loops(sema, &token) else { + let Some(loops) = find_loops(sema, &token) else { return FxHashMap::default(); }; diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs index 03674978d53d7..f57f2883b1c38 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs @@ -108,16 +108,14 @@ pub(crate) fn inlay_hints( } }; let mut preorder = file.preorder(); - hir::attach_db(sema.db, || { - while let Some(event) = preorder.next() { - if matches!((&event, range_limit), (WalkEvent::Enter(node), Some(range)) if range.intersect(node.text_range()).is_none()) - { - preorder.skip_subtree(); - continue; - } - hints(event); + while let Some(event) = preorder.next() { + if matches!((&event, range_limit), (WalkEvent::Enter(node), Some(range)) if range.intersect(node.text_range()).is_none()) + { + preorder.skip_subtree(); + continue; } - }); + hints(event); + } if let Some(range_limit) = range_limit { acc.retain(|hint| range_limit.contains_range(hint.range)); } @@ -307,6 +305,7 @@ pub struct InlayHintsConfig<'a> { pub sized_bound: bool, pub discriminant_hints: DiscriminantHints, pub parameter_hints: bool, + pub parameter_hints_for_missing_arguments: bool, pub generic_parameter_hints: GenericParameterHints, pub chaining_hints: bool, pub adjustment_hints: 
AdjustmentHints, @@ -748,46 +747,44 @@ fn label_of_ty( config: &InlayHintsConfig<'_>, display_target: DisplayTarget, ) -> Result<(), HirDisplayError> { - hir::attach_db(sema.db, || { - let iter_item_type = hint_iterator(sema, famous_defs, ty); - match iter_item_type { - Some((iter_trait, item, ty)) => { - const LABEL_START: &str = "impl "; - const LABEL_ITERATOR: &str = "Iterator"; - const LABEL_MIDDLE: &str = "<"; - const LABEL_ITEM: &str = "Item"; - const LABEL_MIDDLE2: &str = " = "; - const LABEL_END: &str = ">"; - - max_length = max_length.map(|len| { - len.saturating_sub( - LABEL_START.len() - + LABEL_ITERATOR.len() - + LABEL_MIDDLE.len() - + LABEL_MIDDLE2.len() - + LABEL_END.len(), - ) - }); - - label_builder.write_str(LABEL_START)?; - label_builder.start_location_link(ModuleDef::from(iter_trait).into()); - label_builder.write_str(LABEL_ITERATOR)?; - label_builder.end_location_link(); - label_builder.write_str(LABEL_MIDDLE)?; - label_builder.start_location_link(ModuleDef::from(item).into()); - label_builder.write_str(LABEL_ITEM)?; - label_builder.end_location_link(); - label_builder.write_str(LABEL_MIDDLE2)?; - rec(sema, famous_defs, max_length, &ty, label_builder, config, display_target)?; - label_builder.write_str(LABEL_END)?; - Ok(()) - } - None => ty - .display_truncated(sema.db, max_length, display_target) - .with_closure_style(config.closure_style) - .write_to(label_builder), + let iter_item_type = hint_iterator(sema, famous_defs, ty); + match iter_item_type { + Some((iter_trait, item, ty)) => { + const LABEL_START: &str = "impl "; + const LABEL_ITERATOR: &str = "Iterator"; + const LABEL_MIDDLE: &str = "<"; + const LABEL_ITEM: &str = "Item"; + const LABEL_MIDDLE2: &str = " = "; + const LABEL_END: &str = ">"; + + max_length = max_length.map(|len| { + len.saturating_sub( + LABEL_START.len() + + LABEL_ITERATOR.len() + + LABEL_MIDDLE.len() + + LABEL_MIDDLE2.len() + + LABEL_END.len(), + ) + }); + + label_builder.write_str(LABEL_START)?; + label_builder.start_location_link(ModuleDef::from(iter_trait).into()); + label_builder.write_str(LABEL_ITERATOR)?; + label_builder.end_location_link(); + label_builder.write_str(LABEL_MIDDLE)?; + label_builder.start_location_link(ModuleDef::from(item).into()); + label_builder.write_str(LABEL_ITEM)?; + label_builder.end_location_link(); + label_builder.write_str(LABEL_MIDDLE2)?; + rec(sema, famous_defs, max_length, &ty, label_builder, config, display_target)?; + label_builder.write_str(LABEL_END)?; + Ok(()) } - }) + None => ty + .display_truncated(sema.db, max_length, display_target) + .with_closure_style(config.closure_style) + .write_to(label_builder), + } } let mut label_builder = InlayHintLabelBuilder { @@ -886,6 +883,7 @@ mod tests { render_colons: false, type_hints: false, parameter_hints: false, + parameter_hints_for_missing_arguments: false, sized_bound: false, generic_parameter_hints: GenericParameterHints { type_hints: false, diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs index b2584b6f75d20..283ec29dc05b0 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs @@ -216,15 +216,13 @@ pub(super) fn hints( text: if postfix { format!(".{}", text.trim_end()) } else { text.to_owned() }, linked_location: None, tooltip: Some(config.lazy_tooltip(|| { - hir::attach_db(sema.db, || { - InlayTooltip::Markdown(format!( - "`{}` → `{}`\n\n**{}**\n\n{}", - 
source.display(sema.db, display_target), - target.display(sema.db, display_target), - coercion, - detailed_tooltip - )) - }) + InlayTooltip::Markdown(format!( + "`{}` → `{}`\n\n**{}**\n\n{}", + source.display(sema.db, display_target), + target.display(sema.db, display_target), + coercion, + detailed_tooltip + )) })), }; if postfix { &mut post } else { &mut pre }.label.append_part(label); diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs index 951a672d4b793..1317684a08779 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs @@ -46,7 +46,7 @@ pub(super) fn hints( if !place.projection.is_empty() { continue; // Ignore complex cases for now } - if mir.locals[place.local].ty.as_adt().is_none() { + if mir.locals[place.local].ty.as_ref().as_adt().is_none() { continue; // Arguably only ADTs have significant drop impls } let Some(&binding_idx) = local_to_binding.get(place.local) else { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs index 8d03487673167..f1e62a5ab8ac3 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs @@ -11,6 +11,7 @@ use hir::{EditionedFileId, Semantics}; use ide_db::{RootDatabase, famous_defs::FamousDefs}; use stdx::to_lower_snake_case; +use syntax::T; use syntax::ast::{self, AstNode, HasArgList, HasName, UnaryOp}; use crate::{InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, InlayKind}; @@ -88,9 +89,75 @@ pub(super) fn hints( }); acc.extend(hints); + + // Show hint for the next expected (missing) argument if enabled + if config.parameter_hints_for_missing_arguments { + let provided_args_count = arg_list.args().count(); + let params = callable.params(); + let total_params = params.len(); + + if provided_args_count < total_params + && let Some(next_param) = params.get(provided_args_count) + && let Some(param_name) = next_param.name(sema.db) + { + // Apply heuristics to hide obvious parameter hints + if should_hide_missing_param_hint(unary_function, function_name, param_name.as_str()) { + return Some(()); + } + + // Determine the position for the hint + if let Some(hint_range) = missing_arg_hint_position(&arg_list) { + let colon = if config.render_colons { ":" } else { "" }; + let label = InlayHintLabel::simple( + format!("{}{}", param_name.display(sema.db, krate.edition(sema.db)), colon), + None, + config.lazy_location_opt(|| { + let source = sema.source(next_param.clone())?; + let name_syntax = match source.value.as_ref() { + Either::Left(pat) => pat.name(), + Either::Right(param) => match param.pat()? { + ast::Pat::IdentPat(it) => it.name(), + _ => None, + }, + }?; + sema.original_range_opt(name_syntax.syntax()).map(|frange| { + ide_db::FileRange { + file_id: frange.file_id.file_id(sema.db), + range: frange.range, + } + }) + }), + ); + acc.push(InlayHint { + range: hint_range, + kind: InlayKind::Parameter, + label, + text_edit: None, + position: InlayHintPosition::Before, + pad_left: true, + pad_right: false, + resolve_parent: Some(expr.syntax().text_range()), + }); + } + } + } + Some(()) } +/// Determines the position where the hint for a missing argument should be placed. +/// Returns the range of the token where the hint should appear. 
+fn missing_arg_hint_position(arg_list: &ast::ArgList) -> Option { + // Always place the hint on the closing paren, so it appears before `)`. + // This way `foo()` becomes `foo(a)` visually with the hint. + arg_list + .syntax() + .children_with_tokens() + .filter_map(|it| it.into_token()) + .find(|t| t.kind() == T![')']) + .map(|t| t.text_range()) +} + fn get_callable<'db>( sema: &Semantics<'db, RootDatabase>, expr: &ast::Expr, @@ -153,6 +220,37 @@ fn should_hide_param_name_hint( is_argument_expr_similar_to_param_name(sema, argument, param_name) } +/// Determines whether to hide the parameter hint for a missing argument. +/// This is a simplified version of `should_hide_param_name_hint` that doesn't +/// require an actual argument expression. +fn should_hide_missing_param_hint( + unary_function: bool, + function_name: Option<&str>, + param_name: &str, +) -> bool { + let param_name = param_name.trim_matches('_'); + if param_name.is_empty() { + return true; + } + + if param_name.starts_with("ra_fixture") { + return true; + } + + if unary_function { + if let Some(function_name) = function_name + && is_param_name_suffix_of_fn_name(param_name, function_name) + { + return true; + } + if is_obvious_param(param_name) { + return true; + } + } + + false +} + /// Hide the parameter name of a unary function if it is a `_` - prefixed suffix of the function's name, or equal. /// /// `fn strip_suffix(suffix)` will be hidden. @@ -606,6 +704,103 @@ fn main() { // ^^^^^^ a_d_e baz(a.d.ec); // ^^^^^^ a_d_e +}"#, + ); + } + + #[track_caller] + fn check_missing_params(#[rust_analyzer::rust_fixture] ra_fixture: &str) { + check_with_config( + InlayHintsConfig { + parameter_hints: true, + parameter_hints_for_missing_arguments: true, + ..DISABLED_CONFIG + }, + ra_fixture, + ); + } + + #[test] + fn missing_param_hint_empty_call() { + // When calling foo() with no args, show hint for first param on the closing paren + check_missing_params( + r#" +fn foo(a: i32, b: i32) -> i32 { a + b } +fn main() { + foo(); + //^ a +}"#, + ); + } + + #[test] + fn missing_param_hint_after_first_arg() { + // foo(1,) - show hint for 'a' on '1', and 'b' on the trailing comma + check_missing_params( + r#" +fn foo(a: i32, b: i32) -> i32 { a + b } +fn main() { + foo(1,); + //^ a + //^ b +}"#, + ); + } + + #[test] + fn missing_param_hint_partial_args() { + // foo(1, 2,) - show hints for a, b on args, and c on trailing comma + check_missing_params( + r#" +fn foo(a: i32, b: i32, c: i32) -> i32 { a + b + c } +fn main() { + foo(1, 2,); + //^ a + //^ b + //^ c +}"#, + ); + } + + #[test] + fn missing_param_hint_method_call() { + // S.foo(1,) - show hint for 'a' on '1', and 'b' on trailing comma + check_missing_params( + r#" +struct S; +impl S { + fn foo(&self, a: i32, b: i32) -> i32 { a + b } +} +fn main() { + S.foo(1,); + //^ a + //^ b +}"#, + ); + } + + #[test] + fn missing_param_hint_no_hint_when_complete() { + // When all args provided, no missing hint - just regular param hints + check_missing_params( + r#" +fn foo(a: i32, b: i32) -> i32 { a + b } +fn main() { + foo(1, 2); + //^ a + //^ b +}"#, + ); + } + + #[test] + fn missing_param_hint_respects_heuristics() { + // The hint should be hidden if it matches heuristics (e.g., single param unary fn with same name) + check_missing_params( + r#" +fn foo(foo: i32) -> i32 { foo } +fn main() { + foo(); }"#, ); } diff --git a/src/tools/rust-analyzer/crates/ide/src/interpret.rs b/src/tools/rust-analyzer/crates/ide/src/interpret.rs index 791da00bb695b..3741822547e45 100644 --- 
a/src/tools/rust-analyzer/crates/ide/src/interpret.rs +++ b/src/tools/rust-analyzer/crates/ide/src/interpret.rs @@ -60,7 +60,7 @@ fn find_and_interpret(db: &RootDatabase, position: FilePosition) -> Option<(Dura pub(crate) fn render_const_eval_error( db: &RootDatabase, - e: ConstEvalError<'_>, + e: ConstEvalError, display_target: DisplayTarget, ) -> String { let span_formatter = |file_id, text_range: TextRange| { diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs index 94362649043d7..5e4d930393af1 100644 --- a/src/tools/rust-analyzer/crates/ide/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs @@ -73,7 +73,7 @@ use ide_db::{ }; use ide_db::{MiniCore, ra_fixture::RaFixtureAnalysis}; use macros::UpmapFromRaFixture; -use syntax::{SourceFile, ast}; +use syntax::{AstNode, SourceFile, ast}; use triomphe::Arc; use view_memory_layout::{RecursiveMemoryLayout, view_memory_layout}; @@ -254,6 +254,7 @@ impl Analysis { TryFrom::try_from(&*std::env::current_dir().unwrap().as_path().to_string_lossy()) .unwrap(), ); + let crate_attrs = Vec::new(); cfg_options.insert_atom(sym::test); crate_graph.add_crate_root( file_id, @@ -264,6 +265,7 @@ impl Analysis { None, Env::default(), CrateOrigin::Local { repo: None, name: None }, + crate_attrs, false, proc_macro_cwd, Arc::new(CrateWorkspaceData { @@ -903,6 +905,18 @@ impl Analysis { self.with_db(|db| view_memory_layout(db, position)) } + pub fn get_failed_obligations(&self, offset: TextSize, file_id: FileId) -> Cancellable { + self.with_db(|db| { + let sema = Semantics::new(db); + let source_file = sema.parse_guess_edition(file_id); + + let Some(token) = source_file.syntax().token_at_offset(offset).next() else { + return String::new(); + }; + sema.get_failed_obligations(token).unwrap_or_default() + }) + } + pub fn editioned_file_id_to_vfs(&self, file_id: hir::EditionedFileId) -> FileId { file_id.file_id(&self.db) } diff --git a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs index 29530ed02bb6d..020f235d3a929 100644 --- a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs +++ b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs @@ -431,9 +431,8 @@ where ) .map(|mut res| { res.docs = self.docs(db).map(Documentation::into_owned); - res.description = hir::attach_db(db, || { - Some(self.display(db, self.krate(db).to_display_target(db)).to_string()) - }); + res.description = + Some(self.display(db, self.krate(db).to_display_target(db)).to_string()); res.container_name = self.container_name(db); res }), diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs index a61be93ea9684..c562a9b30b040 100644 --- a/src/tools/rust-analyzer/crates/ide/src/runnables.rs +++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs @@ -412,13 +412,11 @@ pub(crate) fn runnable_impl( let ty = def.self_ty(sema.db); let adt_name = ty.as_adt()?.name(sema.db); let mut ty_args = ty.generic_parameters(sema.db, display_target).peekable(); - let params = hir::attach_db(sema.db, || { - if ty_args.peek().is_some() { - format!("<{}>", ty_args.format_with(",", |ty, cb| cb(&ty))) - } else { - String::new() - } - }); + let params = if ty_args.peek().is_some() { + format!("<{}>", ty_args.format_with(",", |ty, cb| cb(&ty))) + } else { + String::new() + }; let mut test_id = format!("{}{params}", adt_name.display(sema.db, edition)); test_id.retain(|c| c != ' '); let test_id = 
TestId::Path(test_id); @@ -528,9 +526,7 @@ fn module_def_doctest(sema: &Semantics<'_, RootDatabase>, def: Definition) -> Op let mut ty_args = ty.generic_parameters(db, display_target).peekable(); format_to!(path, "{}", name.display(db, edition)); if ty_args.peek().is_some() { - hir::attach_db(db, || { - format_to!(path, "<{}>", ty_args.format_with(",", |ty, cb| cb(&ty))); - }); + format_to!(path, "<{}>", ty_args.format_with(",", |ty, cb| cb(&ty))); } format_to!(path, "::{}", def_name.display(db, edition)); path.retain(|c| c != ' '); diff --git a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs index 78dc3f7e862c8..9ab07565e9efc 100644 --- a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs +++ b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs @@ -268,12 +268,12 @@ fn signature_help_for_call( // In that case, fall back to render definitions of the respective parameters. // This is overly conservative: we do not substitute known type vars // (see FIXME in tests::impl_trait) and falling back on any unknowns. - hir::attach_db(db, || match (p.ty().contains_unknown(), fn_params.as_deref()) { + match (p.ty().contains_unknown(), fn_params.as_deref()) { (true, Some(fn_params)) => { format_to!(buf, "{}", fn_params[idx].ty().display(db, display_target)) } _ => format_to!(buf, "{}", p.ty().display(db, display_target)), - }); + } res.push_call_param(&buf); } } diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs index 7749f8e2f2ea9..aba6b64f977a5 100644 --- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs +++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs @@ -169,6 +169,7 @@ impl StaticIndex<'_> { type_hints: true, sized_bound: false, parameter_hints: true, + parameter_hints_for_missing_arguments: false, generic_parameter_hints: crate::GenericParameterHints { type_hints: false, lifetime_hints: false, @@ -325,12 +326,12 @@ impl StaticIndex<'_> { }; let mut visited_files = FxHashSet::default(); for module in work { - let file_id = module.definition_source_file_id(db).original_file(db); + let file_id = + module.definition_source_file_id(db).original_file(db).file_id(&analysis.db); if visited_files.contains(&file_id) { continue; } - this.add_file(file_id.file_id(&analysis.db)); - // mark the file + this.add_file(file_id); visited_files.insert(file_id); } this diff --git a/src/tools/rust-analyzer/crates/ide/src/status.rs b/src/tools/rust-analyzer/crates/ide/src/status.rs index cfcd76d2aa3bd..7f377e416b322 100644 --- a/src/tools/rust-analyzer/crates/ide/src/status.rs +++ b/src/tools/rust-analyzer/crates/ide/src/status.rs @@ -40,6 +40,7 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option) -> String { edition, dependencies, origin, + crate_attrs, is_proc_macro, proc_macro_cwd, } = crate_id.data(db); @@ -62,6 +63,7 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option) -> String { format_to!(buf, " Potential cfgs: {:?}\n", potential_cfg_options); format_to!(buf, " Env: {:?}\n", env); format_to!(buf, " Origin: {:?}\n", origin); + format_to!(buf, " Extra crate-level attrs: {:?}\n", crate_attrs); format_to!(buf, " Is a proc macro crate: {}\n", is_proc_macro); format_to!(buf, " Proc macro cwd: {:?}\n", proc_macro_cwd); let deps = dependencies diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs index fd5ede865f14a..e7c5f95a250ee 100644 --- 
a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs @@ -435,17 +435,15 @@ fn traverse( |node| unsafe_ops.contains(&InFile::new(descended_element.file_id, node)); let element = match descended_element.value { NodeOrToken::Node(name_like) => { - let hl = hir::attach_db(sema.db, || { - highlight::name_like( - sema, - krate, - bindings_shadow_count, - &is_unsafe_node, - config.syntactic_name_ref_highlighting, - name_like, - edition, - ) - }); + let hl = highlight::name_like( + sema, + krate, + bindings_shadow_count, + &is_unsafe_node, + config.syntactic_name_ref_highlighting, + name_like, + edition, + ); if hl.is_some() && !in_macro { // skip highlighting the contained token of our name-like node // as that would potentially overwrite our result @@ -453,10 +451,10 @@ fn traverse( } hl } - NodeOrToken::Token(token) => hir::attach_db(sema.db, || { + NodeOrToken::Token(token) => { highlight::token(sema, token, edition, &is_unsafe_node, tt_level > 0) .zip(Some(None)) - }), + } }; if let Some((mut highlight, binding_hash)) = element { if is_unlinked && highlight.tag == HlTag::UnresolvedReference { diff --git a/src/tools/rust-analyzer/crates/intern/Cargo.toml b/src/tools/rust-analyzer/crates/intern/Cargo.toml index 81b6703deef55..6414f091783c8 100644 --- a/src/tools/rust-analyzer/crates/intern/Cargo.toml +++ b/src/tools/rust-analyzer/crates/intern/Cargo.toml @@ -18,6 +18,8 @@ dashmap.workspace = true hashbrown.workspace = true rustc-hash.workspace = true triomphe.workspace = true +smallvec.workspace = true +rayon.workspace = true [lints] workspace = true diff --git a/src/tools/rust-analyzer/crates/intern/src/gc.rs b/src/tools/rust-analyzer/crates/intern/src/gc.rs new file mode 100644 index 0000000000000..0d500a9714e49 --- /dev/null +++ b/src/tools/rust-analyzer/crates/intern/src/gc.rs @@ -0,0 +1,330 @@ +//! Garbage collection of interned values. +//! +//! The GC is a simple mark-and-sweep GC: you first mark all storages, then the +//! GC visits them, and each live value they refer, recursively, then removes +//! those not marked. The sweep phase is done in parallel. + +use std::{hash::Hash, marker::PhantomData, ops::ControlFlow}; + +use dashmap::DashMap; +use hashbrown::raw::RawTable; +use rayon::iter::{IntoParallelRefIterator, ParallelIterator}; +use rustc_hash::{FxBuildHasher, FxHashSet}; +use triomphe::{Arc, ThinArc}; + +use crate::{Internable, InternedRef, InternedSliceRef, SliceInternable}; + +trait Storage { + fn len(&self) -> usize; + + fn mark(&self, gc: &mut GarbageCollector); + + fn sweep(&self, gc: &GarbageCollector); +} + +struct InternedStorage(PhantomData T>); + +impl Storage for InternedStorage { + fn len(&self) -> usize { + T::storage().get().len() + } + + fn mark(&self, gc: &mut GarbageCollector) { + let storage = T::storage().get(); + for item in storage { + let item = item.key(); + let addr = Arc::as_ptr(item).addr(); + if Arc::strong_count(item) > 1 { + // The item is referenced from the outside. 
+ gc.alive.insert(addr); + item.visit_with(gc); + } + } + } + + fn sweep(&self, gc: &GarbageCollector) { + let storage = T::storage().get(); + gc.sweep_storage(storage, |item| item.as_ptr().addr()); + } +} + +struct InternedSliceStorage(PhantomData T>); + +impl Storage for InternedSliceStorage { + fn len(&self) -> usize { + T::storage().get().len() + } + + fn mark(&self, gc: &mut GarbageCollector) { + let storage = T::storage().get(); + for item in storage { + let item = item.key(); + let addr = ThinArc::as_ptr(item).addr(); + if ThinArc::strong_count(item) > 1 { + // The item is referenced from the outside. + gc.alive.insert(addr); + T::visit_header(&item.header.header, gc); + T::visit_slice(&item.slice, gc); + } + } + } + + fn sweep(&self, gc: &GarbageCollector) { + let storage = T::storage().get(); + gc.sweep_storage(storage, |item| item.as_ptr().addr()); + } +} + +pub trait GcInternedVisit { + fn visit_with(&self, gc: &mut GarbageCollector); +} + +pub trait GcInternedSliceVisit: SliceInternable { + fn visit_header(header: &Self::Header, gc: &mut GarbageCollector); + fn visit_slice(header: &[Self::SliceType], gc: &mut GarbageCollector); +} + +#[derive(Default)] +pub struct GarbageCollector { + alive: FxHashSet, + storages: Vec>, +} + +impl GarbageCollector { + pub fn add_storage(&mut self) { + const { assert!(T::USE_GC) }; + + self.storages.push(Box::new(InternedStorage::(PhantomData))); + } + + pub fn add_slice_storage(&mut self) { + const { assert!(T::USE_GC) }; + + self.storages.push(Box::new(InternedSliceStorage::(PhantomData))); + } + + /// # Safety + /// + /// - This cannot be called if there are some not-yet-recorded type values. + /// - All relevant storages must have been added; that is, within the full graph of values, + /// the added storages must form a DAG. + /// - [`GcInternedVisit`] and [`GcInternedSliceVisit`] must mark all values reachable from the node. + pub unsafe fn collect(mut self) { + let total_nodes = self.storages.iter().map(|storage| storage.len()).sum(); + self.alive = FxHashSet::with_capacity_and_hasher(total_nodes, FxBuildHasher); + + let storages = std::mem::take(&mut self.storages); + + for storage in &storages { + storage.mark(&mut self); + } + + // Miri doesn't support rayon. + if cfg!(miri) { + storages.iter().for_each(|storage| storage.sweep(&self)); + } else { + storages.par_iter().for_each(|storage| storage.sweep(&self)); + } + } + + pub fn mark_interned_alive( + &mut self, + interned: InternedRef<'_, T>, + ) -> ControlFlow<()> { + if interned.strong_count() > 1 { + // It will be visited anyway, so short-circuit + return ControlFlow::Break(()); + } + let addr = interned.as_raw().addr(); + if !self.alive.insert(addr) { ControlFlow::Break(()) } else { ControlFlow::Continue(()) } + } + + pub fn mark_interned_slice_alive( + &mut self, + interned: InternedSliceRef<'_, T>, + ) -> ControlFlow<()> { + if interned.strong_count() > 1 { + // It will be visited anyway, so short-circuit + return ControlFlow::Break(()); + } + let addr = interned.as_raw().addr(); + if !self.alive.insert(addr) { ControlFlow::Break(()) } else { ControlFlow::Continue(()) } + } + + fn sweep_storage( + &self, + storage: &DashMap, + get_addr: impl Fn(&T) -> usize + Send + Sync, + ) { + // Miri doesn't support rayon. 
+ if cfg!(miri) { + storage.shards().iter().for_each(|shard| { + self.retain_only_alive(&mut *shard.write(), |item| get_addr(&item.0)) + }); + } else { + storage.shards().par_iter().for_each(|shard| { + self.retain_only_alive(&mut *shard.write(), |item| get_addr(&item.0)) + }); + } + } + + #[inline] + fn retain_only_alive(&self, map: &mut RawTable, mut get_addr: impl FnMut(&T) -> usize) { + // This code was copied from DashMap's retain() - which we can't use because we want to run in parallel. + unsafe { + // Here we only use `iter` as a temporary, preventing use-after-free + for bucket in map.iter() { + let item = bucket.as_mut(); + let addr = get_addr(item); + if !self.alive.contains(&addr) { + map.erase(bucket); + } + } + } + } +} + +#[cfg(test)] +mod tests { + use crate::{ + GarbageCollector, GcInternedSliceVisit, GcInternedVisit, Interned, InternedSliceRef, + }; + + crate::impl_internable!(String); + + #[test] + fn simple_interned() { + let a = Interned::new("abc".to_owned()); + let b = Interned::new("abc".to_owned()); + assert_eq!(a, b); + assert_eq!(a.as_ref(), b.as_ref()); + assert_eq!(a.as_ref(), a.as_ref()); + assert_eq!(a, a.clone()); + assert_eq!(a, a.clone().clone()); + assert_eq!(b.clone(), a.clone().clone()); + assert_eq!(*a, "abc"); + assert_eq!(*b, "abc"); + assert_eq!(b.as_ref().to_owned(), a); + let c = Interned::new("def".to_owned()); + assert_ne!(a, c); + assert_ne!(b, c); + assert_ne!(b.as_ref(), c.as_ref()); + assert_eq!(*c.as_ref(), "def"); + drop(c); + assert_eq!(*a, "abc"); + assert_eq!(*b, "abc"); + drop(a); + assert_eq!(*b, "abc"); + drop(b); + } + + #[test] + fn simple_gc() { + #[derive(Debug, PartialEq, Eq, Hash)] + struct GcString(String); + + crate::impl_internable!(gc; GcString); + + impl GcInternedVisit for GcString { + fn visit_with(&self, _gc: &mut GarbageCollector) {} + } + + crate::impl_slice_internable!(gc; StringSlice, String, u32); + type InternedSlice = crate::InternedSlice; + + impl GcInternedSliceVisit for StringSlice { + fn visit_header(_header: &Self::Header, _gc: &mut GarbageCollector) {} + + fn visit_slice(_header: &[Self::SliceType], _gc: &mut GarbageCollector) {} + } + + let (a, d) = { + let a = Interned::new_gc(GcString("abc".to_owned())).to_owned(); + let b = Interned::new_gc(GcString("abc".to_owned())).to_owned(); + assert_eq!(a, b); + assert_eq!(a.as_ref(), b.as_ref()); + assert_eq!(a.as_ref(), a.as_ref()); + assert_eq!(a, a.clone()); + assert_eq!(a, a.clone().clone()); + assert_eq!(b.clone(), a.clone().clone()); + assert_eq!(a.0, "abc"); + assert_eq!(b.0, "abc"); + assert_eq!(b.as_ref().to_owned(), a); + let c = Interned::new_gc(GcString("def".to_owned())).to_owned(); + assert_ne!(a, c); + assert_ne!(b, c); + assert_ne!(b.as_ref(), c.as_ref()); + assert_eq!(c.as_ref().0, "def"); + + let d = InternedSlice::from_header_and_slice("abc".to_owned(), &[123, 456]); + let e = InternedSlice::from_header_and_slice("abc".to_owned(), &[123, 456]); + assert_eq!(d, e); + assert_eq!(d.to_owned(), e.to_owned()); + assert_eq!(d.header.length, 2); + assert_eq!(d.header.header, "abc"); + assert_eq!(d.slice, [123, 456]); + (a, d.to_owned()) + }; + + let mut gc = GarbageCollector::default(); + gc.add_slice_storage::(); + gc.add_storage::(); + unsafe { gc.collect() }; + + assert_eq!(a.0, "abc"); + assert_eq!(d.header.length, 2); + assert_eq!(d.header.header, "abc"); + assert_eq!(d.slice, [123, 456]); + + drop(a); + drop(d); + + let mut gc = GarbageCollector::default(); + gc.add_slice_storage::(); + gc.add_storage::(); + unsafe { gc.collect() }; + } + + 
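The mark phase relies on each GC-mode type's `GcInternedVisit` impl to report the interned values it holds, which is what keeps nested values alive across a collection. The following is an illustrative sketch only, assuming hypothetical `Leaf` and `Node` types (a `Node` stores an `InternedRef` to a `Leaf`); it follows the same pattern as the `gc_visit` test below and is not part of the patch itself.

```rust
use intern::{GarbageCollector, GcInternedVisit, Interned, InternedRef, impl_internable};

// Hypothetical GC-mode types: a `Node` keeps an `InternedRef` to a `Leaf`.
#[derive(PartialEq, Eq, Hash)]
struct Leaf(String);

#[derive(PartialEq, Eq, Hash)]
struct Node(InternedRef<'static, Leaf>);

impl_internable!(gc; Leaf);
impl_internable!(gc; Node);

impl GcInternedVisit for Leaf {
    // Leaves hold no interned values, so there is nothing to mark.
    fn visit_with(&self, _gc: &mut GarbageCollector) {}
}

impl GcInternedVisit for Node {
    fn visit_with(&self, gc: &mut GarbageCollector) {
        // Mark the leaf as reachable; the returned `ControlFlow::Break` means it
        // was already marked (or is externally referenced) and needs no re-visit.
        _ = gc.mark_interned_alive(self.0);
    }
}

fn demo() {
    // The local `leaf` ref does not keep the value alive by itself; it survives
    // the collection because the externally held `node` marks it.
    let leaf: InternedRef<'static, Leaf> = Interned::new_gc(Leaf("core".to_owned()));
    let node = Interned::new_gc(Node(leaf)).to_owned();

    let mut gc = GarbageCollector::default();
    // Every GC-mode storage that can appear in the value graph is registered.
    gc.add_storage::<Leaf>();
    gc.add_storage::<Node>();
    // SAFETY: per `collect`'s contract, all relevant storages were added and
    // the `visit_with` impls above mark everything reachable.
    unsafe { gc.collect() };

    assert_eq!(node.0.0, "core"); // both `Node` and the marked `Leaf` survived
}
```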
#[test] + fn gc_visit() { + #[derive(PartialEq, Eq, Hash)] + struct GcInterned(InternedSliceRef<'static, StringSlice>); + + crate::impl_internable!(gc; GcInterned); + + impl GcInternedVisit for GcInterned { + fn visit_with(&self, gc: &mut GarbageCollector) { + _ = gc.mark_interned_slice_alive(self.0); + } + } + + crate::impl_slice_internable!(gc; StringSlice, String, i32); + type InternedSlice = crate::InternedSlice; + + impl GcInternedSliceVisit for StringSlice { + fn visit_header(_header: &Self::Header, _gc: &mut GarbageCollector) {} + + fn visit_slice(_header: &[Self::SliceType], _gc: &mut GarbageCollector) {} + } + + let outer = { + let inner = InternedSlice::from_header_and_slice("abc".to_owned(), &[123, 456, 789]); + Interned::new_gc(GcInterned(inner)).to_owned() + }; + + let mut gc = GarbageCollector::default(); + gc.add_slice_storage::(); + gc.add_storage::(); + unsafe { gc.collect() }; + + assert_eq!(outer.0.header.header, "abc"); + assert_eq!(outer.0.slice, [123, 456, 789]); + + drop(outer); + + let mut gc = GarbageCollector::default(); + gc.add_slice_storage::(); + gc.add_storage::(); + unsafe { gc.collect() }; + } +} diff --git a/src/tools/rust-analyzer/crates/intern/src/intern.rs b/src/tools/rust-analyzer/crates/intern/src/intern.rs new file mode 100644 index 0000000000000..b7acd6624b99a --- /dev/null +++ b/src/tools/rust-analyzer/crates/intern/src/intern.rs @@ -0,0 +1,372 @@ +//! Interning of single values. +//! +//! Interning supports two modes: GC and non-GC. +//! +//! In non-GC mode, you create [`Interned`]s, and can create `Copy` handles to them +//! that can still be upgraded back to [`Interned`] ([`InternedRef`]) via [`Interned::as_ref`]. +//! Generally, letting the [`InternedRef`] to outlive the [`Interned`] is a soundness bug and can +//! lead to UB. When all [`Interned`]s of some value are dropped, the value is freed (newer interns +//! may re-create it, not necessarily in the same place). +//! +//! In GC mode, you generally operate on [`InternedRef`]s. They are `Copy` and comfortable. To intern +//! a value you call [`Interned::new_gc`], which returns an [`InternedRef`]. Having all [`Interned`]s +//! of some value be dropped will *not* immediately free the value. Instead, a mark-and-sweep GC can +//! be initiated, which will free all values which have no live [`Interned`]s. +//! +//! Generally, in GC mode, you operate on [`InternedRef`], but when you need to store some long-term +//! value (e.g. a Salsa query output), you convert it to an [`Interned`]. This ensures that an eventual +//! GC will not free it as long as it is alive. +//! +//! Making mistakes is hard due to GC [`InternedRef`] wrappers not implementing `salsa::Update`, meaning +//! Salsa will ensure you do not store them in queries or Salsa-interneds. However it's still *possible* +//! without unsafe code (for example, by storing them in a `static`), which is why triggering GC is unsafe. +//! +//! For more information about GC see [`crate::gc`]. 
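A minimal usage sketch of the two interning modes described in the module docs above, assuming hypothetical payload types `Config` and `TyData`; it mirrors the `simple_interned` and `simple_gc` tests and is illustrative rather than part of the patch.

```rust
use intern::{GarbageCollector, GcInternedVisit, Interned, InternedRef, impl_internable};

// Hypothetical payloads: `Config` uses the classic refcounted mode,
// `TyData` opts into the new GC mode.
#[derive(Debug, PartialEq, Eq, Hash)]
struct Config(String);

#[derive(Debug, PartialEq, Eq, Hash)]
struct TyData(u32);

impl_internable!(Config);     // non-GC: freed when the last `Interned` is dropped
impl_internable!(gc; TyData); // GC: freed only by a later collection pass

// GC-mode types describe what they reference so the collector can trace them;
// `TyData` holds no interned values, so there is nothing to mark.
impl GcInternedVisit for TyData {
    fn visit_with(&self, _gc: &mut GarbageCollector) {}
}

fn demo() {
    // Non-GC mode: `new` returns an owning, refcounted handle.
    let a: Interned<Config> = Interned::new(Config("release".to_owned()));
    let b = Interned::new(Config("release".to_owned()));
    assert_eq!(a, b); // pointer equality: both point at the same interned value

    // GC mode: `new_gc` returns a cheap `Copy` handle.
    let t: InternedRef<'_, TyData> = Interned::new_gc(TyData(42));
    let copy = t; // no refcount traffic

    // Anything stored long-term (e.g. a Salsa query output) is upgraded, so a
    // future `GarbageCollector::collect` cannot free it while it is still used.
    let _kept: Interned<TyData> = copy.to_owned();
}
```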
+ +use std::{ + fmt::{self, Debug, Display}, + hash::{BuildHasher, Hash, Hasher}, + ops::Deref, + ptr, + sync::OnceLock, +}; + +use dashmap::{DashMap, SharedValue}; +use hashbrown::raw::RawTable; +use rustc_hash::FxBuildHasher; +use triomphe::{Arc, ArcBorrow}; + +type InternMap = DashMap, (), FxBuildHasher>; +type Guard = dashmap::RwLockWriteGuard<'static, RawTable<(Arc, SharedValue<()>)>>; + +pub struct Interned { + arc: Arc, +} + +impl Interned { + #[inline] + pub fn new(obj: T) -> Self { + const { assert!(!T::USE_GC) }; + + let storage = T::storage().get(); + let (mut shard, hash) = Self::select(storage, &obj); + // Atomically, + // - check if `obj` is already in the map + // - if so, clone its `Arc` and return it + // - if not, box it up, insert it, and return a clone + // This needs to be atomic (locking the shard) to avoid races with other thread, which could + // insert the same object between us looking it up and inserting it. + let bucket = match shard.find_or_find_insert_slot( + hash, + |(other, _)| **other == obj, + |(x, _)| Self::hash(storage, x), + ) { + Ok(bucket) => bucket, + // SAFETY: The slot came from `find_or_find_insert_slot()`, and the table wasn't modified since then. + Err(insert_slot) => unsafe { + shard.insert_in_slot(hash, insert_slot, (Arc::new(obj), SharedValue::new(()))) + }, + }; + // SAFETY: We just retrieved/inserted this bucket. + unsafe { Self { arc: bucket.as_ref().0.clone() } } + } + + #[inline] + pub fn new_gc<'a>(obj: T) -> InternedRef<'a, T> { + const { assert!(T::USE_GC) }; + + let storage = T::storage().get(); + let (mut shard, hash) = Self::select(storage, &obj); + // Atomically, + // - check if `obj` is already in the map + // - if so, clone its `Arc` and return it + // - if not, box it up, insert it, and return a clone + // This needs to be atomic (locking the shard) to avoid races with other thread, which could + // insert the same object between us looking it up and inserting it. + let bucket = match shard.find_or_find_insert_slot( + hash, + |(other, _)| **other == obj, + |(x, _)| Self::hash(storage, x), + ) { + Ok(bucket) => bucket, + // SAFETY: The slot came from `find_or_find_insert_slot()`, and the table wasn't modified since then. + Err(insert_slot) => unsafe { + shard.insert_in_slot(hash, insert_slot, (Arc::new(obj), SharedValue::new(()))) + }, + }; + // SAFETY: We just retrieved/inserted this bucket. + unsafe { InternedRef { arc: Arc::borrow_arc(&bucket.as_ref().0) } } + } + + #[inline] + fn select(storage: &'static InternMap, obj: &T) -> (Guard, u64) { + let hash = Self::hash(storage, obj); + let shard_idx = storage.determine_shard(hash as usize); + let shard = &storage.shards()[shard_idx]; + (shard.write(), hash) + } + + #[inline] + fn hash(storage: &'static InternMap, obj: &T) -> u64 { + storage.hasher().hash_one(obj) + } + + /// # Safety + /// + /// The pointer should originate from an `Interned` or an `InternedRef`. + #[inline] + pub unsafe fn from_raw(ptr: *const T) -> Self { + // SAFETY: Our precondition. + Self { arc: unsafe { Arc::from_raw(ptr) } } + } + + #[inline] + pub fn as_ref(&self) -> InternedRef<'_, T> { + InternedRef { arc: self.arc.borrow_arc() } + } +} + +impl Drop for Interned { + #[inline] + fn drop(&mut self) { + // When the last `Ref` is dropped, remove the object from the global map. + if !T::USE_GC && Arc::count(&self.arc) == 2 { + // Only `self` and the global map point to the object. 
+ + self.drop_slow(); + } + } +} + +impl Interned { + #[cold] + fn drop_slow(&mut self) { + let storage = T::storage().get(); + let (mut shard, hash) = Self::select(storage, &self.arc); + + if Arc::count(&self.arc) != 2 { + // Another thread has interned another copy + return; + } + + shard.remove_entry(hash, |(other, _)| **other == **self); + + // Shrink the backing storage if the shard is less than 50% occupied. + if shard.len() * 2 < shard.capacity() { + let len = shard.len(); + shard.shrink_to(len, |(x, _)| Self::hash(storage, x)); + } + } +} + +/// Compares interned `Ref`s using pointer equality. +impl PartialEq for Interned { + // NOTE: No `?Sized` because `ptr_eq` doesn't work right with trait objects. + + #[inline] + fn eq(&self, other: &Self) -> bool { + Arc::ptr_eq(&self.arc, &other.arc) + } +} + +impl Eq for Interned {} + +impl Hash for Interned { + #[inline] + fn hash(&self, state: &mut H) { + state.write_usize(self.arc.as_ptr().addr()) + } +} + +impl AsRef for Interned { + #[inline] + fn as_ref(&self) -> &T { + self + } +} + +impl Deref for Interned { + type Target = T; + + #[inline] + fn deref(&self) -> &Self::Target { + &self.arc + } +} + +impl Clone for Interned { + #[inline] + fn clone(&self) -> Self { + Self { arc: self.arc.clone() } + } +} + +impl Debug for Interned { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + ::fmt(&**self, f) + } +} + +impl Display for Interned { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + ::fmt(&**self, f) + } +} + +#[repr(transparent)] +pub struct InternedRef<'a, T> { + arc: ArcBorrow<'a, T>, +} + +impl<'a, T: Internable> InternedRef<'a, T> { + #[inline] + pub fn as_raw(self) -> *const T { + // Not `ptr::from_ref(&*self.arc)`, because we need to keep the provenance. + self.arc.with_arc(|arc| Arc::as_ptr(arc)) + } + + /// # Safety + /// + /// The pointer needs to originate from `Interned` or `InternedRef`. + #[inline] + pub unsafe fn from_raw(ptr: *const T) -> Self { + // SAFETY: Our precondition. + Self { arc: unsafe { ArcBorrow::from_ptr(ptr) } } + } + + #[inline] + pub fn to_owned(self) -> Interned { + Interned { arc: self.arc.clone_arc() } + } + + #[inline] + pub fn get(self) -> &'a T { + self.arc.get() + } + + /// # Safety + /// + /// You have to make sure the data is not referenced after the refcount reaches zero; beware the interning + /// map also keeps a reference to the value. + #[inline] + pub unsafe fn decrement_refcount(self) { + // SAFETY: Our precondition. + unsafe { drop(Arc::from_raw(self.as_raw())) } + } + + #[inline] + pub(crate) fn strong_count(self) -> usize { + ArcBorrow::strong_count(&self.arc) + } + + /// **Available only on GC mode**. + /// + /// Changes the attached lifetime, as in GC mode, the lifetime is more kind of a lint to prevent misuse + /// than actual soundness check. + #[inline] + pub fn change_lifetime<'b>(self) -> InternedRef<'b, T> { + const { assert!(T::USE_GC) }; + // SAFETY: The lifetime on `InternedRef` is essentially advisory only for GCed types. 
+ unsafe { std::mem::transmute::, InternedRef<'b, T>>(self) } + } +} + +impl Clone for InternedRef<'_, T> { + #[inline] + fn clone(&self) -> Self { + *self + } +} + +impl Copy for InternedRef<'_, T> {} + +impl Hash for InternedRef<'_, T> { + #[inline] + fn hash(&self, state: &mut H) { + let ptr = ptr::from_ref::(&*self.arc); + state.write_usize(ptr.addr()); + } +} + +impl PartialEq for InternedRef<'_, T> { + #[inline] + fn eq(&self, other: &Self) -> bool { + ArcBorrow::ptr_eq(&self.arc, &other.arc) + } +} + +impl Eq for InternedRef<'_, T> {} + +impl Deref for InternedRef<'_, T> { + type Target = T; + + #[inline] + fn deref(&self) -> &Self::Target { + &self.arc + } +} + +impl Debug for InternedRef<'_, T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + (*self.arc).fmt(f) + } +} + +impl Display for InternedRef<'_, T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + (*self.arc).fmt(f) + } +} + +pub struct InternStorage { + map: OnceLock>, +} + +#[allow( + clippy::new_without_default, + reason = "this a const fn, so it can't be default yet. See " +)] +impl InternStorage { + pub const fn new() -> Self { + Self { map: OnceLock::new() } + } +} + +impl InternStorage { + pub(crate) fn get(&self) -> &InternMap { + self.map.get_or_init(DashMap::default) + } +} + +pub trait Internable: Hash + Eq + Send + Sync + 'static { + const USE_GC: bool; + + fn storage() -> &'static InternStorage; +} + +/// Implements `Internable` for a given list of types, making them usable with `Interned`. +#[macro_export] +#[doc(hidden)] +macro_rules! _impl_internable { + ( gc; $($t:ty),+ $(,)? ) => { $( + impl $crate::Internable for $t { + const USE_GC: bool = true; + + fn storage() -> &'static $crate::InternStorage { + static STORAGE: $crate::InternStorage<$t> = $crate::InternStorage::new(); + &STORAGE + } + } + )+ }; + ( $($t:ty),+ $(,)? ) => { $( + impl $crate::Internable for $t { + const USE_GC: bool = false; + + fn storage() -> &'static $crate::InternStorage { + static STORAGE: $crate::InternStorage<$t> = $crate::InternStorage::new(); + &STORAGE + } + } + )+ }; +} +pub use crate::_impl_internable as impl_internable; diff --git a/src/tools/rust-analyzer/crates/intern/src/intern_slice.rs b/src/tools/rust-analyzer/crates/intern/src/intern_slice.rs new file mode 100644 index 0000000000000..58de6e17bdff6 --- /dev/null +++ b/src/tools/rust-analyzer/crates/intern/src/intern_slice.rs @@ -0,0 +1,325 @@ +//! Interning of slices, potentially with a header. +//! +//! See [`crate::intern`] for an explanation of interning modes. Note that slice interning is currently +//! available only in GC mode (there is no other need). +//! +//! [`InternedSlice`] and [`InternedSliceRef`] are essentially [`Interned<(Header, Box<[SliceType]>)>`][crate::Interned] +//! and [`InternedRef`][crate::InternedRef] with the same types, but more optimized. There is only one +//! allocation and the pointer is thin. 
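For illustration, a minimal sketch of header-plus-slice interning as described in the module docs above, assuming a hypothetical `NameWithIds` tag (header `String`, slice element `u32`); it mirrors the `StringSlice` usage in the gc tests and is not part of the patch.

```rust
use intern::{InternedSlice, InternedSliceRef, impl_slice_internable};

// Hypothetical tag: interned `(String header, [u32] slice)` pairs, e.g. a name
// plus a list of ids, kept behind a single allocation and a thin pointer.
impl_slice_internable!(gc; NameWithIds, String, u32);

fn demo() {
    let a: InternedSliceRef<'_, NameWithIds> =
        InternedSlice::<NameWithIds>::from_header_and_slice("args".to_owned(), &[1, 2, 3]);
    let b = InternedSlice::<NameWithIds>::from_header_and_slice("args".to_owned(), &[1, 2, 3]);
    assert_eq!(a, b); // deduplicated: both handles point at the same allocation

    // The deref target is triomphe's `HeaderSlice<HeaderWithLength<Header>, [SliceType]>`.
    assert_eq!(a.header.header, "args");
    assert_eq!(a.header.length, 3);
    assert_eq!(a.slice, [1, 2, 3]);

    // Like `InternedRef`, the handle is `Copy`; upgrade it for long-term storage
    // so a later GC pass keeps the value alive.
    let _owned: InternedSlice<NameWithIds> = a.to_owned();
}
```

As with single-value interning, such values only become eligible for collection once the tag's storage is registered via `add_slice_storage` together with a `GcInternedSliceVisit` impl (omitted here), as the gc tests above demonstrate.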
+ +use std::{ + ffi::c_void, + fmt::{self, Debug}, + hash::{BuildHasher, Hash, Hasher}, + marker::PhantomData, + mem::ManuallyDrop, + ops::Deref, + ptr::{self, NonNull}, + sync::OnceLock, +}; + +use dashmap::{DashMap, SharedValue}; +use hashbrown::raw::RawTable; +use rustc_hash::FxBuildHasher; +use triomphe::{HeaderSlice, HeaderWithLength, ThinArc}; + +type InternMap = DashMap< + ThinArc<::Header, ::SliceType>, + (), + FxBuildHasher, +>; +type Guard = dashmap::RwLockWriteGuard< + 'static, + RawTable<( + ThinArc<::Header, ::SliceType>, + SharedValue<()>, + )>, +>; +type Pointee = HeaderSlice< + HeaderWithLength<::Header>, + [::SliceType], +>; + +pub struct InternedSlice { + arc: ThinArc, +} + +impl InternedSlice { + #[inline] + pub fn from_header_and_slice<'a>( + header: T::Header, + slice: &[T::SliceType], + ) -> InternedSliceRef<'a, T> { + const { assert!(T::USE_GC) }; + + let storage = T::storage().get(); + let (mut shard, hash) = Self::select(storage, &header, slice); + // Atomically, + // - check if `obj` is already in the map + // - if so, clone its `Arc` and return it + // - if not, box it up, insert it, and return a clone + // This needs to be atomic (locking the shard) to avoid races with other thread, which could + // insert the same object between us looking it up and inserting it. + let bucket = match shard.find_or_find_insert_slot( + hash, + |(other, _)| other.header.header == header && other.slice == *slice, + |(x, _)| storage.hasher().hash_one(x), + ) { + Ok(bucket) => bucket, + // SAFETY: The slot came from `find_or_find_insert_slot()`, and the table wasn't modified since then. + Err(insert_slot) => unsafe { + shard.insert_in_slot( + hash, + insert_slot, + (ThinArc::from_header_and_slice(header, slice), SharedValue::new(())), + ) + }, + }; + // SAFETY: We just retrieved/inserted this bucket. + // `NonNull::new_unchecked()` is safe because the pointer originates from a `ThinArc`. + unsafe { + InternedSliceRef { + // INVARIANT: We create it from a `ThinArc`. + ptr: NonNull::new_unchecked(ThinArc::as_ptr(&bucket.as_ref().0).cast_mut()), + _marker: PhantomData, + } + } + } + + #[inline] + fn select( + storage: &'static InternMap, + header: &T::Header, + slice: &[T::SliceType], + ) -> (Guard, u64) { + let hash = Self::hash(storage, header, slice); + let shard_idx = storage.determine_shard(hash as usize); + let shard = &storage.shards()[shard_idx]; + (shard.write(), hash) + } + + #[inline] + fn hash(storage: &'static InternMap, header: &T::Header, slice: &[T::SliceType]) -> u64 { + storage.hasher().hash_one(HeaderSlice { + header: HeaderWithLength { header, length: slice.len() }, + slice, + }) + } + + #[inline(always)] + fn ptr(&self) -> *const c_void { + self.arc.as_ptr() + } + + #[inline] + pub fn as_ref(&self) -> InternedSliceRef<'_, T> { + InternedSliceRef { + // SAFETY: `self.ptr` comes from a valid `ThinArc`, so non null. + // INVARIANT: We create it from a `ThinArc`. + ptr: unsafe { NonNull::new_unchecked(self.ptr().cast_mut()) }, + _marker: PhantomData, + } + } +} + +/// Compares interned `Ref`s using pointer equality. +impl PartialEq for InternedSlice { + // NOTE: No `?Sized` because `ptr_eq` doesn't work right with trait objects. 
+ + #[inline] + fn eq(&self, other: &Self) -> bool { + self.arc.as_ptr() == other.arc.as_ptr() + } +} + +impl Eq for InternedSlice {} + +impl Hash for InternedSlice { + #[inline] + fn hash(&self, state: &mut H) { + state.write_usize(self.ptr().addr()) + } +} + +impl Deref for InternedSlice { + type Target = Pointee; + + #[inline] + fn deref(&self) -> &Self::Target { + &self.arc + } +} + +impl Clone for InternedSlice { + #[inline] + fn clone(&self) -> Self { + Self { arc: self.arc.clone() } + } +} + +impl Debug for InternedSlice +where + T: SliceInternable, + T::SliceType: Debug, + T::Header: Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + (*self.arc).fmt(f) + } +} + +#[repr(transparent)] +pub struct InternedSliceRef<'a, T> { + /// # Invariant + /// + /// There is no `ThinArcBorrow` unfortunately, so this is basically a `ManuallyDrop`, + /// except that can't be `Copy`, so we store a raw pointer instead. + ptr: NonNull, + _marker: PhantomData<&'a T>, +} + +// SAFETY: This is essentially a `ThinArc`, implemented as a raw pointer because there is no `ThinArcBorrowed`. +unsafe impl Send for InternedSliceRef<'_, T> {} +unsafe impl Sync for InternedSliceRef<'_, T> {} + +impl<'a, T: SliceInternable> InternedSliceRef<'a, T> { + #[inline(always)] + fn arc(self) -> ManuallyDrop> { + // SAFETY: `self.ptr`'s invariant. + unsafe { ManuallyDrop::new(ThinArc::from_raw(self.ptr.as_ptr())) } + } + + #[inline] + pub fn to_owned(self) -> InternedSlice { + InternedSlice { arc: (*self.arc()).clone() } + } + + #[inline] + pub fn get(self) -> &'a Pointee { + // SAFETY: This is a lifetime extension, valid because we live for `'a`. + unsafe { &*ptr::from_ref::>(&*self.arc()) } + } + + /// # Safety + /// + /// You have to make sure the data is not referenced after the refcount reaches zero; beware the interning + /// map also keeps a reference to the value. + #[inline] + pub unsafe fn decrement_refcount(self) { + drop(ManuallyDrop::into_inner(self.arc())); + } + + #[inline] + pub(crate) fn strong_count(self) -> usize { + ThinArc::strong_count(&self.arc()) + } + + #[inline] + pub(crate) fn as_raw(self) -> *const c_void { + self.arc().as_ptr() + } + + /// **Available only on GC mode**. + /// + /// Changes the attached lifetime, as in GC mode, the lifetime is more kind of a lint to prevent misuse + /// than actual soundness check. + #[inline] + pub fn change_lifetime<'b>(self) -> InternedSliceRef<'b, T> { + const { assert!(T::USE_GC) }; + // SAFETY: The lifetime on `InternedSliceRef` is essentially advisory only for GCed types. 
+ unsafe { std::mem::transmute::, InternedSliceRef<'b, T>>(self) } + } +} + +impl Clone for InternedSliceRef<'_, T> { + #[inline] + fn clone(&self) -> Self { + *self + } +} + +impl Copy for InternedSliceRef<'_, T> {} + +impl Hash for InternedSliceRef<'_, T> { + #[inline] + fn hash(&self, state: &mut H) { + state.write_usize(self.ptr.as_ptr().addr()); + } +} + +impl PartialEq for InternedSliceRef<'_, T> { + #[inline] + fn eq(&self, other: &Self) -> bool { + self.ptr == other.ptr + } +} + +impl Eq for InternedSliceRef<'_, T> {} + +impl Deref for InternedSliceRef<'_, T> { + type Target = Pointee; + + #[inline] + fn deref(&self) -> &Self::Target { + self.get() + } +} + +impl Debug for InternedSliceRef<'_, T> +where + T: SliceInternable, + T::SliceType: Debug, + T::Header: Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + (**self).fmt(f) + } +} + +pub struct InternSliceStorage { + map: OnceLock>, +} + +#[allow( + clippy::new_without_default, + reason = "this a const fn, so it can't be default yet. See " +)] +impl InternSliceStorage { + pub const fn new() -> Self { + Self { map: OnceLock::new() } + } +} + +impl InternSliceStorage { + pub(crate) fn get(&self) -> &InternMap { + self.map.get_or_init(DashMap::default) + } +} + +pub trait SliceInternable: Sized + 'static { + const USE_GC: bool; + type Header: Eq + Hash + Send + Sync; + type SliceType: Eq + Hash + Send + Sync + Copy + 'static; + fn storage() -> &'static InternSliceStorage; +} + +/// Implements `SliceInternable` for a given list of types, making them usable with `InternedSlice`. +#[macro_export] +#[doc(hidden)] +macro_rules! _impl_slice_internable { + ( gc; $tag:ident, $h:ty, $t:ty $(,)? ) => { + #[allow(unreachable_pub)] + pub struct $tag; + impl $crate::SliceInternable for $tag { + const USE_GC: bool = true; + type Header = $h; + type SliceType = $t; + fn storage() -> &'static $crate::InternSliceStorage { + static STORAGE: $crate::InternSliceStorage<$tag> = + $crate::InternSliceStorage::new(); + &STORAGE + } + } + }; +} +pub use crate::_impl_slice_internable as impl_slice_internable; diff --git a/src/tools/rust-analyzer/crates/intern/src/lib.rs b/src/tools/rust-analyzer/crates/intern/src/lib.rs index 398d224c07ad2..0c0b12427d212 100644 --- a/src/tools/rust-analyzer/crates/intern/src/lib.rs +++ b/src/tools/rust-analyzer/crates/intern/src/lib.rs @@ -2,219 +2,14 @@ //! //! Eventually this should probably be replaced with salsa-based interning. 
-use std::{ - borrow::Borrow, - fmt::{self, Debug, Display}, - hash::{BuildHasher, BuildHasherDefault, Hash, Hasher}, - ops::Deref, - sync::OnceLock, -}; - -use dashmap::{DashMap, SharedValue}; -use hashbrown::raw::RawTable; -use rustc_hash::FxHasher; -use triomphe::Arc; - -type InternMap = DashMap, (), BuildHasherDefault>; -type Guard = dashmap::RwLockWriteGuard<'static, RawTable<(Arc, SharedValue<()>)>>; - +mod gc; +mod intern; +mod intern_slice; mod symbol; -pub use self::symbol::{Symbol, symbols as sym}; - -pub struct Interned { - arc: Arc, -} - -impl Interned { - #[inline] - pub fn new(obj: T) -> Self { - Self::new_generic(obj) - } -} - -impl Interned { - #[inline] - pub fn new_str(s: &str) -> Self { - Self::new_generic(s) - } -} - -impl Interned { - #[inline] - pub fn new_generic(obj: U) -> Self - where - U: Borrow, - Arc: From, - { - let storage = T::storage().get(); - let (mut shard, hash) = Self::select(storage, obj.borrow()); - // Atomically, - // - check if `obj` is already in the map - // - if so, clone its `Arc` and return it - // - if not, box it up, insert it, and return a clone - // This needs to be atomic (locking the shard) to avoid races with other thread, which could - // insert the same object between us looking it up and inserting it. - let bucket = match shard.find_or_find_insert_slot( - hash, - |(other, _)| **other == *obj.borrow(), - |(x, _)| Self::hash(storage, x), - ) { - Ok(bucket) => bucket, - // SAFETY: The slot came from `find_or_find_insert_slot()`, and the table wasn't modified since then. - Err(insert_slot) => unsafe { - shard.insert_in_slot(hash, insert_slot, (Arc::from(obj), SharedValue::new(()))) - }, - }; - // SAFETY: We just retrieved/inserted this bucket. - unsafe { Self { arc: bucket.as_ref().0.clone() } } - } - - #[inline] - fn select(storage: &'static InternMap, obj: &T) -> (Guard, u64) { - let hash = Self::hash(storage, obj); - let shard_idx = storage.determine_shard(hash as usize); - let shard = &storage.shards()[shard_idx]; - (shard.write(), hash) - } - - #[inline] - fn hash(storage: &'static InternMap, obj: &T) -> u64 { - storage.hasher().hash_one(obj) - } -} - -impl Drop for Interned { - #[inline] - fn drop(&mut self) { - // When the last `Ref` is dropped, remove the object from the global map. - if Arc::count(&self.arc) == 2 { - // Only `self` and the global map point to the object. - - self.drop_slow(); - } - } -} - -impl Interned { - #[cold] - fn drop_slow(&mut self) { - let storage = T::storage().get(); - let (mut shard, hash) = Self::select(storage, &self.arc); - - if Arc::count(&self.arc) != 2 { - // Another thread has interned another copy - return; - } - - shard.remove_entry(hash, |(other, _)| **other == *self.arc); - - // Shrink the backing storage if the shard is less than 50% occupied. - if shard.len() * 2 < shard.capacity() { - let len = shard.len(); - shard.shrink_to(len, |(x, _)| Self::hash(storage, x)); - } - } -} - -/// Compares interned `Ref`s using pointer equality. -impl PartialEq for Interned { - // NOTE: No `?Sized` because `ptr_eq` doesn't work right with trait objects. - - #[inline] - fn eq(&self, other: &Self) -> bool { - Arc::ptr_eq(&self.arc, &other.arc) - } -} -impl Eq for Interned {} - -impl PartialEq for Interned { - fn eq(&self, other: &Self) -> bool { - Arc::ptr_eq(&self.arc, &other.arc) - } -} - -impl Eq for Interned {} - -impl Hash for Interned { - fn hash(&self, state: &mut H) { - // NOTE: Cast disposes vtable pointer / slice/str length. 
- state.write_usize(Arc::as_ptr(&self.arc) as *const () as usize) - } -} - -impl AsRef for Interned { - #[inline] - fn as_ref(&self) -> &T { - &self.arc - } -} - -impl Deref for Interned { - type Target = T; - - #[inline] - fn deref(&self) -> &Self::Target { - &self.arc - } -} - -impl Clone for Interned { - fn clone(&self) -> Self { - Self { arc: self.arc.clone() } - } -} - -impl Debug for Interned { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - (*self.arc).fmt(f) - } -} - -impl Display for Interned { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - (*self.arc).fmt(f) - } -} - -pub struct InternStorage { - map: OnceLock>, -} - -#[allow( - clippy::new_without_default, - reason = "this a const fn, so it can't be default yet. See " -)] -impl InternStorage { - pub const fn new() -> Self { - Self { map: OnceLock::new() } - } -} - -impl InternStorage { - fn get(&self) -> &InternMap { - self.map.get_or_init(DashMap::default) - } -} - -pub trait Internable: Hash + Eq + 'static { - fn storage() -> &'static InternStorage; -} - -/// Implements `Internable` for a given list of types, making them usable with `Interned`. -#[macro_export] -#[doc(hidden)] -macro_rules! _impl_internable { - ( $($t:path),+ $(,)? ) => { $( - impl $crate::Internable for $t { - fn storage() -> &'static $crate::InternStorage { - static STORAGE: $crate::InternStorage<$t> = $crate::InternStorage::new(); - &STORAGE - } - } - )+ }; -} - -pub use crate::_impl_internable as impl_internable; - -impl_internable!(str,); +pub use self::gc::{GarbageCollector, GcInternedSliceVisit, GcInternedVisit}; +pub use self::intern::{InternStorage, Internable, Interned, InternedRef, impl_internable}; +pub use self::intern_slice::{ + InternSliceStorage, InternedSlice, InternedSliceRef, SliceInternable, impl_slice_internable, +}; +pub use self::symbol::{Symbol, symbols as sym}; diff --git a/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs b/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs index 462f1c69fa694..6e9c6d26b5b1e 100644 --- a/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs +++ b/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs @@ -161,6 +161,7 @@ define_symbols! { cfg_select, char, clone, + trivial_clone, Clone, coerce_unsized, column, diff --git a/src/tools/rust-analyzer/crates/macros/src/lib.rs b/src/tools/rust-analyzer/crates/macros/src/lib.rs index 3f90ecc8f902d..de8c3f2e55f14 100644 --- a/src/tools/rust-analyzer/crates/macros/src/lib.rs +++ b/src/tools/rust-analyzer/crates/macros/src/lib.rs @@ -25,6 +25,9 @@ decl_derive!( /// visited (and its type is not required to implement `TypeVisitable`). type_visitable_derive ); +decl_derive!( + [GenericTypeVisitable] => generic_type_visitable_derive +); fn type_visitable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream { if let syn::Data::Union(_) = s.ast().data { @@ -163,6 +166,33 @@ fn has_ignore_attr(attrs: &[syn::Attribute], name: &'static str, meta: &'static ignored } +fn generic_type_visitable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream { + if let syn::Data::Union(_) = s.ast().data { + panic!("cannot derive on union") + } + + s.add_bounds(synstructure::AddBounds::Fields); + s.bind_with(|_| synstructure::BindStyle::Move); + s.add_impl_generic(parse_quote!(__V: hir_ty::next_solver::interner::WorldExposer)); + let body_visit = s.each(|bind| { + quote! 
{ + ::rustc_type_ir::GenericTypeVisitable::<__V>::generic_visit_with(#bind, __visitor); + } + }); + + s.bound_impl( + quote!(::rustc_type_ir::GenericTypeVisitable<__V>), + quote! { + fn generic_visit_with( + &self, + __visitor: &mut __V + ) { + match self { #body_visit } + } + }, + ) +} + decl_derive!( [UpmapFromRaFixture] => upmap_from_ra_fixture ); diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander.rs b/src/tools/rust-analyzer/crates/mbe/src/expander.rs index 6510fefcb6ad1..274b779c1b2e3 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/expander.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/expander.rs @@ -128,7 +128,10 @@ enum Fragment<'a> { #[default] Empty, /// token fragments are just copy-pasted into the output - Tokens(tt::TokenTreesView<'a, Span>), + Tokens { + tree: tt::TokenTreesView<'a, Span>, + origin: TokensOrigin, + }, /// Expr ast fragments are surrounded with `()` on transcription to preserve precedence. /// Note that this impl is different from the one currently in `rustc` -- /// `rustc` doesn't translate fragments into token trees at all. @@ -156,10 +159,16 @@ impl Fragment<'_> { fn is_empty(&self) -> bool { match self { Fragment::Empty => true, - Fragment::Tokens(it) => it.len() == 0, + Fragment::Tokens { tree, .. } => tree.len() == 0, Fragment::Expr(it) => it.len() == 0, Fragment::Path(it) => it.len() == 0, Fragment::TokensOwned(it) => it.0.is_empty(), } } } + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum TokensOrigin { + Raw, + Ast, +} diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs index 4da8b309f0056..a21468fbb0d59 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs @@ -71,7 +71,7 @@ use tt::{ use crate::{ ExpandError, ExpandErrorKind, MetaTemplate, ValueResult, - expander::{Binding, Bindings, ExpandResult, Fragment}, + expander::{Binding, Bindings, ExpandResult, Fragment, TokensOrigin}, expect_fragment, parser::{ExprKind, MetaVarKind, Op, RepeatKind, Separator}, }; @@ -842,18 +842,23 @@ fn match_meta_var<'t>( } .err(); let tt_result = input.from_savepoint(savepoint); - return ValueResult { value: Fragment::Tokens(tt_result), err }; + return ValueResult { + value: Fragment::Tokens { tree: tt_result, origin: TokensOrigin::Raw }, + err, + }; } - MetaVarKind::Ty => parser::PrefixEntryPoint::Ty, - MetaVarKind::Pat => parser::PrefixEntryPoint::PatTop, - MetaVarKind::PatParam => parser::PrefixEntryPoint::Pat, - MetaVarKind::Stmt => parser::PrefixEntryPoint::Stmt, - MetaVarKind::Block => parser::PrefixEntryPoint::Block, - MetaVarKind::Meta => parser::PrefixEntryPoint::MetaItem, - MetaVarKind::Item => parser::PrefixEntryPoint::Item, - MetaVarKind::Vis => parser::PrefixEntryPoint::Vis, + MetaVarKind::Ty => (parser::PrefixEntryPoint::Ty, TokensOrigin::Ast), + MetaVarKind::Pat => (parser::PrefixEntryPoint::PatTop, TokensOrigin::Ast), + MetaVarKind::PatParam => (parser::PrefixEntryPoint::Pat, TokensOrigin::Ast), + MetaVarKind::Stmt => (parser::PrefixEntryPoint::Stmt, TokensOrigin::Ast), + MetaVarKind::Block => (parser::PrefixEntryPoint::Block, TokensOrigin::Ast), + MetaVarKind::Meta => (parser::PrefixEntryPoint::MetaItem, TokensOrigin::Ast), + MetaVarKind::Item => (parser::PrefixEntryPoint::Item, TokensOrigin::Ast), + MetaVarKind::Vis => (parser::PrefixEntryPoint::Vis, TokensOrigin::Ast), }; - expect_fragment(db, input, fragment, delim_span).map(Fragment::Tokens) + let (entry_point, origin) 
= fragment; + expect_fragment(db, input, entry_point, delim_span) + .map(|tree| Fragment::Tokens { tree, origin }) } fn collect_vars(collector_fun: &mut impl FnMut(Symbol), pattern: &MetaTemplate) { diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs index 3e4ab8bdc1d8d..006ef1af806ef 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs @@ -5,6 +5,7 @@ use intern::{Symbol, sym}; use span::{Edition, Span}; use tt::{Delimiter, TopSubtreeBuilder, iter::TtElement}; +use super::TokensOrigin; use crate::{ ExpandError, ExpandErrorKind, ExpandResult, MetaTemplate, expander::{Binding, Bindings, Fragment}, @@ -313,7 +314,7 @@ fn expand_subtree( } }; let values = match &var_value { - Fragment::Tokens(tokens) => { + Fragment::Tokens { tree: tokens, .. } => { let mut iter = tokens.iter(); (iter.next(), iter.next()) } @@ -393,7 +394,13 @@ fn expand_var( // rustc spacing is not like ours. Ours is like proc macros', it dictates how puncts will actually be joined. // rustc uses them mostly for pretty printing. So we have to deviate a bit from what rustc does here. // Basically, a metavariable can never be joined with whatever after it. - Fragment::Tokens(tt) => builder.extend_with_tt_alone(tt.strip_invisible()), + Fragment::Tokens { tree, origin } => { + let view = match origin { + TokensOrigin::Raw => tree.strip_invisible(), + TokensOrigin::Ast => tree, + }; + builder.extend_with_tt_alone(view); + } Fragment::TokensOwned(tt) => { builder.extend_with_tt_alone(tt.view().strip_invisible()) } diff --git a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs index 6a38044e3b579..6cf2524c16c34 100644 --- a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs +++ b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs @@ -1005,6 +1005,7 @@ impl SyntaxKind { Some(tok) } } +#[doc = r" `T![]`"] #[macro_export] macro_rules ! T_ { [$] => { $ crate :: SyntaxKind :: DOLLAR }; diff --git a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs index 041b9accf41d2..b3478d2cfe033 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs @@ -163,6 +163,7 @@ impl ProjectJson { cfg, target: crate_data.target, env: crate_data.env, + crate_attrs: crate_data.crate_attrs, proc_macro_dylib_path: crate_data .proc_macro_dylib_path .map(absolutize_on_base), @@ -244,6 +245,8 @@ pub struct Crate { pub(crate) cfg: Vec, pub(crate) target: Option, pub(crate) env: FxHashMap, + // Extra crate-level attributes, without the surrounding `#![]`. 
+ pub(crate) crate_attrs: Vec, pub(crate) proc_macro_dylib_path: Option, pub(crate) is_workspace_member: bool, pub(crate) include: Vec, @@ -365,6 +368,8 @@ struct CrateData { target: Option, #[serde(default)] env: FxHashMap, + #[serde(default)] + crate_attrs: Vec, proc_macro_dylib_path: Option, is_workspace_member: Option, source: Option, diff --git a/src/tools/rust-analyzer/crates/project-model/src/tests.rs b/src/tools/rust-analyzer/crates/project-model/src/tests.rs index 1908fc02904a8..a03ed562e1be5 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/tests.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/tests.rs @@ -198,6 +198,15 @@ fn rust_project_cfg_groups() { check_crate_graph(crate_graph, expect_file!["../test_data/output/rust_project_cfg_groups.txt"]); } +#[test] +fn rust_project_crate_attrs() { + let (crate_graph, _proc_macros) = load_rust_project("crate-attrs.json"); + check_crate_graph( + crate_graph, + expect_file!["../test_data/output/rust_project_crate_attrs.txt"], + ); +} + #[test] fn crate_graph_dedup_identical() { let (mut crate_graph, proc_macros) = load_cargo("regex-metadata.json"); diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs index 10abb21ace833..fa3a79e041e0f 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs @@ -391,6 +391,7 @@ impl ProjectWorkspace { sysroot.load_workspace( &RustSourceWorkspaceConfig::CargoMetadata(sysroot_metadata_config( config, + workspace_dir, &targets, toolchain.clone(), )), @@ -500,6 +501,7 @@ impl ProjectWorkspace { sysroot.load_workspace( &RustSourceWorkspaceConfig::CargoMetadata(sysroot_metadata_config( config, + project_json.project_root(), &targets, toolchain.clone(), )), @@ -555,6 +557,7 @@ impl ProjectWorkspace { let loaded_sysroot = sysroot.load_workspace( &RustSourceWorkspaceConfig::CargoMetadata(sysroot_metadata_config( config, + dir, &targets, toolchain.clone(), )), @@ -1090,6 +1093,7 @@ fn project_json_to_crate_graph( cfg, target, env, + crate_attrs, proc_macro_dylib_path, is_proc_macro, repository, @@ -1160,6 +1164,7 @@ fn project_json_to_crate_graph( } else { CrateOrigin::Local { repo: None, name: None } }, + crate_attrs.clone(), *is_proc_macro, match proc_macro_cwd { Some(path) => Arc::new(path.clone()), @@ -1464,6 +1469,7 @@ fn detached_file_to_crate_graph( repo: None, name: display_name.map(|n| n.canonical_name().to_owned()), }, + Vec::new(), false, Arc::new(detached_file.parent().to_path_buf()), crate_ws_data, @@ -1644,6 +1650,7 @@ fn add_target_crate_root( potential_cfg_options, env, origin, + Vec::new(), matches!(kind, TargetKind::Lib { is_proc_macro: true }), proc_macro_cwd, crate_ws_data, @@ -1827,6 +1834,7 @@ fn sysroot_to_crate_graph( None, Env::default(), CrateOrigin::Lang(LangCrateOrigin::from(&*stitched[krate].name)), + Vec::new(), false, Arc::new(stitched[krate].root.parent().to_path_buf()), crate_ws_data.clone(), @@ -1907,12 +1915,28 @@ fn add_dep_inner(graph: &mut CrateGraphBuilder, from: CrateBuilderId, dep: Depen fn sysroot_metadata_config( config: &CargoConfig, + workspace_root: &AbsPath, targets: &[String], toolchain_version: Option, ) -> CargoMetadataConfig { + // If the target is a JSON path, prefix it with workspace root directory. + // Since `cargo metadata` command for sysroot is run inside sysroots dir, it may fail to + // locate the target file if it is given as a relative path. 
+ let targets = targets + .iter() + .map(|target| { + if target.ends_with(".json") { + // If `target` is an absolute path, this will replace the whole path. + workspace_root.join(target).to_string() + } else { + target.to_owned() + } + }) + .collect(); + CargoMetadataConfig { features: Default::default(), - targets: targets.to_vec(), + targets, extra_args: Default::default(), extra_env: config.extra_env.clone(), toolchain_version, diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/crate-attrs.json b/src/tools/rust-analyzer/crates/project-model/test_data/crate-attrs.json new file mode 100644 index 0000000000000..b2a7e37150171 --- /dev/null +++ b/src/tools/rust-analyzer/crates/project-model/test_data/crate-attrs.json @@ -0,0 +1,13 @@ +{ + "sysroot_src": null, + "crates": [ + { + "display_name": "foo", + "root_module": "$ROOT$src/lib.rs", + "edition": "2024", + "deps": [], + "crate_attrs": ["no_std", "feature(f16,f128)", "crate_type = \"lib\""], + "is_workspace_member": true + } + ] +} diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt index 4f6ce4dc95374..a895ef53afa06 100644 --- a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt +++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt @@ -21,6 +21,7 @@ "hello-world", ), }, + crate_attrs: [], is_proc_macro: false, proc_macro_cwd: AbsPathBuf( "$ROOT$hello-world", @@ -106,6 +107,7 @@ "hello-world", ), }, + crate_attrs: [], is_proc_macro: false, proc_macro_cwd: AbsPathBuf( "$ROOT$hello-world", @@ -191,6 +193,7 @@ "hello-world", ), }, + crate_attrs: [], is_proc_macro: false, proc_macro_cwd: AbsPathBuf( "$ROOT$hello-world", @@ -276,6 +279,7 @@ "hello-world", ), }, + crate_attrs: [], is_proc_macro: false, proc_macro_cwd: AbsPathBuf( "$ROOT$hello-world", @@ -344,6 +348,7 @@ ), name: "libc", }, + crate_attrs: [], is_proc_macro: false, proc_macro_cwd: AbsPathBuf( "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98", diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt index 4f6ce4dc95374..a895ef53afa06 100644 --- a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt +++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt @@ -21,6 +21,7 @@ "hello-world", ), }, + crate_attrs: [], is_proc_macro: false, proc_macro_cwd: AbsPathBuf( "$ROOT$hello-world", @@ -106,6 +107,7 @@ "hello-world", ), }, + crate_attrs: [], is_proc_macro: false, proc_macro_cwd: AbsPathBuf( "$ROOT$hello-world", @@ -191,6 +193,7 @@ "hello-world", ), }, + crate_attrs: [], is_proc_macro: false, proc_macro_cwd: AbsPathBuf( "$ROOT$hello-world", @@ -276,6 +279,7 @@ "hello-world", ), }, + crate_attrs: [], is_proc_macro: false, proc_macro_cwd: AbsPathBuf( "$ROOT$hello-world", @@ -344,6 +348,7 @@ ), name: "libc", }, + crate_attrs: [], is_proc_macro: false, proc_macro_cwd: AbsPathBuf( "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98", diff --git 
a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt index 6862918e09ae6..9eb47947b6fd5 100644 --- a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt +++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt @@ -21,6 +21,7 @@ "hello-world", ), }, + crate_attrs: [], is_proc_macro: false, proc_macro_cwd: AbsPathBuf( "$ROOT$hello-world", @@ -105,6 +106,7 @@ "hello-world", ), }, + crate_attrs: [], is_proc_macro: false, proc_macro_cwd: AbsPathBuf( "$ROOT$hello-world", @@ -189,6 +191,7 @@ "hello-world", ), }, + crate_attrs: [], is_proc_macro: false, proc_macro_cwd: AbsPathBuf( "$ROOT$hello-world", @@ -273,6 +276,7 @@ "hello-world", ), }, + crate_attrs: [], is_proc_macro: false, proc_macro_cwd: AbsPathBuf( "$ROOT$hello-world", @@ -340,6 +344,7 @@ ), name: "libc", }, + crate_attrs: [], is_proc_macro: false, proc_macro_cwd: AbsPathBuf( "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98", diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_cfg_groups.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_cfg_groups.txt index 28ad3236ae813..32f9206a3e167 100644 --- a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_cfg_groups.txt +++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_cfg_groups.txt @@ -12,6 +12,7 @@ "hello_world", ), }, + crate_attrs: [], is_proc_macro: false, proc_macro_cwd: AbsPathBuf( "$ROOT$", @@ -62,6 +63,7 @@ "other_crate", ), }, + crate_attrs: [], is_proc_macro: false, proc_macro_cwd: AbsPathBuf( "$ROOT$", diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_crate_attrs.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_crate_attrs.txt new file mode 100644 index 0000000000000..21b484bc0c423 --- /dev/null +++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_crate_attrs.txt @@ -0,0 +1,54 @@ +{ + 0: CrateBuilder { + basic: CrateData { + root_file_id: FileId( + 1, + ), + edition: Edition2024, + dependencies: [], + origin: Local { + repo: None, + name: Some( + "foo", + ), + }, + crate_attrs: [ + "#![no_std]", + "#![feature(f16,f128)]", + "#![crate_type = \"lib\"]", + ], + is_proc_macro: false, + proc_macro_cwd: AbsPathBuf( + "$ROOT$", + ), + }, + extra: ExtraCrateData { + version: None, + display_name: Some( + CrateDisplayName { + crate_name: CrateName( + "foo", + ), + canonical_name: "foo", + }, + ), + potential_cfg_options: None, + }, + cfg_options: CfgOptions( + [ + "rust_analyzer", + "test", + "true", + ], + ), + env: Env { + entries: {}, + }, + ws_data: CrateWorkspaceData { + target: Err( + "test has no target data", + ), + toolchain: None, + }, + }, +} \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt index dabb3aa674414..de793115b9d52 100644 --- a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt +++ 
b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt @@ -12,6 +12,7 @@ "hello_world", ), }, + crate_attrs: [], is_proc_macro: false, proc_macro_cwd: AbsPathBuf( "$ROOT$", diff --git a/src/tools/rust-analyzer/crates/query-group-macro/src/queries.rs b/src/tools/rust-analyzer/crates/query-group-macro/src/queries.rs index 7698ce5fff13e..83ce8902d0c9d 100644 --- a/src/tools/rust-analyzer/crates/query-group-macro/src/queries.rs +++ b/src/tools/rust-analyzer/crates/query-group-macro/src/queries.rs @@ -48,8 +48,7 @@ impl ToTokens for TrackedQuery { quote!(#(#options),*) }) .into_iter() - .chain(self.lru.map(|lru| quote!(lru = #lru))) - .chain(Some(quote!(unsafe(non_update_return_type)))); + .chain(self.lru.map(|lru| quote!(lru = #lru))); let annotation = quote!(#[salsa_macros::tracked( #(#options),* )]); let pat_and_tys = &self.pat_and_tys; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs index f39ab1301f8f5..76256b0a22530 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -355,6 +355,7 @@ impl flags::AnalysisStats { } hir::clear_tls_solver_cache(); + unsafe { hir::collect_ty_garbage() }; let db = host.raw_database_mut(); db.trigger_lru_eviction(); @@ -390,11 +391,12 @@ impl flags::AnalysisStats { all += 1; let Err(e) = db.layout_of_adt( hir_def::AdtId::from(a), - GenericArgs::new_from_iter(interner, []), + GenericArgs::empty(interner).store(), hir_ty::ParamEnvAndCrate { param_env: db.trait_environment(a.into()), krate: a.krate(db).into(), - }, + } + .store(), ) else { continue; }; @@ -830,7 +832,7 @@ impl flags::AnalysisStats { let (previous_exprs, previous_unknown, previous_partially_unknown) = (num_exprs, num_exprs_unknown, num_exprs_partially_unknown); for (expr_id, _) in body.exprs() { - let ty = &inference_result[expr_id]; + let ty = inference_result.expr_ty(expr_id); num_exprs += 1; let unknown_or_partial = if ty.is_ty_error() { num_exprs_unknown += 1; @@ -897,15 +899,15 @@ impl flags::AnalysisStats { start.col, end.line + 1, end.col, - mismatch.expected.display(db, display_target), - mismatch.actual.display(db, display_target) + mismatch.expected.as_ref().display(db, display_target), + mismatch.actual.as_ref().display(db, display_target) )); } else { bar.println(format!( "{}: Expected {}, got {}", name.display(db, Edition::LATEST), - mismatch.expected.display(db, display_target), - mismatch.actual.display(db, display_target) + mismatch.expected.as_ref().display(db, display_target), + mismatch.actual.as_ref().display(db, display_target) )); } } @@ -913,8 +915,8 @@ impl flags::AnalysisStats { println!( r#"{},mismatch,"{}","{}""#, location_csv_expr(db, vfs, &sm(), expr_id), - mismatch.expected.display(db, display_target), - mismatch.actual.display(db, display_target) + mismatch.expected.as_ref().display(db, display_target), + mismatch.actual.as_ref().display(db, display_target) ); } } @@ -934,7 +936,7 @@ impl flags::AnalysisStats { let (previous_pats, previous_unknown, previous_partially_unknown) = (num_pats, num_pats_unknown, num_pats_partially_unknown); for (pat_id, _) in body.pats() { - let ty = &inference_result[pat_id]; + let ty = inference_result.pat_ty(pat_id); num_pats += 1; let unknown_or_partial = if ty.is_ty_error() { num_pats_unknown += 1; @@ -999,15 +1001,15 @@ impl flags::AnalysisStats { start.col, end.line + 
1, end.col, - mismatch.expected.display(db, display_target), - mismatch.actual.display(db, display_target) + mismatch.expected.as_ref().display(db, display_target), + mismatch.actual.as_ref().display(db, display_target) )); } else { bar.println(format!( "{}: Expected {}, got {}", name.display(db, Edition::LATEST), - mismatch.expected.display(db, display_target), - mismatch.actual.display(db, display_target) + mismatch.expected.as_ref().display(db, display_target), + mismatch.actual.as_ref().display(db, display_target) )); } } @@ -1015,8 +1017,8 @@ impl flags::AnalysisStats { println!( r#"{},mismatch,"{}","{}""#, location_csv_pat(db, vfs, &sm(), pat_id), - mismatch.expected.display(db, display_target), - mismatch.actual.display(db, display_target) + mismatch.expected.as_ref().display(db, display_target), + mismatch.actual.as_ref().display(db, display_target) ); } } @@ -1205,6 +1207,7 @@ impl flags::AnalysisStats { sized_bound: false, discriminant_hints: ide::DiscriminantHints::Always, parameter_hints: true, + parameter_hints_for_missing_arguments: false, generic_parameter_hints: ide::GenericParameterHints { type_hints: true, lifetime_hints: true, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index 1a2ea97204aa0..2371f7a65649e 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -98,6 +98,13 @@ config_data! { /// Code's `files.watcherExclude`. files_exclude | files_excludeDirs: Vec = vec![], + /// This config controls how frequently rust-analyzer performs its internal garbage + /// collection. It is specified in revisions, roughly equivalent to the number of changes. The + /// default is 1000. + /// + /// Setting a smaller value may help limit peak memory usage at the expense of speed. + gc_frequency: usize = 1000, + /// If this is `true`, when "Goto Implementations" and in "Implementations" lens, are triggered on a `struct` or `enum` or `union`, we filter out trait implementations that originate from `derive`s above the type. gotoImplementations_filterAdjacentDerives: bool = false, @@ -280,6 +287,9 @@ config_data! { /// Show function parameter name inlay hints at the call site. inlayHints_parameterHints_enable: bool = true, + /// Show parameter name inlay hints for missing arguments at the call site. + inlayHints_parameterHints_missingArguments_enable: bool = false, + /// Show exclusive range inlay hints.
inlayHints_rangeExclusiveHints_enable: bool = false, @@ -1698,9 +1708,11 @@ impl Config { pub fn caps(&self) -> &ClientCapabilities { &self.caps } -} -impl Config { + pub fn gc_freq(&self) -> usize { + *self.gc_frequency() + } + pub fn assist(&self, source_root: Option) -> AssistConfig { AssistConfig { snippet_cap: self.snippet_cap(), @@ -1916,6 +1928,9 @@ impl Config { type_hints: self.inlayHints_typeHints_enable().to_owned(), sized_bound: self.inlayHints_implicitSizedBoundHints_enable().to_owned(), parameter_hints: self.inlayHints_parameterHints_enable().to_owned(), + parameter_hints_for_missing_arguments: self + .inlayHints_parameterHints_missingArguments_enable() + .to_owned(), generic_parameter_hints: GenericParameterHints { type_hints: self.inlayHints_genericParameterHints_type_enable().to_owned(), lifetime_hints: self.inlayHints_genericParameterHints_lifetime_enable().to_owned(), diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index 7828f50844339..41783584a9ba4 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -193,6 +193,8 @@ pub(crate) struct GlobalState { /// which will usually end up causing a bunch of incorrect diagnostics on startup. pub(crate) incomplete_crate_graph: bool, + pub(crate) revisions_until_next_gc: usize, + pub(crate) minicore: MiniCoreRustAnalyzerInternalOnly, } @@ -319,6 +321,8 @@ impl GlobalState { incomplete_crate_graph: false, minicore: MiniCoreRustAnalyzerInternalOnly::default(), + + revisions_until_next_gc: config.gc_freq(), }; // Apply any required database inputs from the config. this.update_configuration(config); @@ -435,6 +439,15 @@ impl GlobalState { }); self.analysis_host.apply_change(change); + + if self.revisions_until_next_gc == 0 { + // SAFETY: Just changed some database inputs, all queries were canceled. + unsafe { hir::collect_ty_garbage() }; + self.revisions_until_next_gc = self.config.gc_freq(); + } else { + self.revisions_until_next_gc -= 1; + } + if !modified_ratoml_files.is_empty() || !self.config.same_source_root_parent_map(&self.local_roots_parent_map) { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs index d15b519d6983c..4d97505768a23 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs @@ -42,8 +42,8 @@ use crate::{ lsp::{ LspError, completion_item_hash, ext::{ - InternalTestingFetchConfigOption, InternalTestingFetchConfigParams, - InternalTestingFetchConfigResponse, + GetFailedObligationsParams, InternalTestingFetchConfigOption, + InternalTestingFetchConfigParams, InternalTestingFetchConfigResponse, }, from_proto, to_proto, utils::{all_edits_are_disjoint, invalid_params_error}, @@ -2575,6 +2575,18 @@ pub(crate) fn internal_testing_fetch_config( })) } +pub(crate) fn get_failed_obligations( + snap: GlobalStateSnapshot, + params: GetFailedObligationsParams, +) -> anyhow::Result { + let _p = tracing::info_span!("get_failed_obligations").entered(); + let file_id = try_default!(from_proto::file_id(&snap, ¶ms.text_document.uri)?); + let line_index = snap.file_line_index(file_id)?; + let offset = from_proto::offset(&line_index, params.position)?; + + Ok(snap.analysis.get_failed_obligations(offset, file_id)?) 
+} + /// Searches for the directory of a Rust crate given this crate's root file path. /// /// # Arguments diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs index b132323bec5b1..e6493eefef17a 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs @@ -864,3 +864,18 @@ pub struct CompletionImport { pub struct ClientCommandOptions { pub commands: Vec, } + +pub enum GetFailedObligations {} + +#[derive(Deserialize, Serialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct GetFailedObligationsParams { + pub text_document: TextDocumentIdentifier, + pub position: Position, +} + +impl Request for GetFailedObligations { + type Params = GetFailedObligationsParams; + type Result = String; + const METHOD: &'static str = "rust-analyzer/getFailedObligations"; +} diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs index 1a1c0182f87ad..6e08b7bb88d4c 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs @@ -1328,6 +1328,7 @@ impl GlobalState { .on::(handlers::handle_move_item) // .on::(handlers::internal_testing_fetch_config) + .on::(handlers::get_failed_obligations) .finish(); } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs index 48433342d51db..eb1b8c5dd0e6e 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs @@ -1488,3 +1488,42 @@ version = "0.0.0" server.request::(Default::default(), json!([])); } + +#[test] +fn test_get_failed_obligations() { + use expect_test::expect; + if skip_slow_tests() { + return; + } + + let server = Project::with_fixture( + r#" +//- /Cargo.toml +[package] +name = "foo" +version = "0.0.0" + +//- /src/lib.rs +trait Trait {} +fn requires_trait(x: T) {} + +fn test() { + requires_trait(0usize); +} +"#, + ) + .server() + .wait_until_workspace_is_loaded(); + + let res = server.send_request::( + rust_analyzer::lsp::ext::GetFailedObligationsParams { + text_document: server.doc_id("src/lib.rs"), + position: Position::new(4, 19), + }, + ); + + let res: serde_json::Value = serde_json::from_str(res.as_str().unwrap()).unwrap(); + let arr = res.as_array().unwrap(); + assert_eq!(arr.len(), 2); + expect![[r#"{"goal":"Goal { param_env: ParamEnv { clauses: [] }, predicate: Binder { value: TraitPredicate(usize: Trait, polarity:Positive), bound_vars: [] } }","result":"Err(NoSolution)","depth":0,"candidates":[]}"#]].assert_eq(&arr[0].to_string()); +} diff --git a/src/tools/rust-analyzer/crates/span/src/ast_id.rs b/src/tools/rust-analyzer/crates/span/src/ast_id.rs index bd49e08b10fa2..e54e0bd2fcc6d 100644 --- a/src/tools/rust-analyzer/crates/span/src/ast_id.rs +++ b/src/tools/rust-analyzer/crates/span/src/ast_id.rs @@ -48,6 +48,11 @@ pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId = pub const FIXUP_ERASED_FILE_AST_ID_MARKER: ErasedFileAstId = ErasedFileAstId(pack_hash_index_and_kind(0, 0, ErasedFileAstIdKind::Fixup as u32)); +/// [`ErasedFileAstId`] used as the span for syntax nodes that should not be mapped down to +/// macro expansion. Any `Span` containing this file id is to be considered fake. 
+pub const NO_DOWNMAP_ERASED_FILE_AST_ID_MARKER: ErasedFileAstId = + ErasedFileAstId(pack_hash_index_and_kind(0, 0, ErasedFileAstIdKind::NoDownmap as u32)); + /// This is a type erased FileAstId. #[derive(Clone, Copy, PartialEq, Eq, Hash)] pub struct ErasedFileAstId(u32); @@ -95,6 +100,7 @@ impl fmt::Debug for ErasedFileAstId { BlockExpr, AsmExpr, Fixup, + NoDownmap, ); if f.alternate() { write!(f, "{kind}[{:04X}, {}]", self.hash_value(), self.index()) @@ -150,6 +156,9 @@ enum ErasedFileAstIdKind { // because incrementality is not a problem, they will always be the only item in the macro file, // and memory usage also not because they're rare. AsmExpr, + /// Represents a fake [`ErasedFileAstId`] that should not be mapped down to macro expansion + /// result. + NoDownmap, /// Keep this last. Root, } @@ -158,7 +167,7 @@ enum ErasedFileAstIdKind { const HASH_BITS: u32 = 16; const INDEX_BITS: u32 = 11; const KIND_BITS: u32 = 5; -const _: () = assert!(ErasedFileAstIdKind::Fixup as u32 <= ((1 << KIND_BITS) - 1)); +const _: () = assert!(ErasedFileAstIdKind::Root as u32 <= ((1 << KIND_BITS) - 1)); const _: () = assert!(HASH_BITS + INDEX_BITS + KIND_BITS == u32::BITS); #[inline] diff --git a/src/tools/rust-analyzer/crates/span/src/lib.rs b/src/tools/rust-analyzer/crates/span/src/lib.rs index c44b0198b72c2..1a8aaeb71517c 100644 --- a/src/tools/rust-analyzer/crates/span/src/lib.rs +++ b/src/tools/rust-analyzer/crates/span/src/lib.rs @@ -14,7 +14,7 @@ mod map; pub use self::{ ast_id::{ AstIdMap, AstIdNode, ErasedFileAstId, FIXUP_ERASED_FILE_AST_ID_MARKER, FileAstId, - ROOT_ERASED_FILE_AST_ID, + NO_DOWNMAP_ERASED_FILE_AST_ID_MARKER, ROOT_ERASED_FILE_AST_ID, }, hygiene::{SyntaxContext, Transparency}, map::{RealSpanMap, SpanMap}, diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs index b2904ce3c07c6..98d759aef2093 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs @@ -690,6 +690,13 @@ pub fn expr_macro(path: ast::Path, tt: ast::TokenTree) -> ast::MacroExpr { pub fn expr_ref(expr: ast::Expr, exclusive: bool) -> ast::Expr { expr_from_text(&if exclusive { format!("&mut {expr}") } else { format!("&{expr}") }) } +pub fn expr_raw_ref(expr: ast::Expr, exclusive: bool) -> ast::Expr { + expr_from_text(&if exclusive { + format!("&raw mut {expr}") + } else { + format!("&raw const {expr}") + }) +} pub fn expr_reborrow(expr: ast::Expr) -> ast::Expr { expr_from_text(&format!("&mut *{expr}")) } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs index b872221bf7113..800dd5f4ac329 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs @@ -1096,6 +1096,16 @@ impl ast::MatchGuard { } } +impl ast::MatchArm { + pub fn parent_match(&self) -> ast::MatchExpr { + self.syntax() + .parent() + .and_then(|it| it.parent()) + .and_then(ast::MatchExpr::cast) + .expect("MatchArms are always nested in MatchExprs") + } +} + impl From for ast::AnyHasAttrs { fn from(node: ast::Item) -> Self { Self::new(node) diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs index aca6fcfb2e45f..7cf9e2bf14f9f 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs +++ 
b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs @@ -748,6 +748,22 @@ impl SyntaxFactory { ast.into() } + pub fn expr_raw_ref(&self, expr: ast::Expr, exclusive: bool) -> ast::Expr { + let ast::Expr::RefExpr(ast) = + make::expr_raw_ref(expr.clone(), exclusive).clone_for_update() + else { + unreachable!() + }; + + if let Some(mut mapping) = self.mappings() { + let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone()); + builder.map_node(expr.syntax().clone(), ast.expr().unwrap().syntax().clone()); + builder.finish(&mut mapping); + } + + ast.into() + } + pub fn expr_closure( &self, pats: impl IntoIterator, diff --git a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs index 5e8b250c24a07..01e4215cfb868 100644 --- a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs +++ b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs @@ -239,6 +239,7 @@ impl ChangeFixture { Some(meta.cfg), meta.env, origin, + meta.crate_attrs, false, proc_macro_cwd.clone(), crate_ws_data.clone(), @@ -292,6 +293,7 @@ impl ChangeFixture { String::from("__ra_is_test_fixture"), )]), CrateOrigin::Lang(LangCrateOrigin::Core), + Vec::new(), false, proc_macro_cwd.clone(), crate_ws_data.clone(), @@ -322,6 +324,7 @@ impl ChangeFixture { Some(default_cfg), default_env, CrateOrigin::Local { repo: None, name: None }, + Vec::new(), false, proc_macro_cwd.clone(), crate_ws_data.clone(), @@ -385,6 +388,7 @@ impl ChangeFixture { String::from("__ra_is_test_fixture"), )]), CrateOrigin::Local { repo: None, name: None }, + Vec::new(), true, proc_macro_cwd, crate_ws_data, @@ -635,6 +639,7 @@ struct FileMeta { cfg: CfgOptions, edition: Edition, env: Env, + crate_attrs: Vec, introduce_new_source_root: Option, } @@ -666,6 +671,7 @@ impl FileMeta { cfg, edition: f.edition.map_or(Edition::CURRENT, |v| Edition::from_str(&v).unwrap()), env: f.env.into_iter().collect(), + crate_attrs: f.crate_attrs, introduce_new_source_root, } } diff --git a/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs b/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs index 831d2b30c1b65..1f6262c897c7f 100644 --- a/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs +++ b/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs @@ -107,6 +107,11 @@ pub struct Fixture { /// /// Syntax: `env:PATH=/bin,RUST_LOG=debug` pub env: FxHashMap, + /// Specifies extra crate-level attributes injected at the top of the crate root file. + /// This must be used with `crate` meta. + /// + /// Syntax: `crate-attr:no_std crate-attr:features(f16,f128) crate-attr:cfg(target_arch="x86")` + pub crate_attrs: Vec, /// Introduces a new source root. This file **and the following /// files** will belong the new source root. This must be used /// with `crate` meta. 
@@ -275,6 +280,7 @@ impl FixtureWithProjectMeta { let mut krate = None; let mut deps = Vec::new(); + let mut crate_attrs = Vec::new(); let mut extern_prelude = None; let mut edition = None; let mut cfgs = Vec::new(); @@ -292,6 +298,7 @@ impl FixtureWithProjectMeta { match key { "crate" => krate = Some(value.to_owned()), "deps" => deps = value.split(',').map(|it| it.to_owned()).collect(), + "crate-attr" => crate_attrs.push(value.to_owned()), "extern-prelude" => { if value.is_empty() { extern_prelude = Some(Vec::new()); @@ -334,6 +341,7 @@ impl FixtureWithProjectMeta { line, krate, deps, + crate_attrs, extern_prelude, cfgs, edition, @@ -548,7 +556,7 @@ fn parse_fixture_gets_full_meta() { //- toolchain: nightly //- proc_macros: identity //- minicore: coerce_unsized -//- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b,atom env:OUTDIR=path/to,OTHER=foo +//- /lib.rs crate:foo deps:bar,baz crate-attr:no_std crate-attr:features(f16,f128) crate-attr:cfg(target_arch="x86") cfg:foo=a,bar=b,atom env:OUTDIR=path/to,OTHER=foo mod m; "#, ); @@ -561,6 +569,14 @@ mod m; assert_eq!("mod m;\n", meta.text); assert_eq!("foo", meta.krate.as_ref().unwrap()); + assert_eq!( + vec![ + "no_std".to_owned(), + "features(f16,f128)".to_owned(), + "cfg(target_arch=\"x86\")".to_owned() + ], + meta.crate_attrs + ); assert_eq!("/lib.rs", meta.path); assert_eq!(2, meta.env.len()); } diff --git a/src/tools/rust-analyzer/docs/book/src/configuration_generated.md b/src/tools/rust-analyzer/docs/book/src/configuration_generated.md index b36576b4bb205..6b7ef049645c8 100644 --- a/src/tools/rust-analyzer/docs/book/src/configuration_generated.md +++ b/src/tools/rust-analyzer/docs/book/src/configuration_generated.md @@ -635,6 +635,17 @@ Default: `"client"` Controls file watching implementation. +## rust-analyzer.gc.frequency {#gc.frequency} + +Default: `1000` + +This config controls how frequently rust-analyzer performs its internal garbage +collection. It is specified in revisions, roughly equivalent to the number of changes. The +default is 1000. + +Setting a smaller value may help limit peak memory usage at the expense of speed. + + ## rust-analyzer.gotoImplementations.filterAdjacentDerives {#gotoImplementations.filterAdjacentDerives} Default: `false` @@ -1070,6 +1081,13 @@ Default: `true` Show function parameter name inlay hints at the call site. +## rust-analyzer.inlayHints.parameterHints.missingArguments.enable {#inlayHints.parameterHints.missingArguments.enable} + +Default: `false` + +Show parameter name inlay hints for missing arguments at the call site. + + ## rust-analyzer.inlayHints.rangeExclusiveHints.enable {#inlayHints.rangeExclusiveHints.enable} Default: `false` diff --git a/src/tools/rust-analyzer/docs/book/src/contributing/lsp-extensions.md b/src/tools/rust-analyzer/docs/book/src/contributing/lsp-extensions.md index 5922f0b551d64..5d21c37806ddd 100644 --- a/src/tools/rust-analyzer/docs/book/src/contributing/lsp-extensions.md +++ b/src/tools/rust-analyzer/docs/book/src/contributing/lsp-extensions.md @@ -1,5 +1,5 @@
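As a minimal sketch of how the two newly documented options above fit together, assuming a VS Code-style `settings.json` (only the key names come from the generated documentation; the values are arbitrary examples):

```json
{
  "rust-analyzer.gc.frequency": 500,
  "rust-analyzer.inlayHints.parameterHints.missingArguments.enable": true
}
```

A lower `gc.frequency` trades some analysis throughput for lower peak memory, while the inlay-hint option only changes what is rendered at call sites with missing arguments.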