From 840cd38f8dfddfab316a136f9b24badfae8b70de Mon Sep 17 00:00:00 2001 From: A4-Tacks Date: Wed, 4 Jun 2025 10:55:08 +0800 Subject: [PATCH 01/76] Add ide-assist: remove else branches --- .../src/handlers/remove_else_branches.rs | 90 +++++++++++++++++++ .../crates/ide-assists/src/lib.rs | 2 + .../crates/ide-assists/src/tests/generated.rs | 40 +++++++++ 3 files changed, 132 insertions(+) create mode 100644 src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_else_branches.rs diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_else_branches.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_else_branches.rs new file mode 100644 index 0000000000000..6a02c37015d33 --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_else_branches.rs @@ -0,0 +1,90 @@ +use syntax::{AstNode, SyntaxKind, T, TextRange, ast}; + +use crate::{AssistContext, AssistId, Assists}; + +// Assist: remove_else_branches +// +// Removes the `else` keyword and else branches. +// +// ``` +// fn main() { +// if true { +// let _ = 2; +// } $0else { +// unreachable!(); +// } +// } +// ``` +// -> +// ``` +// fn main() { +// if true { +// let _ = 2; +// } +// } +// ``` +// --- +// ``` +// fn main() { +// let _x = 2 $0else { unreachable!() }; +// } +// ``` +// -> +// ``` +// fn main() { +// let _x = 2; +// } +// ``` +pub(crate) fn remove_else_branches(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let else_token = ctx.find_token_syntax_at_offset(T![else])?; + let else_branches = ctx + .find_node_at_range::() + .and_then(|if_expr| if_expr.else_branch()?.syntax().clone().into()) + .or_else(|| { + ctx.find_node_at_range::()? + .let_else()? + .block_expr()? + .syntax() + .clone() + .into() + })?; + + let target = TextRange::cover(else_token.text_range(), else_branches.text_range()); + acc.add( + AssistId::refactor("remove_else_branches"), + "Remove `else` branches", + target, + |builder| { + let mut editor = builder.make_editor(&else_token.parent().unwrap()); + match else_token.prev_token() { + Some(it) if it.kind() == SyntaxKind::WHITESPACE => editor.delete(it), + _ => (), + } + match else_token.next_token() { + Some(it) if it.kind() == SyntaxKind::WHITESPACE => editor.delete(it), + _ => (), + } + editor.delete(else_token); + editor.delete(else_branches); + builder.add_file_edits(ctx.vfs_file_id(), editor); + }, + ) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::tests::check_assist_not_applicable; + + #[test] + fn test_remove_else_branches_not_on_else_token() { + check_assist_not_applicable( + remove_else_branches, + r#" +fn main() { + let _x = 2 else {$0 unreachable!() }; +} +"#, + ); + } +} diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs index c2604432032d7..f4d5136c1995f 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs @@ -199,6 +199,7 @@ mod handlers { mod qualify_path; mod raw_string; mod remove_dbg; + mod remove_else_branches; mod remove_mut; mod remove_parentheses; mod remove_underscore; @@ -337,6 +338,7 @@ mod handlers { raw_string::remove_hash, remove_dbg::remove_dbg, remove_mut::remove_mut, + remove_else_branches::remove_else_branches, remove_parentheses::remove_parentheses, remove_underscore::remove_underscore, remove_unused_imports::remove_unused_imports, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs 
b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs index 72f7195cbd773..74c663450c69a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs @@ -2738,6 +2738,46 @@ fn main() { ) } +#[test] +fn doctest_remove_else_branches() { + check_doc_test( + "remove_else_branches", + r#####" +fn main() { + if true { + let _ = 2; + } $0else { + unreachable!(); + } +} +"#####, + r#####" +fn main() { + if true { + let _ = 2; + } +} +"#####, + ) +} + +#[test] +fn doctest_remove_else_branches_1() { + check_doc_test( + "remove_else_branches", + r#####" +fn main() { + let _x = 2 $0else { unreachable!() }; +} +"#####, + r#####" +fn main() { + let _x = 2; +} +"#####, + ) +} + #[test] fn doctest_remove_hash() { check_doc_test( From 2969b0e2c5c70e27833baff42cd19dc809ac8615 Mon Sep 17 00:00:00 2001 From: A4-Tacks Date: Tue, 2 Sep 2025 15:45:58 +0800 Subject: [PATCH 02/76] Fix extract multiple item in impl for extract_module Example --- ```rust struct Foo; impl Foo { $0fn foo() {} fn bar() {}$0 fn baz() {} } ``` **Before this PR**: ```rust struct Foo; impl Foo { mod modname { pub(crate) fn foo() {} pub(crate) fn bar() {} } fn baz() {} } ``` **After this PR**: ```rust struct Foo; impl Foo { fn baz() {} } mod modname { use super::Foo; impl Foo { pub(crate) fn foo() {} pub(crate) fn bar() {} } } ``` --- .../src/handlers/extract_module.rs | 152 +++++++++++------- 1 file changed, 92 insertions(+), 60 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs index dad19bfb8a2c8..a17ae4885e62c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs @@ -1,4 +1,4 @@ -use std::ops::RangeInclusive; +use std::{iter::once, ops::RangeInclusive}; use hir::{HasSource, ModuleSource}; use ide_db::{ @@ -63,19 +63,6 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti syntax::NodeOrToken::Token(t) => t.parent()?, }; - //If the selection is inside impl block, we need to place new module outside impl block, - //as impl blocks cannot contain modules - - let mut impl_parent: Option = None; - let mut impl_child_count: usize = 0; - if let Some(parent_assoc_list) = node.parent() - && let Some(parent_impl) = parent_assoc_list.parent() - && let Some(impl_) = ast::Impl::cast(parent_impl) - { - impl_child_count = parent_assoc_list.children().count(); - impl_parent = Some(impl_); - } - let mut curr_parent_module: Option = None; if let Some(mod_syn_opt) = node.ancestors().find(|it| ast::Module::can_cast(it.kind())) { curr_parent_module = ast::Module::cast(mod_syn_opt); @@ -94,7 +81,22 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti return None; } - let old_item_indent = module.body_items[0].indent_level(); + let mut old_item_indent = module.body_items[0].indent_level(); + let old_items: Vec<_> = module.use_items.iter().chain(&module.body_items).cloned().collect(); + + // If the selection is inside impl block, we need to place new module outside impl block, + // as impl blocks cannot contain modules + + let mut impl_parent: Option = None; + let mut impl_child_count: usize = 0; + if let Some(parent_assoc_list) = module.body_items[0].syntax().parent() + && let Some(parent_impl) = parent_assoc_list.parent() + && let Some(impl_) = 
ast::Impl::cast(parent_impl) + { + impl_child_count = parent_assoc_list.children().count(); + old_item_indent = impl_.indent_level(); + impl_parent = Some(impl_); + } acc.add( AssistId::refactor_extract("extract_module"), @@ -127,7 +129,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti let import_items = module.resolve_imports(curr_parent_module, ctx); module.change_visibility(record_fields); - let module_def = generate_module_def(&impl_parent, module, old_item_indent).to_string(); + let module_def = generate_module_def(&impl_parent, &module).indent(old_item_indent); let mut usages_to_be_processed_for_cur_file = vec![]; for (file_id, usages) in usages_to_be_processed { @@ -149,27 +151,32 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti if let Some(impl_) = impl_parent { // Remove complete impl block if it has only one child (as such it will be empty // after deleting that child) - let node_to_be_removed = if impl_child_count == 1 { - impl_.syntax() + let nodes_to_be_removed = if impl_child_count == old_items.len() { + vec![impl_.syntax()] } else { //Remove selected node - &node + old_items.iter().map(|it| it.syntax()).collect() }; - builder.delete(node_to_be_removed.text_range()); - // Remove preceding indentation from node - if let Some(range) = indent_range_before_given_node(node_to_be_removed) { - builder.delete(range); + for node_to_be_removed in nodes_to_be_removed { + builder.delete(node_to_be_removed.text_range()); + // Remove preceding indentation from node + if let Some(range) = indent_range_before_given_node(node_to_be_removed) { + builder.delete(range); + } } - builder.insert(impl_.syntax().text_range().end(), format!("\n\n{module_def}")); + builder.insert( + impl_.syntax().text_range().end(), + format!("\n\n{old_item_indent}{module_def}"), + ); } else { for import_item in import_items { if !module_text_range.contains_range(import_item) { builder.delete(import_item); } } - builder.replace(module_text_range, module_def) + builder.replace(module_text_range, module_def.to_string()) } }, ) @@ -177,34 +184,35 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti fn generate_module_def( parent_impl: &Option, - module: Module, - old_indent: IndentLevel, + Module { name, body_items, use_items }: &Module, ) -> ast::Module { - let Module { name, body_items, use_items } = module; - let items = if let Some(self_ty) = parent_impl.as_ref().and_then(|imp| imp.self_ty()) { + let items: Vec<_> = if let Some(impl_) = parent_impl.as_ref() + && let Some(self_ty) = impl_.self_ty() + { let assoc_items = body_items - .into_iter() + .iter() .map(|item| item.syntax().clone()) .filter_map(ast::AssocItem::cast) .map(|it| it.indent(IndentLevel(1))) .collect_vec(); - let assoc_item_list = make::assoc_item_list(Some(assoc_items)); - let impl_ = make::impl_(None, None, None, self_ty.clone(), None, Some(assoc_item_list)); + let assoc_item_list = make::assoc_item_list(Some(assoc_items)).clone_for_update(); + let impl_ = impl_.reset_indent(); + ted::replace(impl_.get_or_create_assoc_item_list().syntax(), assoc_item_list.syntax()); // Add the import for enum/struct corresponding to given impl block let use_impl = make_use_stmt_of_node_with_super(self_ty.syntax()); - let mut module_body_items = use_items; - module_body_items.insert(0, use_impl); - module_body_items.push(ast::Item::Impl(impl_)); - module_body_items + once(use_impl) + .chain(use_items.iter().cloned()) + .chain(once(ast::Item::Impl(impl_))) + 
.collect() } else { - [use_items, body_items].concat() + use_items.iter().chain(body_items).cloned().collect() }; let items = items.into_iter().map(|it| it.reset_indent().indent(IndentLevel(1))).collect_vec(); let module_body = make::item_list(Some(items)); let module_name = make::name(name); - make::mod_(module_name, Some(module_body)).indent(old_indent) + make::mod_(module_name, Some(module_body)) } fn make_use_stmt_of_node_with_super(node_syntax: &SyntaxNode) -> ast::Item { @@ -1400,28 +1408,54 @@ mod modname { fn test_if_inside_impl_block_generate_module_outside() { check_assist( extract_module, - r" - struct A {} + r"struct A {} impl A { -$0fn foo() {}$0 + $0fn foo() {}$0 fn bar() {} } ", - r" - struct A {} + r"struct A {} impl A { fn bar() {} } -mod modname { - use super::A; + mod modname { + use super::A; - impl A { - pub(crate) fn foo() {} - } -} + impl A { + pub(crate) fn foo() {} + } + } + ", + ); + + check_assist( + extract_module, + r"struct A {} + + impl A { + $0fn foo() {} + fn bar() {}$0 + fn baz() {} + } + ", + r"struct A {} + + impl A { + fn baz() {} + } + + mod modname { + use super::A; + + impl A { + pub(crate) fn foo() {} + + pub(crate) fn bar() {} + } + } ", ) } @@ -1430,27 +1464,25 @@ mod modname { fn test_if_inside_impl_block_generate_module_outside_but_impl_block_having_one_child() { check_assist( extract_module, - r" - struct A {} + r"struct A {} struct B {} impl A { $0fn foo(x: B) {}$0 } ", - r" - struct A {} + r"struct A {} struct B {} -mod modname { - use super::A; + mod modname { + use super::A; - use super::B; + use super::B; - impl A { - pub(crate) fn foo(x: B) {} - } -} + impl A { + pub(crate) fn foo(x: B) {} + } + } ", ) } From 419902412b4a970cb12401675a2d7471508e4cc7 Mon Sep 17 00:00:00 2001 From: A4-Tacks Date: Sat, 20 Sep 2025 11:17:24 +0800 Subject: [PATCH 03/76] Add ide-assist: flip_range_expr Flips operands of a range expression. Example --- ```rust fn main() { let _ = 90..$02; } ``` -> ```rust fn main() { let _ = 2..90; } ``` --- ```rust fn main() { let _ = 90..$0; } ``` -> ```rust fn main() { let _ = ..90; } ``` --- .../ide-assists/src/handlers/flip_binexpr.rs | 71 ++++++++++++++++++- .../crates/ide-assists/src/lib.rs | 1 + .../crates/ide-assists/src/tests/generated.rs | 34 +++++++++ 3 files changed, 105 insertions(+), 1 deletion(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs index 247e8109abc9d..8f2306e9037e7 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs @@ -1,6 +1,7 @@ use syntax::{ SyntaxKind, T, - ast::{self, AstNode, BinExpr, syntax_factory::SyntaxFactory}, + ast::{self, AstNode, BinExpr, RangeItem, syntax_factory::SyntaxFactory}, + syntax_editor::Position, }; use crate::{AssistContext, AssistId, Assists}; @@ -87,6 +88,74 @@ impl From for FlipAction { } } +// Assist: flip_range_expr +// +// Flips operands of a range expression. 
+// +// ``` +// fn main() { +// let _ = 90..$02; +// } +// ``` +// -> +// ``` +// fn main() { +// let _ = 2..90; +// } +// ``` +// --- +// ``` +// fn main() { +// let _ = 90..$0; +// } +// ``` +// -> +// ``` +// fn main() { +// let _ = ..90; +// } +// ``` +pub(crate) fn flip_range_expr(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let range_expr = ctx.find_node_at_offset::()?; + let op = range_expr.op_token()?; + let start = range_expr.start(); + let end = range_expr.end(); + + if !op.text_range().contains_range(ctx.selection_trimmed()) { + return None; + } + if start.is_none() && end.is_none() { + return None; + } + + acc.add( + AssistId::refactor_rewrite("flip_range_expr"), + "Flip range expression", + op.text_range(), + |builder| { + let mut edit = builder.make_editor(range_expr.syntax()); + + match (start, end) { + (Some(start), Some(end)) => { + edit.replace(start.syntax(), end.syntax()); + edit.replace(end.syntax(), start.syntax()); + } + (Some(start), None) => { + edit.delete(start.syntax()); + edit.insert(Position::after(&op), start.syntax().clone_for_update()); + } + (None, Some(end)) => { + edit.delete(end.syntax()); + edit.insert(Position::before(&op), end.syntax().clone_for_update()); + } + (None, None) => (), + } + + builder.add_file_edits(ctx.vfs_file_id(), edit); + }, + ) +} + #[cfg(test)] mod tests { use super::*; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs index 4682c04732389..0e160af652f91 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs @@ -283,6 +283,7 @@ mod handlers { extract_type_alias::extract_type_alias, fix_visibility::fix_visibility, flip_binexpr::flip_binexpr, + flip_binexpr::flip_range_expr, flip_comma::flip_comma, flip_or_pattern::flip_or_pattern, flip_trait_bound::flip_trait_bound, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs index 91348be97eb72..7b042ed4dc998 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs @@ -1298,6 +1298,40 @@ fn foo() { ) } +#[test] +fn doctest_flip_range_expr() { + check_doc_test( + "flip_range_expr", + r#####" +fn main() { + let _ = 90..$02; +} +"#####, + r#####" +fn main() { + let _ = 2..90; +} +"#####, + ) +} + +#[test] +fn doctest_flip_range_expr_1() { + check_doc_test( + "flip_range_expr", + r#####" +fn main() { + let _ = 90..$0; +} +"#####, + r#####" +fn main() { + let _ = ..90; +} +"#####, + ) +} + #[test] fn doctest_flip_trait_bound() { check_doc_test( From 404f7499b36709e47e703a6d8cbc68e55c24bbab Mon Sep 17 00:00:00 2001 From: A4-Tacks Date: Sat, 20 Sep 2025 21:41:27 +0800 Subject: [PATCH 04/76] Fix shorthand field pat for destructure_tuple_binding Example --- ```rust struct S { field: (i32, i32) } fn main() { let S { $0field } = S { field: (2, 3) }; let v = field.0 + field.1; } ``` **Before this PR**: ```rust struct S { field: (i32, i32) } fn main() { let S { ($0_0, _1) } = S { field: (2, 3) }; let v = _0 + _1; } ``` **After this PR**: ```rust struct S { field: (i32, i32) } fn main() { let S { field: ($0_0, _1) } = S { field: (2, 3) }; let v = _0 + _1; } ``` --- .../src/handlers/destructure_tuple_binding.rs | 54 ++++++++++++++++++- 1 file changed, 53 insertions(+), 1 deletion(-) diff --git 
a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs index f09389f8302f3..e2afc0bf130ee 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs @@ -7,6 +7,7 @@ use ide_db::{ }; use itertools::Itertools; use syntax::{ + T, ast::{self, AstNode, FieldExpr, HasName, IdentPat, make}, ted, }; @@ -179,6 +180,11 @@ fn edit_tuple_assignment( .map(|name| ast::Pat::from(make::ident_pat(is_ref, is_mut, make::name(name)))); make::tuple_pat(fields).clone_for_update() }; + let is_shorthand_field = ident_pat + .name() + .as_ref() + .and_then(ast::RecordPatField::for_field_name) + .is_some_and(|field| field.colon_token().is_none()); if let Some(cap) = ctx.config.snippet_cap { // place cursor on first tuple name @@ -190,12 +196,13 @@ fn edit_tuple_assignment( } } - AssignmentEdit { ident_pat, tuple_pat, in_sub_pattern } + AssignmentEdit { ident_pat, tuple_pat, in_sub_pattern, is_shorthand_field } } struct AssignmentEdit { ident_pat: ast::IdentPat, tuple_pat: ast::TuplePat, in_sub_pattern: bool, + is_shorthand_field: bool, } impl AssignmentEdit { @@ -203,6 +210,9 @@ impl AssignmentEdit { // with sub_pattern: keep original tuple and add subpattern: `tup @ (_0, _1)` if self.in_sub_pattern { self.ident_pat.set_pat(Some(self.tuple_pat.into())) + } else if self.is_shorthand_field { + ted::insert(ted::Position::after(self.ident_pat.syntax()), self.tuple_pat.syntax()); + ted::insert_raw(ted::Position::after(self.ident_pat.syntax()), make::token(T![:])); } else { ted::replace(self.ident_pat.syntax(), self.tuple_pat.syntax()) } @@ -799,6 +809,48 @@ fn main() { ) } + #[test] + fn in_record_shorthand_field() { + check_assist( + assist, + r#" +struct S { field: (i32, i32) } +fn main() { + let S { $0field } = S { field: (2, 3) }; + let v = field.0 + field.1; +} + "#, + r#" +struct S { field: (i32, i32) } +fn main() { + let S { field: ($0_0, _1) } = S { field: (2, 3) }; + let v = _0 + _1; +} + "#, + ) + } + + #[test] + fn in_record_field() { + check_assist( + assist, + r#" +struct S { field: (i32, i32) } +fn main() { + let S { field: $0t } = S { field: (2, 3) }; + let v = t.0 + t.1; +} + "#, + r#" +struct S { field: (i32, i32) } +fn main() { + let S { field: ($0_0, _1) } = S { field: (2, 3) }; + let v = _0 + _1; +} + "#, + ) + } + #[test] fn in_nested_tuple() { check_assist( From 5647e83ba1bae599c7e6e40c4712fb66ee45f7dd Mon Sep 17 00:00:00 2001 From: A4-Tacks Date: Sat, 27 Sep 2025 12:46:11 +0800 Subject: [PATCH 05/76] =?UTF-8?q?Add=20`doc=20=3D=20include=5Fstr!("?= =?UTF-8?q?=E2=80=A6")`=20completion?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/completions/attribute.rs | 3 ++- .../ide-completion/src/tests/attribute.rs | 23 +++++++++++++++++++ 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs index e174b0c8922ab..297ce3339e022 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs @@ -231,7 +231,7 @@ const fn attr( macro_rules! 
attrs { // attributes applicable to all items [@ { item $($tt:tt)* } {$($acc:tt)*}] => { - attrs!(@ { $($tt)* } { $($acc)*, "deprecated", "doc", "dochidden", "docalias", "must_use", "no_mangle" }) + attrs!(@ { $($tt)* } { $($acc)*, "deprecated", "doc", "dochidden", "docalias", "docinclude", "must_use", "no_mangle" }) }; // attributes applicable to all adts [@ { adt $($tt:tt)* } {$($acc:tt)*}] => { @@ -345,6 +345,7 @@ const ATTRIBUTES: &[AttrCompletion] = &[ attr(r#"doc = "…""#, Some("doc"), Some(r#"doc = "${0:docs}""#)), attr(r#"doc(alias = "…")"#, Some("docalias"), Some(r#"doc(alias = "${0:docs}")"#)), attr(r#"doc(hidden)"#, Some("dochidden"), Some(r#"doc(hidden)"#)), + attr(r#"doc = include_str!("…")"#, Some("docinclude"), Some(r#"doc = include_str!("$0")"#)), attr("expect(…)", Some("expect"), Some("expect(${0:lint})")), attr( r#"export_name = "…""#, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs index 30e1e108c6c40..cd660e496f27c 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs @@ -33,6 +33,7 @@ pub struct Foo(#[m$0] i32); at diagnostic::do_not_recommend at diagnostic::on_unimplemented at doc = "…" + at doc = include_str!("…") at doc(alias = "…") at doc(hidden) at expect(…) @@ -85,6 +86,7 @@ struct Foo; at deprecated at derive(…) at doc = "…" + at doc = include_str!("…") at doc(alias = "…") at doc(hidden) at expect(…) @@ -158,6 +160,7 @@ fn attr_on_source_file() { at deny(…) at deprecated at doc = "…" + at doc = include_str!("…") at doc(alias = "…") at doc(hidden) at expect(…) @@ -189,6 +192,7 @@ fn attr_on_module() { at deny(…) at deprecated at doc = "…" + at doc = include_str!("…") at doc(alias = "…") at doc(hidden) at expect(…) @@ -212,6 +216,7 @@ fn attr_on_module() { at deny(…) at deprecated at doc = "…" + at doc = include_str!("…") at doc(alias = "…") at doc(hidden) at expect(…) @@ -238,6 +243,7 @@ fn attr_on_macro_rules() { at deny(…) at deprecated at doc = "…" + at doc = include_str!("…") at doc(alias = "…") at doc(hidden) at expect(…) @@ -264,6 +270,7 @@ fn attr_on_macro_def() { at deny(…) at deprecated at doc = "…" + at doc = include_str!("…") at doc(alias = "…") at doc(hidden) at expect(…) @@ -288,6 +295,7 @@ fn attr_on_extern_crate() { at deny(…) at deprecated at doc = "…" + at doc = include_str!("…") at doc(alias = "…") at doc(hidden) at expect(…) @@ -313,6 +321,7 @@ fn attr_on_use() { at deny(…) at deprecated at doc = "…" + at doc = include_str!("…") at doc(alias = "…") at doc(hidden) at expect(…) @@ -337,6 +346,7 @@ fn attr_on_type_alias() { at deny(…) at deprecated at doc = "…" + at doc = include_str!("…") at doc(alias = "…") at doc(hidden) at expect(…) @@ -368,6 +378,7 @@ struct Foo; at derive(…) at derive_const macro derive_const at doc = "…" + at doc = include_str!("…") at doc(alias = "…") at doc(hidden) at expect(…) @@ -396,6 +407,7 @@ fn attr_on_enum() { at deprecated at derive(…) at doc = "…" + at doc = include_str!("…") at doc(alias = "…") at doc(hidden) at expect(…) @@ -422,6 +434,7 @@ fn attr_on_const() { at deny(…) at deprecated at doc = "…" + at doc = include_str!("…") at doc(alias = "…") at doc(hidden) at expect(…) @@ -446,6 +459,7 @@ fn attr_on_static() { at deny(…) at deprecated at doc = "…" + at doc = include_str!("…") at doc(alias = "…") at doc(hidden) at expect(…) @@ -476,6 +490,7 @@ fn attr_on_trait() { at deprecated at 
diagnostic::on_unimplemented at doc = "…" + at doc = include_str!("…") at doc(alias = "…") at doc(hidden) at expect(…) @@ -502,6 +517,7 @@ fn attr_on_impl() { at deprecated at diagnostic::do_not_recommend at doc = "…" + at doc = include_str!("…") at doc(alias = "…") at doc(hidden) at expect(…) @@ -522,6 +538,7 @@ fn attr_on_impl() { at deny(…) at deprecated at doc = "…" + at doc = include_str!("…") at doc(alias = "…") at doc(hidden) at expect(…) @@ -548,6 +565,7 @@ fn attr_with_qualifier() { at deprecated at do_not_recommend at doc = "…" + at doc = include_str!("…") at doc(alias = "…") at doc(hidden) at expect(…) @@ -566,6 +584,7 @@ fn attr_with_qualifier() { at deny(…) at deprecated at doc = "…" + at doc = include_str!("…") at doc(alias = "…") at doc(hidden) at expect(…) @@ -616,6 +635,7 @@ fn attr_on_extern_block() { at deny(…) at deprecated at doc = "…" + at doc = include_str!("…") at doc(alias = "…") at doc(hidden) at expect(…) @@ -637,6 +657,7 @@ fn attr_on_extern_block() { at deny(…) at deprecated at doc = "…" + at doc = include_str!("…") at doc(alias = "…") at doc(hidden) at expect(…) @@ -682,6 +703,7 @@ fn attr_on_fn() { at deny(…) at deprecated at doc = "…" + at doc = include_str!("…") at doc(alias = "…") at doc(hidden) at expect(…) @@ -724,6 +746,7 @@ fn attr_in_source_file_end() { at diagnostic::do_not_recommend at diagnostic::on_unimplemented at doc = "…" + at doc = include_str!("…") at doc(alias = "…") at doc(hidden) at expect(…) From 6390c89472e2feb255a20257b3d7ecd522eece36 Mon Sep 17 00:00:00 2001 From: bit-aloo Date: Sat, 27 Sep 2025 12:03:04 +0530 Subject: [PATCH 06/76] Use FileId::MAX for id assertion in PathInterner::intern --- src/tools/rust-analyzer/crates/vfs/src/path_interner.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/rust-analyzer/crates/vfs/src/path_interner.rs b/src/tools/rust-analyzer/crates/vfs/src/path_interner.rs index 64f51976053d4..225bfc7218b44 100644 --- a/src/tools/rust-analyzer/crates/vfs/src/path_interner.rs +++ b/src/tools/rust-analyzer/crates/vfs/src/path_interner.rs @@ -28,7 +28,7 @@ impl PathInterner { /// - Else, returns a newly allocated id. 
pub(crate) fn intern(&mut self, path: VfsPath) -> FileId { let (id, _added) = self.map.insert_full(path); - assert!(id < u32::MAX as usize); + assert!(id < FileId::MAX as usize); FileId(id as u32) } From f6eb4ea86b043dce17d6b4388b67e72f8fd33b00 Mon Sep 17 00:00:00 2001 From: A4-Tacks Date: Tue, 30 Sep 2025 13:04:01 +0800 Subject: [PATCH 07/76] Fix let-expr in lhs for convert_to_guarded_return Example --- ```rust fn main() { if$0 let Ok(x) = Err(92) && let Ok(y) = Ok(37) && x < 30 && let Some(y) = Some(8) { foo(x, y); } } ``` **Before this PR**: ```rust fn main() { let Ok(x) = Err(92) else { return }; if !(let Ok(y) = Ok(37) && x < 30) { return; } let Some(y) = Some(8) else { return }; foo(x, y); } ``` **After this PR**: ```rust fn main() { let Ok(x) = Err(92) else { return }; let Ok(y) = Ok(37) else { return }; if x >= 30 { return; } let Some(y) = Some(8) else { return }; foo(x, y); } ``` --- .../src/handlers/convert_to_guarded_return.rs | 104 +++++++++++++++++- 1 file changed, 98 insertions(+), 6 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs index 82213ae3217e7..6b5a37513cbe5 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs @@ -254,20 +254,25 @@ fn early_expression( fn flat_let_chain(mut expr: ast::Expr) -> Vec { let mut chains = vec![]; + let mut reduce_cond = |rhs| { + if !matches!(rhs, ast::Expr::LetExpr(_)) + && let Some(last) = chains.pop_if(|last| !matches!(last, ast::Expr::LetExpr(_))) + { + chains.push(make::expr_bin_op(rhs, ast::BinaryOp::LogicOp(ast::LogicOp::And), last)); + } else { + chains.push(rhs); + } + }; while let ast::Expr::BinExpr(bin_expr) = &expr && bin_expr.op_kind() == Some(ast::BinaryOp::LogicOp(ast::LogicOp::And)) && let (Some(lhs), Some(rhs)) = (bin_expr.lhs(), bin_expr.rhs()) { - if let Some(last) = chains.pop_if(|last| !matches!(last, ast::Expr::LetExpr(_))) { - chains.push(make::expr_bin_op(rhs, ast::BinaryOp::LogicOp(ast::LogicOp::And), last)); - } else { - chains.push(rhs); - } + reduce_cond(rhs); expr = lhs; } - chains.push(expr); + reduce_cond(expr); chains.reverse(); chains } @@ -493,6 +498,93 @@ fn main() { let Some(y) = Some(8) else { return }; foo(x, y); } +"#, + ); + + check_assist( + convert_to_guarded_return, + r#" +fn main() { + if$0 let Ok(x) = Err(92) + && let Ok(y) = Ok(37) + && x < 30 + && let Some(y) = Some(8) + { + foo(x, y); + } +} +"#, + r#" +fn main() { + let Ok(x) = Err(92) else { return }; + let Ok(y) = Ok(37) else { return }; + if x >= 30 { + return; + } + let Some(y) = Some(8) else { return }; + foo(x, y); +} +"#, + ); + + check_assist( + convert_to_guarded_return, + r#" +fn main() { + if$0 cond + && let Ok(x) = Err(92) + && let Ok(y) = Ok(37) + && x < 30 + && let Some(y) = Some(8) + { + foo(x, y); + } +} +"#, + r#" +fn main() { + if !cond { + return; + } + let Ok(x) = Err(92) else { return }; + let Ok(y) = Ok(37) else { return }; + if x >= 30 { + return; + } + let Some(y) = Some(8) else { return }; + foo(x, y); +} +"#, + ); + + check_assist( + convert_to_guarded_return, + r#" +fn main() { + if$0 cond + && foo() + && let Ok(x) = Err(92) + && let Ok(y) = Ok(37) + && x < 30 + && let Some(y) = Some(8) + { + foo(x, y); + } +} +"#, + r#" +fn main() { + if !(cond && foo()) { + return; + } + let Ok(x) = Err(92) else { return }; + let Ok(y) = Ok(37) else { 
return }; + if x >= 30 { + return; + } + let Some(y) = Some(8) else { return }; + foo(x, y); +} "#, ); } From 26cf7899b7d5643af8492cc3a88681d43a649840 Mon Sep 17 00:00:00 2001 From: A4-Tacks Date: Wed, 1 Oct 2025 12:24:50 +0800 Subject: [PATCH 08/76] Add applicable on `else` for invert_if Example --- ```rust fn f() { if cond { 3 * 2 } e$0lse { 1 } } ``` -> ```rust fn f() { if !cond { 1 } else { 3 * 2 } } ``` --- .../crates/ide-assists/src/handlers/invert_if.rs | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/invert_if.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/invert_if.rs index 7576d2fab976f..bf82d8df9b58f 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/invert_if.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/invert_if.rs @@ -27,7 +27,9 @@ use crate::{ // } // ``` pub(crate) fn invert_if(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { - let if_keyword = ctx.find_token_syntax_at_offset(T![if])?; + let if_keyword = ctx + .find_token_syntax_at_offset(T![if]) + .or_else(|| ctx.find_token_syntax_at_offset(T![else]))?; let expr = ast::IfExpr::cast(if_keyword.parent()?)?; let if_range = if_keyword.text_range(); let cursor_in_range = if_range.contains_range(ctx.selection_trimmed()); @@ -111,6 +113,15 @@ mod tests { ) } + #[test] + fn invert_if_on_else_keyword() { + check_assist( + invert_if, + "fn f() { if cond { 3 * 2 } e$0lse { 1 } }", + "fn f() { if !cond { 1 } else { 3 * 2 } }", + ) + } + #[test] fn invert_if_doesnt_apply_with_cursor_not_on_if() { check_assist_not_applicable(invert_if, "fn f() { if !$0cond { 3 * 2 } else { 1 } }") From 92eef5348ac32ac8934bc9397f11b1fd6bdb2eea Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Sun, 12 Oct 2025 11:13:11 +0300 Subject: [PATCH 09/76] Migrate inhabitedness checking to the new solver --- .../diagnostics/match_check/pat_analysis.rs | 29 +++- .../crates/hir-ty/src/inhabitedness.rs | 148 ++++++++++-------- .../crates/hir-ty/src/mir/lower.rs | 6 +- .../crates/hir-ty/src/next_solver/def_id.rs | 38 ++++- .../crates/hir-ty/src/next_solver/interner.rs | 4 +- 5 files changed, 138 insertions(+), 87 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs index eb20d3c51ff41..76f50c1948354 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs @@ -19,6 +19,11 @@ use crate::{ db::HirDatabase, infer::normalize, inhabitedness::{is_enum_variant_uninhabited_from, is_ty_uninhabited_from}, + next_solver::{ + DbInterner, TypingMode, + infer::{DbInternerInferExt, InferCtxt}, + mapping::ChalkToNextSolver, + }, }; use super::{FieldPat, Pat, PatKind}; @@ -28,7 +33,7 @@ use Constructor::*; // Re-export r-a-specific versions of all these types. pub(crate) type DeconstructedPat<'db> = rustc_pattern_analysis::pat::DeconstructedPat>; -pub(crate) type MatchArm<'db> = rustc_pattern_analysis::MatchArm<'db, MatchCheckCtx<'db>>; +pub(crate) type MatchArm<'a, 'db> = rustc_pattern_analysis::MatchArm<'a, MatchCheckCtx<'db>>; pub(crate) type WitnessPat<'db> = rustc_pattern_analysis::pat::WitnessPat>; /// [Constructor] uses this in unimplemented variants. 
@@ -71,6 +76,7 @@ pub(crate) struct MatchCheckCtx<'db> { pub(crate) db: &'db dyn HirDatabase, exhaustive_patterns: bool, env: Arc>, + infcx: InferCtxt<'db>, } impl<'db> MatchCheckCtx<'db> { @@ -82,15 +88,17 @@ impl<'db> MatchCheckCtx<'db> { ) -> Self { let def_map = module.crate_def_map(db); let exhaustive_patterns = def_map.is_unstable_feature_enabled(&sym::exhaustive_patterns); - Self { module, body, db, exhaustive_patterns, env } + let interner = DbInterner::new_with(db, Some(env.krate), env.block); + let infcx = interner.infer_ctxt().build(TypingMode::typeck_for_body(interner, body.into())); + Self { module, body, db, exhaustive_patterns, env, infcx } } - pub(crate) fn compute_match_usefulness( + pub(crate) fn compute_match_usefulness<'a>( &self, - arms: &[MatchArm<'db>], + arms: &[MatchArm<'a, 'db>], scrut_ty: Ty, known_valid_scrutinee: Option, - ) -> Result, ()> { + ) -> Result, ()> { if scrut_ty.contains_unknown() { return Err(()); } @@ -107,7 +115,12 @@ impl<'db> MatchCheckCtx<'db> { } fn is_uninhabited(&self, ty: &Ty) -> bool { - is_ty_uninhabited_from(self.db, ty, self.module, self.env.clone()) + is_ty_uninhabited_from( + &self.infcx, + ty.to_nextsolver(self.infcx.interner), + self.module, + self.env.clone(), + ) } /// Returns whether the given ADT is from another crate declared `#[non_exhaustive]`. @@ -429,9 +442,9 @@ impl PatCx for MatchCheckCtx<'_> { let mut variants = IndexVec::with_capacity(enum_data.variants.len()); for &(variant, _, _) in enum_data.variants.iter() { let is_uninhabited = is_enum_variant_uninhabited_from( - cx.db, + &cx.infcx, variant, - subst, + subst.to_nextsolver(cx.infcx.interner), cx.module, self.env.clone(), ); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs b/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs index 826f19cf0b68d..7ebc2df6f75dd 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs @@ -1,60 +1,62 @@ //! Type inhabitedness logic. use std::ops::ControlFlow::{self, Break, Continue}; -use chalk_ir::{ - DebruijnIndex, - visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor}, -}; use hir_def::{AdtId, EnumVariantId, ModuleId, VariantId, visibility::Visibility}; use rustc_hash::FxHashSet; +use rustc_type_ir::{ + TypeSuperVisitable, TypeVisitable, TypeVisitor, + inherent::{AdtDef, IntoKind}, +}; use triomphe::Arc; use crate::{ - AliasTy, Binders, Interner, Substitution, TraitEnvironment, Ty, TyKind, + TraitEnvironment, consteval::try_const_usize, db::HirDatabase, - next_solver::{DbInterner, mapping::ChalkToNextSolver}, + next_solver::{ + DbInterner, EarlyBinder, GenericArgs, Ty, TyKind, + infer::{InferCtxt, traits::ObligationCause}, + obligation_ctxt::ObligationCtxt, + }, }; // FIXME: Turn this into a query, it can be quite slow /// Checks whether a type is visibly uninhabited from a particular module. 
-pub(crate) fn is_ty_uninhabited_from( - db: &dyn HirDatabase, - ty: &Ty, +pub(crate) fn is_ty_uninhabited_from<'db>( + infcx: &InferCtxt<'db>, + ty: Ty<'db>, target_mod: ModuleId, - env: Arc>, + env: Arc>, ) -> bool { let _p = tracing::info_span!("is_ty_uninhabited_from", ?ty).entered(); - let mut uninhabited_from = - UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default(), env }; - let inhabitedness = ty.visit_with(&mut uninhabited_from, DebruijnIndex::INNERMOST); + let mut uninhabited_from = UninhabitedFrom::new(infcx, target_mod, env); + let inhabitedness = ty.visit_with(&mut uninhabited_from); inhabitedness == BREAK_VISIBLY_UNINHABITED } // FIXME: Turn this into a query, it can be quite slow /// Checks whether a variant is visibly uninhabited from a particular module. -pub(crate) fn is_enum_variant_uninhabited_from( - db: &dyn HirDatabase, +pub(crate) fn is_enum_variant_uninhabited_from<'db>( + infcx: &InferCtxt<'db>, variant: EnumVariantId, - subst: &Substitution, + subst: GenericArgs<'db>, target_mod: ModuleId, - env: Arc>, + env: Arc>, ) -> bool { let _p = tracing::info_span!("is_enum_variant_uninhabited_from").entered(); - let mut uninhabited_from = - UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default(), env }; + let mut uninhabited_from = UninhabitedFrom::new(infcx, target_mod, env); let inhabitedness = uninhabited_from.visit_variant(variant.into(), subst); inhabitedness == BREAK_VISIBLY_UNINHABITED } -struct UninhabitedFrom<'a> { +struct UninhabitedFrom<'a, 'db> { target_mod: ModuleId, - recursive_ty: FxHashSet, + recursive_ty: FxHashSet>, // guard for preventing stack overflow in non trivial non terminating types max_depth: usize, - db: &'a dyn HirDatabase, - env: Arc>, + infcx: &'a InferCtxt<'db>, + env: Arc>, } const CONTINUE_OPAQUELY_INHABITED: ControlFlow = Continue(()); @@ -62,63 +64,73 @@ const BREAK_VISIBLY_UNINHABITED: ControlFlow = Break(Visibly #[derive(PartialEq, Eq)] struct VisiblyUninhabited; -impl TypeVisitor for UninhabitedFrom<'_> { - type BreakTy = VisiblyUninhabited; - - fn as_dyn(&mut self) -> &mut dyn TypeVisitor { - self - } +impl<'db> TypeVisitor> for UninhabitedFrom<'_, 'db> { + type Result = ControlFlow; - fn visit_ty( - &mut self, - ty: &Ty, - outer_binder: DebruijnIndex, - ) -> ControlFlow { - if self.recursive_ty.contains(ty) || self.max_depth == 0 { + fn visit_ty(&mut self, mut ty: Ty<'db>) -> ControlFlow { + if self.recursive_ty.contains(&ty) || self.max_depth == 0 { // rustc considers recursive types always inhabited. I think it is valid to consider // recursive types as always uninhabited, but we should do what rustc is doing. return CONTINUE_OPAQUELY_INHABITED; } - self.recursive_ty.insert(ty.clone()); + self.recursive_ty.insert(ty); self.max_depth -= 1; - let interner = DbInterner::new_with(self.db, None, None); - let r = match ty.kind(Interner) { - TyKind::Adt(adt, subst) => self.visit_adt(adt.0, subst), - TyKind::Never => BREAK_VISIBLY_UNINHABITED, - TyKind::Tuple(..) => ty.super_visit_with(self, outer_binder), - TyKind::Array(item_ty, len) => { - match try_const_usize(self.db, len.to_nextsolver(interner)) { - Some(0) | None => CONTINUE_OPAQUELY_INHABITED, - Some(1..) => item_ty.super_visit_with(self, outer_binder), - } - } - TyKind::Alias(AliasTy::Projection(projection)) => { - // FIXME: I think this currently isn't used for monomorphized bodies, so there is no need to handle - // `TyKind::AssociatedType`, but perhaps in the future it will. 
- let normalized = self.db.normalize_projection(projection.clone(), self.env.clone()); - self.visit_ty(&normalized, outer_binder) + + if matches!(ty.kind(), TyKind::Alias(..)) { + let mut ocx = ObligationCtxt::new(self.infcx); + match ocx.structurally_normalize_ty(&ObligationCause::dummy(), self.env.env, ty) { + Ok(it) => ty = it, + Err(_) => return CONTINUE_OPAQUELY_INHABITED, } + } + + let r = match ty.kind() { + TyKind::Adt(adt, subst) => self.visit_adt(adt.def_id().0, subst), + TyKind::Never => BREAK_VISIBLY_UNINHABITED, + TyKind::Tuple(..) => ty.super_visit_with(self), + TyKind::Array(item_ty, len) => match try_const_usize(self.infcx.interner.db, len) { + Some(0) | None => CONTINUE_OPAQUELY_INHABITED, + Some(1..) => item_ty.super_visit_with(self), + }, _ => CONTINUE_OPAQUELY_INHABITED, }; - self.recursive_ty.remove(ty); + self.recursive_ty.remove(&ty); self.max_depth += 1; r } +} - fn interner(&self) -> Interner { - Interner +impl<'a, 'db> UninhabitedFrom<'a, 'db> { + fn new( + infcx: &'a InferCtxt<'db>, + target_mod: ModuleId, + env: Arc>, + ) -> Self { + Self { target_mod, recursive_ty: FxHashSet::default(), max_depth: 500, infcx, env } + } + + #[inline] + fn interner(&self) -> DbInterner<'db> { + self.infcx.interner + } + + #[inline] + fn db(&self) -> &'db dyn HirDatabase { + self.interner().db } -} -impl UninhabitedFrom<'_> { - fn visit_adt(&mut self, adt: AdtId, subst: &Substitution) -> ControlFlow { + fn visit_adt( + &mut self, + adt: AdtId, + subst: GenericArgs<'db>, + ) -> ControlFlow { // An ADT is uninhabited iff all its variants uninhabited. match adt { // rustc: For now, `union`s are never considered uninhabited. AdtId::UnionId(_) => CONTINUE_OPAQUELY_INHABITED, AdtId::StructId(s) => self.visit_variant(s.into(), subst), AdtId::EnumId(e) => { - let enum_data = e.enum_variants(self.db); + let enum_data = e.enum_variants(self.db()); for &(variant, _, _) in enum_data.variants.iter() { let variant_inhabitedness = self.visit_variant(variant.into(), subst); @@ -135,17 +147,17 @@ impl UninhabitedFrom<'_> { fn visit_variant( &mut self, variant: VariantId, - subst: &Substitution, + subst: GenericArgs<'db>, ) -> ControlFlow { - let variant_data = variant.fields(self.db); + let variant_data = variant.fields(self.db()); let fields = variant_data.fields(); if fields.is_empty() { return CONTINUE_OPAQUELY_INHABITED; } let is_enum = matches!(variant, VariantId::EnumVariantId(..)); - let field_tys = self.db.field_types(variant); - let field_vis = if is_enum { None } else { Some(self.db.field_visibilities(variant)) }; + let field_tys = self.db().field_types_ns(variant); + let field_vis = if is_enum { None } else { Some(self.db().field_visibilities(variant)) }; for (fid, _) in fields.iter() { self.visit_field(field_vis.as_ref().map(|it| it[fid]), &field_tys[fid], subst)?; @@ -156,12 +168,12 @@ impl UninhabitedFrom<'_> { fn visit_field( &mut self, vis: Option, - ty: &Binders, - subst: &Substitution, + ty: &EarlyBinder<'db, Ty<'db>>, + subst: GenericArgs<'db>, ) -> ControlFlow { - if vis.is_none_or(|it| it.is_visible_from(self.db, self.target_mod)) { - let ty = ty.clone().substitute(Interner, subst); - ty.visit_with(self, DebruijnIndex::INNERMOST) + if vis.is_none_or(|it| it.is_visible_from(self.db(), self.target_mod)) { + let ty = ty.instantiate(self.interner(), subst); + ty.visit_with(self) } else { CONTINUE_OPAQUELY_INHABITED } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs index 92f9cd42615ec..1439c43e99e84 100644 
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs @@ -43,7 +43,6 @@ use crate::{ next_solver::{ Const, DbInterner, ParamConst, Region, TyKind, TypingMode, UnevaluatedConst, infer::{DbInternerInferExt, InferCtxt}, - mapping::NextSolverToChalk, }, traits::FnTrait, }; @@ -303,6 +302,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { let resolver = owner.resolver(db); let env = db.trait_environment_for_body(owner); let interner = DbInterner::new_with(db, Some(env.krate), env.block); + // FIXME(next-solver): Is `non_body_analysis()` correct here? Don't we want to reveal opaque types defined by this body? let infcx = interner.infer_ctxt().build(TypingMode::non_body_analysis()); MirLowerCtx { @@ -1766,8 +1766,8 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { fn is_uninhabited(&self, expr_id: ExprId) -> bool { is_ty_uninhabited_from( - self.db, - &self.infer[expr_id].to_chalk(self.interner()), + &self.infcx, + self.infer[expr_id], self.owner.module(self.db), self.env.clone(), ) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/def_id.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/def_id.rs index 789be3b731b13..8525d4bc96e6d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/def_id.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/def_id.rs @@ -1,8 +1,8 @@ //! Definition of `SolverDefId` use hir_def::{ - AdtId, CallableDefId, ConstId, EnumId, EnumVariantId, FunctionId, GeneralConstId, GenericDefId, - ImplId, StaticId, StructId, TraitId, TypeAliasId, UnionId, + AdtId, CallableDefId, ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, + GeneralConstId, GenericDefId, ImplId, StaticId, StructId, TraitId, TypeAliasId, UnionId, }; use rustc_type_ir::inherent; use stdx::impl_from; @@ -29,6 +29,8 @@ pub enum SolverDefId { InternedClosureId(InternedClosureId), InternedCoroutineId(InternedCoroutineId), InternedOpaqueTyId(InternedOpaqueTyId), + EnumVariantId(EnumVariantId), + // FIXME(next-solver): Do we need the separation of `Ctor`? It duplicates some variants. 
Ctor(Ctor), } @@ -73,6 +75,16 @@ impl std::fmt::Debug for SolverDefId { SolverDefId::InternedOpaqueTyId(id) => { f.debug_tuple("InternedOpaqueTyId").field(&id).finish() } + SolverDefId::EnumVariantId(id) => { + let parent_enum = id.loc(db).parent; + f.debug_tuple("EnumVariantId") + .field(&format_args!( + "\"{}::{}\"", + db.enum_signature(parent_enum).name.as_str(), + parent_enum.enum_variants(db).variant_name_by_id(id).unwrap().as_str() + )) + .finish() + } SolverDefId::Ctor(Ctor::Struct(id)) => { f.debug_tuple("Ctor").field(&db.struct_signature(id).name.as_str()).finish() } @@ -101,6 +113,7 @@ impl_from!( InternedClosureId, InternedCoroutineId, InternedOpaqueTyId, + EnumVariantId, Ctor for SolverDefId ); @@ -129,6 +142,18 @@ impl From for SolverDefId { } } +impl From for SolverDefId { + #[inline] + fn from(value: DefWithBodyId) -> Self { + match value { + DefWithBodyId::FunctionId(id) => id.into(), + DefWithBodyId::StaticId(id) => id.into(), + DefWithBodyId::ConstId(id) => id.into(), + DefWithBodyId::VariantId(id) => id.into(), + } + } +} + impl TryFrom for GenericDefId { type Error = SolverDefId; @@ -141,10 +166,11 @@ impl TryFrom for GenericDefId { SolverDefId::StaticId(static_id) => GenericDefId::StaticId(static_id), SolverDefId::TraitId(trait_id) => GenericDefId::TraitId(trait_id), SolverDefId::TypeAliasId(type_alias_id) => GenericDefId::TypeAliasId(type_alias_id), - SolverDefId::InternedClosureId(_) => return Err(value), - SolverDefId::InternedCoroutineId(_) => return Err(value), - SolverDefId::InternedOpaqueTyId(_) => return Err(value), - SolverDefId::Ctor(_) => return Err(value), + SolverDefId::InternedClosureId(_) + | SolverDefId::InternedCoroutineId(_) + | SolverDefId::InternedOpaqueTyId(_) + | SolverDefId::EnumVariantId(_) + | SolverDefId::Ctor(_) => return Err(value), }) } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs index 3fd8e7b39dd7d..cfa8b5b8a7f7d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs @@ -1211,6 +1211,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { | SolverDefId::AdtId(_) | SolverDefId::TraitId(_) | SolverDefId::ImplId(_) + | SolverDefId::EnumVariantId(..) | SolverDefId::Ctor(..) | SolverDefId::InternedOpaqueTyId(..) 
=> panic!(), }; @@ -1969,8 +1970,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { self, defining_anchor: Self::LocalDefId, ) -> Self::LocalDefIds { - // FIXME(next-solver) - unimplemented!() + Default::default() } type Probe = rustc_type_ir::solve::inspect::Probe>; From 5736f47b032a2d4021ee3b1084689d4e084d0a4e Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Sun, 12 Oct 2025 12:25:14 +0300 Subject: [PATCH 10/76] Migrate hir-ty body validation diagnostics to the next solver --- .../crates/hir-ty/src/chalk_ext.rs | 59 +---- .../crates/hir-ty/src/diagnostics/expr.rs | 154 +++++++------ .../hir-ty/src/diagnostics/match_check.rs | 111 +++++---- .../diagnostics/match_check/pat_analysis.rs | 218 +++++++++--------- .../rust-analyzer/crates/hir-ty/src/lib.rs | 2 +- 5 files changed, 248 insertions(+), 296 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs index e9960374c6f5f..ea3ed1589d756 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs @@ -1,32 +1,22 @@ //! Various extensions traits for Chalk types. -use chalk_ir::Mutability; use hir_def::{FunctionId, ItemContainerId, Lookup, TraitId}; use crate::{ - AdtId, Binders, CallableDefId, CallableSig, DynTy, Interner, Lifetime, ProjectionTy, - Substitution, ToChalk, TraitRef, Ty, TyKind, TypeFlags, WhereClause, db::HirDatabase, - from_assoc_type_id, from_chalk_trait_id, generics::generics, to_chalk_trait_id, - utils::ClosureSubst, + Binders, CallableDefId, CallableSig, DynTy, Interner, ProjectionTy, Substitution, ToChalk, + TraitRef, Ty, TyKind, db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, + generics::generics, to_chalk_trait_id, utils::ClosureSubst, }; pub(crate) trait TyExt { fn is_unit(&self) -> bool; fn is_unknown(&self) -> bool; - fn contains_unknown(&self) -> bool; - fn as_adt(&self) -> Option<(hir_def::AdtId, &Substitution)>; fn as_tuple(&self) -> Option<&Substitution>; fn as_fn_def(&self, db: &dyn HirDatabase) -> Option; - fn as_reference(&self) -> Option<(&Ty, Lifetime, Mutability)>; fn callable_def(&self, db: &dyn HirDatabase) -> Option; fn callable_sig(&self, db: &dyn HirDatabase) -> Option; - - fn strip_references(&self) -> &Ty; - - /// If this is a `dyn Trait`, returns that trait. - fn dyn_trait(&self) -> Option; } impl TyExt for Ty { @@ -38,17 +28,6 @@ impl TyExt for Ty { matches!(self.kind(Interner), TyKind::Error) } - fn contains_unknown(&self) -> bool { - self.data(Interner).flags.contains(TypeFlags::HAS_ERROR) - } - - fn as_adt(&self) -> Option<(hir_def::AdtId, &Substitution)> { - match self.kind(Interner) { - TyKind::Adt(AdtId(adt), parameters) => Some((*adt, parameters)), - _ => None, - } - } - fn as_tuple(&self) -> Option<&Substitution> { match self.kind(Interner) { TyKind::Tuple(_, substs) => Some(substs), @@ -63,13 +42,6 @@ impl TyExt for Ty { } } - fn as_reference(&self) -> Option<(&Ty, Lifetime, Mutability)> { - match self.kind(Interner) { - TyKind::Ref(mutability, lifetime, ty) => Some((ty, lifetime.clone(), *mutability)), - _ => None, - } - } - fn callable_def(&self, db: &dyn HirDatabase) -> Option { match self.kind(Interner) { &TyKind::FnDef(def, ..) => Some(ToChalk::from_chalk(db, def)), @@ -85,31 +57,6 @@ impl TyExt for Ty { _ => None, } } - - fn dyn_trait(&self) -> Option { - let trait_ref = match self.kind(Interner) { - // The principal trait bound should be the first element of the bounds. 
This is an - // invariant ensured by `TyLoweringContext::lower_dyn_trait()`. - // FIXME: dyn types may not have principal trait and we don't want to return auto trait - // here. - TyKind::Dyn(dyn_ty) => dyn_ty.bounds.skip_binders().interned().first().and_then(|b| { - match b.skip_binders() { - WhereClause::Implemented(trait_ref) => Some(trait_ref), - _ => None, - } - }), - _ => None, - }?; - Some(from_chalk_trait_id(trait_ref.trait_id)) - } - - fn strip_references(&self) -> &Ty { - let mut t: &Ty = self; - while let TyKind::Ref(_mutability, _lifetime, ty) = t.kind(Interner) { - t = ty; - } - t - } } pub trait ProjectionTyExt { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs index 7b6fb994ecaf5..0eca0c09d6907 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs @@ -15,6 +15,7 @@ use intern::sym; use itertools::Itertools; use rustc_hash::FxHashSet; use rustc_pattern_analysis::constructor::Constructor; +use rustc_type_ir::inherent::{AdtDef, IntoKind}; use syntax::{ AstNode, ast::{self, UnaryOp}, @@ -23,16 +24,18 @@ use tracing::debug; use triomphe::Arc; use typed_arena::Arena; -use crate::next_solver::DbInterner; -use crate::next_solver::mapping::NextSolverToChalk; use crate::{ - Adjust, InferenceResult, Interner, TraitEnvironment, Ty, TyExt, TyKind, + Adjust, InferenceResult, TraitEnvironment, db::HirDatabase, diagnostics::match_check::{ self, pat_analysis::{self, DeconstructedPat, MatchCheckCtx, WitnessPat}, }, display::{DisplayTarget, HirDisplay}, + next_solver::{ + DbInterner, Ty, TyKind, TypingMode, + infer::{DbInternerInferExt, InferCtxt}, + }, }; pub(crate) use hir_def::{ @@ -77,6 +80,8 @@ impl BodyValidationDiagnostic { let body = db.body(owner); let env = db.trait_environment_for_body(owner); let interner = DbInterner::new_with(db, Some(env.krate), env.block); + let infcx = + interner.infer_ctxt().build(TypingMode::typeck_for_body(interner, owner.into())); let mut validator = ExprValidator { owner, body, @@ -84,9 +89,9 @@ impl BodyValidationDiagnostic { diagnostics: Vec::new(), validate_lints, env, - interner, + infcx, }; - validator.validate_body(db); + validator.validate_body(); validator.diagnostics } } @@ -98,11 +103,17 @@ struct ExprValidator<'db> { env: Arc>, diagnostics: Vec, validate_lints: bool, - interner: DbInterner<'db>, + infcx: InferCtxt<'db>, } impl<'db> ExprValidator<'db> { - fn validate_body(&mut self, db: &'db dyn HirDatabase) { + #[inline] + fn db(&self) -> &'db dyn HirDatabase { + self.infcx.interner.db + } + + fn validate_body(&mut self) { + let db = self.db(); let mut filter_map_next_checker = None; // we'll pass &mut self while iterating over body.exprs, so they need to be disjoint let body = Arc::clone(&self.body); @@ -124,19 +135,19 @@ impl<'db> ExprValidator<'db> { match expr { Expr::Match { expr, arms } => { - self.validate_match(id, *expr, arms, db); + self.validate_match(id, *expr, arms); } Expr::Call { .. } | Expr::MethodCall { .. } => { - self.validate_call(db, id, expr, &mut filter_map_next_checker); + self.validate_call(id, expr, &mut filter_map_next_checker); } Expr::Closure { body: body_expr, .. } => { self.check_for_trailing_return(*body_expr, &body); } Expr::If { .. } => { - self.check_for_unnecessary_else(id, expr, db); + self.check_for_unnecessary_else(id, expr); } Expr::Block { .. } | Expr::Async { .. } | Expr::Unsafe { .. 
} => { - self.validate_block(db, expr); + self.validate_block(expr); } _ => {} } @@ -157,10 +168,9 @@ impl<'db> ExprValidator<'db> { fn validate_call( &mut self, - db: &dyn HirDatabase, call_id: ExprId, expr: &Expr, - filter_map_next_checker: &mut Option, + filter_map_next_checker: &mut Option>, ) { if !self.validate_lints { return; @@ -176,8 +186,9 @@ impl<'db> ExprValidator<'db> { None => return, }; - let checker = filter_map_next_checker - .get_or_insert_with(|| FilterMapNextChecker::new(&self.owner.resolver(db), db)); + let checker = filter_map_next_checker.get_or_insert_with(|| { + FilterMapNextChecker::new(&self.owner.resolver(self.db()), self.db()) + }); if checker.check(call_id, receiver, &callee).is_some() { self.diagnostics.push(BodyValidationDiagnostic::ReplaceFilterMapNextWithFindMap { @@ -186,27 +197,20 @@ impl<'db> ExprValidator<'db> { } if let Some(receiver_ty) = self.infer.type_of_expr_with_adjust(*receiver) { - checker.prev_receiver_ty = Some(receiver_ty.to_chalk(self.interner)); + checker.prev_receiver_ty = Some(receiver_ty); } } } - fn validate_match( - &mut self, - match_expr: ExprId, - scrutinee_expr: ExprId, - arms: &[MatchArm], - db: &dyn HirDatabase, - ) { + fn validate_match(&mut self, match_expr: ExprId, scrutinee_expr: ExprId, arms: &[MatchArm]) { let Some(scrut_ty) = self.infer.type_of_expr_with_adjust(scrutinee_expr) else { return; }; - let scrut_ty = scrut_ty.to_chalk(self.interner); - if scrut_ty.contains_unknown() { + if scrut_ty.references_non_lt_error() { return; } - let cx = MatchCheckCtx::new(self.owner.module(db), self.owner, db, self.env.clone()); + let cx = MatchCheckCtx::new(self.owner.module(self.db()), &self.infcx, self.env.clone()); let pattern_arena = Arena::new(); let mut m_arms = Vec::with_capacity(arms.len()); @@ -217,8 +221,7 @@ impl<'db> ExprValidator<'db> { let Some(pat_ty) = self.infer.type_of_pat_with_adjust(arm.pat) else { return; }; - let pat_ty = pat_ty.to_chalk(self.interner); - if pat_ty.contains_unknown() { + if pat_ty.references_non_lt_error() { return; } @@ -235,14 +238,14 @@ impl<'db> ExprValidator<'db> { if (pat_ty == scrut_ty || scrut_ty .as_reference() - .map(|(match_expr_ty, ..)| *match_expr_ty == pat_ty) + .map(|(match_expr_ty, ..)| match_expr_ty == pat_ty) .unwrap_or(false)) && types_of_subpatterns_do_match(arm.pat, &self.body, &self.infer) { // If we had a NotUsefulMatchArm diagnostic, we could // check the usefulness of each pattern as we added it // to the matrix here. 
- let pat = self.lower_pattern(&cx, arm.pat, db, &mut has_lowering_errors); + let pat = self.lower_pattern(&cx, arm.pat, &mut has_lowering_errors); let m_arm = pat_analysis::MatchArm { pat: pattern_arena.alloc(pat), has_guard: arm.guard.is_some(), @@ -258,15 +261,12 @@ impl<'db> ExprValidator<'db> { return; } - let known_valid_scrutinee = Some(self.is_known_valid_scrutinee(scrutinee_expr, db)); - let report = match cx.compute_match_usefulness( - m_arms.as_slice(), - scrut_ty.clone(), - known_valid_scrutinee, - ) { - Ok(report) => report, - Err(()) => return, - }; + let known_valid_scrutinee = Some(self.is_known_valid_scrutinee(scrutinee_expr)); + let report = + match cx.compute_match_usefulness(m_arms.as_slice(), scrut_ty, known_valid_scrutinee) { + Ok(report) => report, + Err(()) => return, + }; // FIXME Report unreachable arms // https://github.com/rust-lang/rust/blob/f31622a50/compiler/rustc_mir_build/src/thir/pattern/check_match.rs#L200 @@ -277,10 +277,10 @@ impl<'db> ExprValidator<'db> { match_expr, uncovered_patterns: missing_match_arms( &cx, - &scrut_ty, + scrut_ty, witnesses, m_arms.is_empty(), - self.owner.krate(db), + self.owner.krate(self.db()), ), }); } @@ -291,7 +291,9 @@ impl<'db> ExprValidator<'db> { // While the above function in rustc uses thir exprs, r-a doesn't have them. // So, the logic here is getting same result as "hir lowering + match with lowered thir" // with "hir only" - fn is_known_valid_scrutinee(&self, scrutinee_expr: ExprId, db: &dyn HirDatabase) -> bool { + fn is_known_valid_scrutinee(&self, scrutinee_expr: ExprId) -> bool { + let db = self.db(); + if self .infer .expr_adjustments @@ -311,20 +313,18 @@ impl<'db> ExprValidator<'db> { ); value_or_partial.is_none_or(|v| !matches!(v, ValueNs::StaticId(_))) } - Expr::Field { expr, .. } => { - match self.infer.type_of_expr[*expr].to_chalk(self.interner).kind(Interner) { - TyKind::Adt(adt, ..) if matches!(adt.0, AdtId::UnionId(_)) => false, - _ => self.is_known_valid_scrutinee(*expr, db), - } - } - Expr::Index { base, .. } => self.is_known_valid_scrutinee(*base, db), - Expr::Cast { expr, .. } => self.is_known_valid_scrutinee(*expr, db), + Expr::Field { expr, .. } => match self.infer.type_of_expr[*expr].kind() { + TyKind::Adt(adt, ..) if matches!(adt.def_id().0, AdtId::UnionId(_)) => false, + _ => self.is_known_valid_scrutinee(*expr), + }, + Expr::Index { base, .. } => self.is_known_valid_scrutinee(*base), + Expr::Cast { expr, .. } => self.is_known_valid_scrutinee(*expr), Expr::Missing => false, _ => true, } } - fn validate_block(&mut self, db: &dyn HirDatabase, expr: &Expr) { + fn validate_block(&mut self, expr: &Expr) { let (Expr::Block { statements, .. } | Expr::Async { statements, .. } | Expr::Unsafe { statements, .. }) = expr @@ -332,7 +332,7 @@ impl<'db> ExprValidator<'db> { return; }; let pattern_arena = Arena::new(); - let cx = MatchCheckCtx::new(self.owner.module(db), self.owner, db, self.env.clone()); + let cx = MatchCheckCtx::new(self.owner.module(self.db()), &self.infcx, self.env.clone()); for stmt in &**statements { let &Statement::Let { pat, initializer, else_branch: None, .. 
} = stmt else { continue; @@ -342,13 +342,12 @@ impl<'db> ExprValidator<'db> { } let Some(initializer) = initializer else { continue }; let Some(ty) = self.infer.type_of_expr_with_adjust(initializer) else { continue }; - let ty = ty.to_chalk(self.interner); - if ty.contains_unknown() { + if ty.references_non_lt_error() { continue; } let mut have_errors = false; - let deconstructed_pat = self.lower_pattern(&cx, pat, db, &mut have_errors); + let deconstructed_pat = self.lower_pattern(&cx, pat, &mut have_errors); // optimization, wildcard trivially hold if have_errors || matches!(deconstructed_pat.ctor(), Constructor::Wildcard) { @@ -360,7 +359,7 @@ impl<'db> ExprValidator<'db> { has_guard: false, arm_data: (), }; - let report = match cx.compute_match_usefulness(&[match_arm], ty.clone(), None) { + let report = match cx.compute_match_usefulness(&[match_arm], ty, None) { Ok(v) => v, Err(e) => { debug!(?e, "match usefulness error"); @@ -373,24 +372,23 @@ impl<'db> ExprValidator<'db> { pat, uncovered_patterns: missing_match_arms( &cx, - &ty, + ty, witnesses, false, - self.owner.krate(db), + self.owner.krate(self.db()), ), }); } } } - fn lower_pattern<'p>( + fn lower_pattern<'a>( &self, - cx: &MatchCheckCtx<'p>, + cx: &MatchCheckCtx<'a, 'db>, pat: PatId, - db: &dyn HirDatabase, have_errors: &mut bool, - ) -> DeconstructedPat<'p> { - let mut patcx = match_check::PatCtxt::new(db, &self.infer, &self.body); + ) -> DeconstructedPat<'a, 'db> { + let mut patcx = match_check::PatCtxt::new(self.db(), &self.infer, &self.body); let pattern = patcx.lower_pattern(pat); let pattern = cx.lower_pat(&pattern); if !patcx.errors.is_empty() { @@ -434,7 +432,7 @@ impl<'db> ExprValidator<'db> { } } - fn check_for_unnecessary_else(&mut self, id: ExprId, expr: &Expr, db: &dyn HirDatabase) { + fn check_for_unnecessary_else(&mut self, id: ExprId, expr: &Expr) { if !self.validate_lints { return; } @@ -453,11 +451,11 @@ impl<'db> ExprValidator<'db> { && last_then_expr_ty.is_never() { // Only look at sources if the then branch diverges and we have an else branch. 
- let source_map = db.body_with_source_map(self.owner).1; + let source_map = self.db().body_with_source_map(self.owner).1; let Ok(source_ptr) = source_map.expr_syntax(id) else { return; }; - let root = source_ptr.file_syntax(db); + let root = source_ptr.file_syntax(self.db()); let either::Left(ast::Expr::IfExpr(if_expr)) = source_ptr.value.to_node(&root) else { return; @@ -491,15 +489,15 @@ impl<'db> ExprValidator<'db> { } } -struct FilterMapNextChecker { +struct FilterMapNextChecker<'db> { filter_map_function_id: Option, next_function_id: Option, prev_filter_map_expr_id: Option, - prev_receiver_ty: Option>, + prev_receiver_ty: Option>, } -impl FilterMapNextChecker { - fn new(resolver: &hir_def::resolver::Resolver<'_>, db: &dyn HirDatabase) -> Self { +impl<'db> FilterMapNextChecker<'db> { + fn new(resolver: &hir_def::resolver::Resolver<'db>, db: &'db dyn HirDatabase) -> Self { // Find and store the FunctionIds for Iterator::filter_map and Iterator::next let (next_function_id, filter_map_function_id) = match LangItem::IteratorNext .resolve_function(db, resolver.krate()) @@ -639,15 +637,19 @@ fn types_of_subpatterns_do_match(pat: PatId, body: &Body, infer: &InferenceResul !has_type_mismatches } -fn missing_match_arms<'p>( - cx: &MatchCheckCtx<'p>, - scrut_ty: &Ty, - witnesses: Vec>, +fn missing_match_arms<'a, 'db>( + cx: &MatchCheckCtx<'a, 'db>, + scrut_ty: Ty<'a>, + witnesses: Vec>, arms_is_empty: bool, krate: Crate, ) -> String { - struct DisplayWitness<'a, 'p>(&'a WitnessPat<'p>, &'a MatchCheckCtx<'p>, DisplayTarget); - impl fmt::Display for DisplayWitness<'_, '_> { + struct DisplayWitness<'a, 'b, 'db>( + &'a WitnessPat<'b, 'db>, + &'a MatchCheckCtx<'b, 'db>, + DisplayTarget, + ); + impl fmt::Display for DisplayWitness<'_, '_, '_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let DisplayWitness(witness, cx, display_target) = *self; let pat = cx.hoist_witness_pat(witness); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs index af541ffa342ef..af6795e6018a5 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs @@ -9,7 +9,6 @@ mod pat_util; pub(crate) mod pat_analysis; -use chalk_ir::Mutability; use hir_def::{ AdtId, EnumVariantId, LocalFieldId, Lookup, VariantId, expr_store::{Body, path::Path}, @@ -17,16 +16,16 @@ use hir_def::{ item_tree::FieldsShape, }; use hir_expand::name::Name; +use rustc_type_ir::inherent::{IntoKind, SliceLike}; use span::Edition; use stdx::{always, never}; -use crate::next_solver::DbInterner; -use crate::next_solver::mapping::NextSolverToChalk; use crate::{ - InferenceResult, Interner, Substitution, Ty, TyExt, TyKind, + InferenceResult, db::HirDatabase, display::{HirDisplay, HirDisplayError, HirFormatter}, infer::BindingMode, + next_solver::{GenericArgs, Mutability, Ty, TyKind}, }; use self::pat_util::EnumerateAndAdjustIterator; @@ -41,46 +40,46 @@ pub(crate) enum PatternError { } #[derive(Clone, Debug, PartialEq)] -pub(crate) struct FieldPat { +pub(crate) struct FieldPat<'db> { pub(crate) field: LocalFieldId, - pub(crate) pattern: Pat, + pub(crate) pattern: Pat<'db>, } #[derive(Clone, Debug, PartialEq)] -pub(crate) struct Pat { - pub(crate) ty: Ty, - pub(crate) kind: Box, +pub(crate) struct Pat<'db> { + pub(crate) ty: Ty<'db>, + pub(crate) kind: Box>, } /// Close relative to `rustc_mir_build::thir::pattern::PatKind` #[derive(Clone, Debug, 
PartialEq)] -pub(crate) enum PatKind { +pub(crate) enum PatKind<'db> { Wild, Never, /// `x`, `ref x`, `x @ P`, etc. Binding { name: Name, - subpattern: Option, + subpattern: Option>, }, /// `Foo(...)` or `Foo{...}` or `Foo`, where `Foo` is a variant name from an ADT with /// multiple variants. Variant { - substs: Substitution, + substs: GenericArgs<'db>, enum_variant: EnumVariantId, - subpatterns: Vec, + subpatterns: Vec>, }, /// `(...)`, `Foo(...)`, `Foo{...}`, or `Foo`, where `Foo` is a variant name from an ADT with /// a single variant. Leaf { - subpatterns: Vec, + subpatterns: Vec>, }, /// `&P`, `&mut P`, etc. Deref { - subpattern: Pat, + subpattern: Pat<'db>, }, // FIXME: for now, only bool literals are implemented @@ -91,28 +90,27 @@ pub(crate) enum PatKind { /// An or-pattern, e.g. `p | q`. /// Invariant: `pats.len() >= 2`. Or { - pats: Vec, + pats: Vec>, }, } -pub(crate) struct PatCtxt<'db> { +pub(crate) struct PatCtxt<'a, 'db> { db: &'db dyn HirDatabase, - infer: &'db InferenceResult<'db>, - body: &'db Body, + infer: &'a InferenceResult<'db>, + body: &'a Body, pub(crate) errors: Vec, - interner: DbInterner<'db>, } -impl<'a> PatCtxt<'a> { +impl<'a, 'db> PatCtxt<'a, 'db> { pub(crate) fn new( - db: &'a dyn HirDatabase, - infer: &'a InferenceResult<'a>, + db: &'db dyn HirDatabase, + infer: &'a InferenceResult<'db>, body: &'a Body, ) -> Self { - Self { db, infer, body, errors: Vec::new(), interner: DbInterner::new_with(db, None, None) } + Self { db, infer, body, errors: Vec::new() } } - pub(crate) fn lower_pattern(&mut self, pat: PatId) -> Pat { + pub(crate) fn lower_pattern(&mut self, pat: PatId) -> Pat<'db> { // XXX(iDawer): Collecting pattern adjustments feels imprecise to me. // When lowering of & and box patterns are implemented this should be tested // in a manner of `match_ergonomics_issue_9095` test. @@ -121,15 +119,12 @@ impl<'a> PatCtxt<'a> { let unadjusted_pat = self.lower_pattern_unadjusted(pat); self.infer.pat_adjustments.get(&pat).map(|it| &**it).unwrap_or_default().iter().rev().fold( unadjusted_pat, - |subpattern, ref_ty| Pat { - ty: ref_ty.to_chalk(self.interner).clone(), - kind: Box::new(PatKind::Deref { subpattern }), - }, + |subpattern, ref_ty| Pat { ty: *ref_ty, kind: Box::new(PatKind::Deref { subpattern }) }, ) } - fn lower_pattern_unadjusted(&mut self, pat: PatId) -> Pat { - let mut ty = self.infer[pat].to_chalk(self.interner); + fn lower_pattern_unadjusted(&mut self, pat: PatId) -> Pat<'db> { + let mut ty = self.infer[pat]; let variant = self.infer.variant_resolution_for_pat(pat); let kind = match self.body[pat] { @@ -142,8 +137,8 @@ impl<'a> PatCtxt<'a> { } hir_def::hir::Pat::Tuple { ref args, ellipsis } => { - let arity = match *ty.kind(Interner) { - TyKind::Tuple(arity, _) => arity, + let arity = match ty.kind() { + TyKind::Tuple(tys) => tys.len(), _ => { never!("unexpected type for tuple pattern: {:?}", ty); self.errors.push(PatternError::UnexpectedType); @@ -156,10 +151,10 @@ impl<'a> PatCtxt<'a> { hir_def::hir::Pat::Bind { id, subpat, .. 
} => { let bm = self.infer.binding_modes[pat]; - ty = self.infer[id].to_chalk(self.interner); + ty = self.infer[id]; let name = &self.body[id].name; - match (bm, ty.kind(Interner)) { - (BindingMode::Ref(_), TyKind::Ref(.., rty)) => ty = rty.clone(), + match (bm, ty.kind()) { + (BindingMode::Ref(_), TyKind::Ref(_, rty, _)) => ty = rty, (BindingMode::Ref(_), _) => { never!( "`ref {}` has wrong type {:?}", @@ -167,7 +162,7 @@ impl<'a> PatCtxt<'a> { ty ); self.errors.push(PatternError::UnexpectedType); - return Pat { ty: ty.clone(), kind: PatKind::Wild.into() }; + return Pat { ty, kind: PatKind::Wild.into() }; } _ => (), } @@ -177,7 +172,7 @@ impl<'a> PatCtxt<'a> { hir_def::hir::Pat::TupleStruct { ref args, ellipsis, .. } if variant.is_some() => { let expected_len = variant.unwrap().fields(self.db).fields().len(); let subpatterns = self.lower_tuple_subpats(args, expected_len, ellipsis); - self.lower_variant_or_leaf(pat, &ty, subpatterns) + self.lower_variant_or_leaf(pat, ty, subpatterns) } hir_def::hir::Pat::Record { ref args, .. } if variant.is_some() => { @@ -193,7 +188,7 @@ impl<'a> PatCtxt<'a> { }) .collect(); match subpatterns { - Some(subpatterns) => self.lower_variant_or_leaf(pat, &ty, subpatterns), + Some(subpatterns) => self.lower_variant_or_leaf(pat, ty, subpatterns), None => { self.errors.push(PatternError::MissingField); PatKind::Wild @@ -213,7 +208,7 @@ impl<'a> PatCtxt<'a> { } }; - Pat { ty: ty.clone(), kind: Box::new(kind) } + Pat { ty, kind: Box::new(kind) } } fn lower_tuple_subpats( @@ -221,7 +216,7 @@ impl<'a> PatCtxt<'a> { pats: &[PatId], expected_len: usize, ellipsis: Option, - ) -> Vec { + ) -> Vec> { if pats.len() > expected_len { self.errors.push(PatternError::ExtraFields); return Vec::new(); @@ -236,28 +231,28 @@ impl<'a> PatCtxt<'a> { .collect() } - fn lower_patterns(&mut self, pats: &[PatId]) -> Vec { + fn lower_patterns(&mut self, pats: &[PatId]) -> Vec> { pats.iter().map(|&p| self.lower_pattern(p)).collect() } - fn lower_opt_pattern(&mut self, pat: Option) -> Option { + fn lower_opt_pattern(&mut self, pat: Option) -> Option> { pat.map(|p| self.lower_pattern(p)) } fn lower_variant_or_leaf( &mut self, pat: PatId, - ty: &Ty, - subpatterns: Vec, - ) -> PatKind { + ty: Ty<'db>, + subpatterns: Vec>, + ) -> PatKind<'db> { match self.infer.variant_resolution_for_pat(pat) { Some(variant_id) => { if let VariantId::EnumVariantId(enum_variant) = variant_id { - let substs = match ty.kind(Interner) { - TyKind::Adt(_, substs) => substs.clone(), + let substs = match ty.kind() { + TyKind::Adt(_, substs) => substs, kind => { always!( - matches!(kind, TyKind::FnDef(..) | TyKind::Error), + matches!(kind, TyKind::FnDef(..) 
| TyKind::Error(_)), "inappropriate type for def: {:?}", ty ); @@ -277,13 +272,13 @@ impl<'a> PatCtxt<'a> { } } - fn lower_path(&mut self, pat: PatId, _path: &Path) -> Pat { - let ty = self.infer[pat].to_chalk(self.interner); + fn lower_path(&mut self, pat: PatId, _path: &Path) -> Pat<'db> { + let ty = self.infer[pat]; - let pat_from_kind = |kind| Pat { ty: ty.clone(), kind: Box::new(kind) }; + let pat_from_kind = |kind| Pat { ty, kind: Box::new(kind) }; match self.infer.variant_resolution_for_pat(pat) { - Some(_) => pat_from_kind(self.lower_variant_or_leaf(pat, &ty, Vec::new())), + Some(_) => pat_from_kind(self.lower_variant_or_leaf(pat, ty, Vec::new())), None => { self.errors.push(PatternError::UnresolvedVariant); pat_from_kind(PatKind::Wild) @@ -291,7 +286,7 @@ impl<'a> PatCtxt<'a> { } } - fn lower_lit(&mut self, expr: hir_def::hir::ExprId) -> PatKind { + fn lower_lit(&mut self, expr: hir_def::hir::ExprId) -> PatKind<'db> { use hir_def::hir::{Expr, Literal::Bool}; match self.body[expr] { @@ -304,7 +299,7 @@ impl<'a> PatCtxt<'a> { } } -impl HirDisplay for Pat { +impl HirDisplay for Pat<'_> { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { match &*self.kind { PatKind::Wild => write!(f, "_"), @@ -402,7 +397,7 @@ impl HirDisplay for Pat { }) }); f.write_joined(subpats, ", ")?; - if let (TyKind::Tuple(..), 1) = (self.ty.kind(Interner), num_fields) { + if let (TyKind::Tuple(..), 1) = (self.ty.kind(), num_fields) { write!(f, ",")?; } write!(f, ")")?; @@ -411,8 +406,8 @@ impl HirDisplay for Pat { Ok(()) } PatKind::Deref { subpattern } => { - match self.ty.kind(Interner) { - &TyKind::Ref(mutbl, ..) => { + match self.ty.kind() { + TyKind::Ref(.., mutbl) => { write!(f, "&{}", if mutbl == Mutability::Mut { "mut " } else { "" })? } _ => never!("{:?} is a bad Deref pattern type", self.ty), diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs index 76f50c1948354..f0efadeafcea7 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs @@ -1,28 +1,26 @@ //! Interface with `rustc_pattern_analysis`. -use std::cell::LazyCell; -use std::fmt; +use std::{cell::LazyCell, fmt}; -use hir_def::{DefWithBodyId, EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId}; +use hir_def::{EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId}; use intern::sym; use rustc_pattern_analysis::{ IndexVec, PatCx, PrivateUninhabitedField, constructor::{Constructor, ConstructorSet, VariantVisibility}, usefulness::{PlaceValidity, UsefulnessReport, compute_match_usefulness}, }; +use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike}; use smallvec::{SmallVec, smallvec}; use stdx::never; use triomphe::Arc; use crate::{ - AdtId, Interner, Scalar, TraitEnvironment, Ty, TyExt, TyKind, + TraitEnvironment, db::HirDatabase, - infer::normalize, inhabitedness::{is_enum_variant_uninhabited_from, is_ty_uninhabited_from}, next_solver::{ - DbInterner, TypingMode, - infer::{DbInternerInferExt, InferCtxt}, - mapping::ChalkToNextSolver, + Ty, TyKind, + infer::{InferCtxt, traits::ObligationCause}, }, }; @@ -31,10 +29,12 @@ use super::{FieldPat, Pat, PatKind}; use Constructor::*; // Re-export r-a-specific versions of all these types. 
-pub(crate) type DeconstructedPat<'db> = - rustc_pattern_analysis::pat::DeconstructedPat>; -pub(crate) type MatchArm<'a, 'db> = rustc_pattern_analysis::MatchArm<'a, MatchCheckCtx<'db>>; -pub(crate) type WitnessPat<'db> = rustc_pattern_analysis::pat::WitnessPat>; +pub(crate) type DeconstructedPat<'a, 'db> = + rustc_pattern_analysis::pat::DeconstructedPat>; +pub(crate) type MatchArm<'a, 'b, 'db> = + rustc_pattern_analysis::MatchArm<'b, MatchCheckCtx<'a, 'db>>; +pub(crate) type WitnessPat<'a, 'db> = + rustc_pattern_analysis::pat::WitnessPat>; /// [Constructor] uses this in unimplemented variants. /// It allows porting match expressions from upstream algorithm without losing semantics. @@ -70,40 +70,37 @@ impl rustc_pattern_analysis::Idx for EnumVariantContiguousIndex { } #[derive(Clone)] -pub(crate) struct MatchCheckCtx<'db> { +pub(crate) struct MatchCheckCtx<'a, 'db> { module: ModuleId, - body: DefWithBodyId, pub(crate) db: &'db dyn HirDatabase, exhaustive_patterns: bool, env: Arc>, - infcx: InferCtxt<'db>, + infcx: &'a InferCtxt<'db>, } -impl<'db> MatchCheckCtx<'db> { +impl<'a, 'db> MatchCheckCtx<'a, 'db> { pub(crate) fn new( module: ModuleId, - body: DefWithBodyId, - db: &'db dyn HirDatabase, + infcx: &'a InferCtxt<'db>, env: Arc>, ) -> Self { + let db = infcx.interner.db; let def_map = module.crate_def_map(db); let exhaustive_patterns = def_map.is_unstable_feature_enabled(&sym::exhaustive_patterns); - let interner = DbInterner::new_with(db, Some(env.krate), env.block); - let infcx = interner.infer_ctxt().build(TypingMode::typeck_for_body(interner, body.into())); - Self { module, body, db, exhaustive_patterns, env, infcx } + Self { module, db, exhaustive_patterns, env, infcx } } - pub(crate) fn compute_match_usefulness<'a>( + pub(crate) fn compute_match_usefulness<'b>( &self, - arms: &[MatchArm<'a, 'db>], - scrut_ty: Ty, + arms: &[MatchArm<'a, 'b, 'db>], + scrut_ty: Ty<'db>, known_valid_scrutinee: Option, - ) -> Result, ()> { - if scrut_ty.contains_unknown() { + ) -> Result, ()> { + if scrut_ty.references_non_lt_error() { return Err(()); } for arm in arms { - if arm.pat.ty().contains_unknown() { + if arm.pat.ty().references_non_lt_error() { return Err(()); } } @@ -114,13 +111,8 @@ impl<'db> MatchCheckCtx<'db> { compute_match_usefulness(self, arms, scrut_ty, place_validity, complexity_limit) } - fn is_uninhabited(&self, ty: &Ty) -> bool { - is_ty_uninhabited_from( - &self.infcx, - ty.to_nextsolver(self.infcx.interner), - self.module, - self.env.clone(), - ) + fn is_uninhabited(&self, ty: Ty<'db>) -> bool { + is_ty_uninhabited_from(self.infcx, ty, self.module, self.env.clone()) } /// Returns whether the given ADT is from another crate declared `#[non_exhaustive]`. @@ -153,23 +145,27 @@ impl<'db> MatchCheckCtx<'db> { // This lists the fields of a variant along with their types. 
fn list_variant_fields( &self, - ty: &Ty, + ty: Ty<'db>, variant: VariantId, - ) -> impl Iterator { + ) -> impl Iterator)> { let (_, substs) = ty.as_adt().unwrap(); - let field_tys = self.db.field_types(variant); + let field_tys = self.db.field_types_ns(variant); let fields_len = variant.fields(self.db).fields().len() as u32; (0..fields_len).map(|idx| LocalFieldId::from_raw(idx.into())).map(move |fid| { - let ty = field_tys[fid].clone().substitute(Interner, substs); - let ty = normalize(self.db, self.db.trait_environment_for_body(self.body), ty); + let ty = field_tys[fid].instantiate(self.infcx.interner, substs); + let ty = self + .infcx + .at(&ObligationCause::dummy(), self.env.env) + .deeply_normalize(ty) + .unwrap_or(ty); (fid, ty) }) } - pub(crate) fn lower_pat(&self, pat: &Pat) -> DeconstructedPat<'db> { - let singleton = |pat: DeconstructedPat<'db>| vec![pat.at_index(0)]; + pub(crate) fn lower_pat(&self, pat: &Pat<'db>) -> DeconstructedPat<'a, 'db> { + let singleton = |pat: DeconstructedPat<'a, 'db>| vec![pat.at_index(0)]; let ctor; let mut fields: Vec<_>; let arity; @@ -182,7 +178,7 @@ impl<'db> MatchCheckCtx<'db> { arity = 0; } PatKind::Deref { subpattern } => { - ctor = match pat.ty.kind(Interner) { + ctor = match pat.ty.kind() { TyKind::Ref(..) => Ref, _ => { never!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, &pat.ty); @@ -200,12 +196,13 @@ impl<'db> MatchCheckCtx<'db> { self.lower_pat(&pat.pattern).at_index(idx as usize) }) .collect(); - match pat.ty.kind(Interner) { - TyKind::Tuple(_, substs) => { + match pat.ty.kind() { + TyKind::Tuple(substs) => { ctor = Struct; - arity = substs.len(Interner); + arity = substs.len(); } - &TyKind::Adt(AdtId(adt), _) => { + TyKind::Adt(adt_def, _) => { + let adt = adt_def.def_id().0; ctor = match pat.kind.as_ref() { PatKind::Leaf { .. } if matches!(adt, hir_def::AdtId::UnionId(_)) => { UnionField @@ -253,15 +250,15 @@ impl<'db> MatchCheckCtx<'db> { arity = pats.len(); } } - DeconstructedPat::new(ctor, fields, arity, pat.ty.clone(), ()) + DeconstructedPat::new(ctor, fields, arity, pat.ty, ()) } - pub(crate) fn hoist_witness_pat(&self, pat: &WitnessPat<'db>) -> Pat { + pub(crate) fn hoist_witness_pat(&self, pat: &WitnessPat<'a, 'db>) -> Pat<'db> { let mut subpatterns = pat.iter_fields().map(|p| self.hoist_witness_pat(p)); let kind = match pat.ctor() { &Bool(value) => PatKind::LiteralBool { value }, IntRange(_) => unimplemented!(), - Struct | Variant(_) | UnionField => match pat.ty().kind(Interner) { + Struct | Variant(_) | UnionField => match pat.ty().kind() { TyKind::Tuple(..) => PatKind::Leaf { subpatterns: subpatterns .zip(0u32..) 
@@ -272,15 +269,16 @@ impl<'db> MatchCheckCtx<'db> { .collect(), }, TyKind::Adt(adt, substs) => { - let variant = Self::variant_id_for_adt(self.db, pat.ctor(), adt.0).unwrap(); + let variant = + Self::variant_id_for_adt(self.db, pat.ctor(), adt.def_id().0).unwrap(); let subpatterns = self - .list_variant_fields(pat.ty(), variant) + .list_variant_fields(*pat.ty(), variant) .zip(subpatterns) .map(|((field, _ty), pattern)| FieldPat { field, pattern }) .collect(); if let VariantId::EnumVariantId(enum_variant) = variant { - PatKind::Variant { substs: substs.clone(), enum_variant, subpatterns } + PatKind::Variant { substs, enum_variant, subpatterns } } else { PatKind::Leaf { subpatterns } } @@ -306,13 +304,13 @@ impl<'db> MatchCheckCtx<'db> { PatKind::Wild } }; - Pat { ty: pat.ty().clone(), kind: Box::new(kind) } + Pat { ty: *pat.ty(), kind: Box::new(kind) } } } -impl PatCx for MatchCheckCtx<'_> { +impl<'a, 'db> PatCx for MatchCheckCtx<'a, 'db> { type Error = (); - type Ty = Ty; + type Ty = Ty<'db>; type VariantIdx = EnumVariantContiguousIndex; type StrLit = Void; type ArmData = (); @@ -328,10 +326,11 @@ impl PatCx for MatchCheckCtx<'_> { ty: &Self::Ty, ) -> usize { match ctor { - Struct | Variant(_) | UnionField => match *ty.kind(Interner) { - TyKind::Tuple(arity, ..) => arity, - TyKind::Adt(AdtId(adt), ..) => { - let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap(); + Struct | Variant(_) | UnionField => match ty.kind() { + TyKind::Tuple(tys) => tys.len(), + TyKind::Adt(adt_def, ..) => { + let variant = + Self::variant_id_for_adt(self.db, ctor, adt_def.def_id().0).unwrap(); variant.fields(self.db).fields().len() } _ => { @@ -359,24 +358,24 @@ impl PatCx for MatchCheckCtx<'_> { ) -> impl ExactSizeIterator { let single = |ty| smallvec![(ty, PrivateUninhabitedField(false))]; let tys: SmallVec<[_; 2]> = match ctor { - Struct | Variant(_) | UnionField => match ty.kind(Interner) { - TyKind::Tuple(_, substs) => { - let tys = substs.iter(Interner).map(|ty| ty.assert_ty_ref(Interner)); - tys.cloned().map(|ty| (ty, PrivateUninhabitedField(false))).collect() + Struct | Variant(_) | UnionField => match ty.kind() { + TyKind::Tuple(substs) => { + substs.iter().map(|ty| (ty, PrivateUninhabitedField(false))).collect() } - TyKind::Ref(.., rty) => single(rty.clone()), - &TyKind::Adt(AdtId(adt), ..) => { + TyKind::Ref(_, rty, _) => single(rty), + TyKind::Adt(adt_def, ..) 
=> { + let adt = adt_def.def_id().0; let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap(); let visibilities = LazyCell::new(|| self.db.field_visibilities(variant)); - self.list_variant_fields(ty, variant) + self.list_variant_fields(*ty, variant) .map(move |(fid, ty)| { let is_visible = || { matches!(adt, hir_def::AdtId::EnumId(..)) || visibilities[fid].is_visible_from(self.db, self.module) }; - let is_uninhabited = self.is_uninhabited(&ty); + let is_uninhabited = self.is_uninhabited(ty); let private_uninhabited = is_uninhabited && !is_visible(); (ty, PrivateUninhabitedField(private_uninhabited)) }) @@ -384,14 +383,14 @@ impl PatCx for MatchCheckCtx<'_> { } ty_kind => { never!("Unexpected type for `{:?}` constructor: {:?}", ctor, ty_kind); - single(ty.clone()) + single(*ty) } }, - Ref => match ty.kind(Interner) { - TyKind::Ref(.., rty) => single(rty.clone()), + Ref => match ty.kind() { + TyKind::Ref(_, rty, _) => single(rty), ty_kind => { never!("Unexpected type for `{:?}` constructor: {:?}", ctor, ty_kind); - single(ty.clone()) + single(*ty) } }, Slice(_) => unreachable!("Found a `Slice` constructor in match checking"), @@ -427,42 +426,51 @@ impl PatCx for MatchCheckCtx<'_> { // returned list of constructors. // Invariant: this is empty if and only if the type is uninhabited (as determined by // `cx.is_uninhabited()`). - Ok(match ty.kind(Interner) { - TyKind::Scalar(Scalar::Bool) => ConstructorSet::Bool, - TyKind::Scalar(Scalar::Char) => unhandled(), - TyKind::Scalar(Scalar::Int(..) | Scalar::Uint(..)) => unhandled(), + Ok(match ty.kind() { + TyKind::Bool => ConstructorSet::Bool, + TyKind::Char => unhandled(), + TyKind::Int(..) | TyKind::Uint(..) => unhandled(), TyKind::Array(..) | TyKind::Slice(..) => unhandled(), - &TyKind::Adt(AdtId(adt @ hir_def::AdtId::EnumId(enum_id)), ref subst) => { - let enum_data = enum_id.enum_variants(cx.db); - let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive(adt); - - if enum_data.variants.is_empty() && !is_declared_nonexhaustive { - ConstructorSet::NoConstructors - } else { - let mut variants = IndexVec::with_capacity(enum_data.variants.len()); - for &(variant, _, _) in enum_data.variants.iter() { - let is_uninhabited = is_enum_variant_uninhabited_from( - &cx.infcx, - variant, - subst.to_nextsolver(cx.infcx.interner), - cx.module, - self.env.clone(), - ); - let visibility = if is_uninhabited { - VariantVisibility::Empty + TyKind::Adt(adt_def, subst) => { + let adt = adt_def.def_id().0; + match adt { + hir_def::AdtId::EnumId(enum_id) => { + let enum_data = enum_id.enum_variants(cx.db); + let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive(adt); + + if enum_data.variants.is_empty() && !is_declared_nonexhaustive { + ConstructorSet::NoConstructors } else { - VariantVisibility::Visible - }; - variants.push(visibility); - } + let mut variants = IndexVec::with_capacity(enum_data.variants.len()); + for &(variant, _, _) in enum_data.variants.iter() { + let is_uninhabited = is_enum_variant_uninhabited_from( + cx.infcx, + variant, + subst, + cx.module, + self.env.clone(), + ); + let visibility = if is_uninhabited { + VariantVisibility::Empty + } else { + VariantVisibility::Visible + }; + variants.push(visibility); + } - ConstructorSet::Variants { variants, non_exhaustive: is_declared_nonexhaustive } + ConstructorSet::Variants { + variants, + non_exhaustive: is_declared_nonexhaustive, + } + } + } + hir_def::AdtId::UnionId(_) => ConstructorSet::Union, + hir_def::AdtId::StructId(_) => { + ConstructorSet::Struct { empty: 
cx.is_uninhabited(*ty) } + } } } - TyKind::Adt(AdtId(hir_def::AdtId::UnionId(_)), _) => ConstructorSet::Union, - TyKind::Adt(..) | TyKind::Tuple(..) => { - ConstructorSet::Struct { empty: cx.is_uninhabited(ty) } - } + TyKind::Tuple(..) => ConstructorSet::Struct { empty: cx.is_uninhabited(*ty) }, TyKind::Ref(..) => ConstructorSet::Ref, TyKind::Never => ConstructorSet::NoConstructors, // This type is one for which we cannot list constructors, like `str` or `f64`. @@ -505,14 +513,14 @@ impl PatCx for MatchCheckCtx<'_> { fn report_mixed_deref_pat_ctors( &self, - _deref_pat: &DeconstructedPat<'_>, - _normal_pat: &DeconstructedPat<'_>, + _deref_pat: &DeconstructedPat<'a, 'db>, + _normal_pat: &DeconstructedPat<'a, 'db>, ) { // FIXME(deref_patterns): This could report an error comparable to the one in rustc. } } -impl fmt::Debug for MatchCheckCtx<'_> { +impl fmt::Debug for MatchCheckCtx<'_, '_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("MatchCheckCtx").finish() } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs index 734483a823e45..eb01ef104b61a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs @@ -124,7 +124,7 @@ pub use utils::{ }; pub use variance::Variance; -use chalk_ir::{AdtId, BoundVar, DebruijnIndex, Safety, Scalar}; +use chalk_ir::{BoundVar, DebruijnIndex, Safety, Scalar}; pub(crate) type ForeignDefId = chalk_ir::ForeignDefId; pub(crate) type AssocTypeId = chalk_ir::AssocTypeId; From 4b247ba0a56049c7987a8ec082442cc79cc0d58a Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Sun, 12 Oct 2025 14:05:51 +0300 Subject: [PATCH 11/76] Migrate unsafe checker to the new solver --- .../crates/hir-ty/src/chalk_ext.rs | 24 +++--------------- .../hir-ty/src/diagnostics/unsafe_check.rs | 25 +++++++++---------- 2 files changed, 16 insertions(+), 33 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs index ea3ed1589d756..6bab30b40cc15 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs @@ -1,11 +1,11 @@ //! Various extensions traits for Chalk types. 
-use hir_def::{FunctionId, ItemContainerId, Lookup, TraitId}; +use hir_def::{ItemContainerId, Lookup, TraitId}; use crate::{ - Binders, CallableDefId, CallableSig, DynTy, Interner, ProjectionTy, Substitution, ToChalk, - TraitRef, Ty, TyKind, db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, - generics::generics, to_chalk_trait_id, utils::ClosureSubst, + Binders, CallableSig, DynTy, Interner, ProjectionTy, Substitution, TraitRef, Ty, TyKind, + db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, generics::generics, + to_chalk_trait_id, utils::ClosureSubst, }; pub(crate) trait TyExt { @@ -13,9 +13,7 @@ pub(crate) trait TyExt { fn is_unknown(&self) -> bool; fn as_tuple(&self) -> Option<&Substitution>; - fn as_fn_def(&self, db: &dyn HirDatabase) -> Option; - fn callable_def(&self, db: &dyn HirDatabase) -> Option; fn callable_sig(&self, db: &dyn HirDatabase) -> Option; } @@ -35,20 +33,6 @@ impl TyExt for Ty { } } - fn as_fn_def(&self, db: &dyn HirDatabase) -> Option { - match self.callable_def(db) { - Some(CallableDefId::FunctionId(func)) => Some(func), - Some(CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_)) | None => None, - } - } - - fn callable_def(&self, db: &dyn HirDatabase) -> Option { - match self.kind(Interner) { - &TyKind::FnDef(def, ..) => Some(ToChalk::from_chalk(db, def)), - _ => None, - } - } - fn callable_sig(&self, db: &dyn HirDatabase) -> Option { match self.kind(Interner) { TyKind::Function(fn_ptr) => Some(CallableSig::from_fn_ptr(fn_ptr)), diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs index 31100e17f8465..53524d66a33c2 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs @@ -5,22 +5,21 @@ use std::mem; use either::Either; use hir_def::{ - AdtId, DefWithBodyId, FieldId, FunctionId, VariantId, + AdtId, CallableDefId, DefWithBodyId, FieldId, FunctionId, VariantId, expr_store::{Body, path::Path}, hir::{AsmOperand, Expr, ExprId, ExprOrPatId, InlineAsmKind, Pat, PatId, Statement, UnaryOp}, resolver::{HasResolver, ResolveValueResult, Resolver, ValueNs}, signatures::StaticFlags, type_ref::Rawness, }; +use rustc_type_ir::inherent::IntoKind; use span::Edition; -use crate::next_solver::DbInterner; -use crate::next_solver::mapping::NextSolverToChalk; -use crate::utils::TargetFeatureIsSafeInTarget; use crate::{ - InferenceResult, Interner, TargetFeatures, TyExt, TyKind, + InferenceResult, TargetFeatures, db::HirDatabase, - utils::{is_fn_unsafe_to_call, target_feature_is_safe_in_target}, + next_solver::{CallableIdWrapper, TyKind, abi::Safety}, + utils::{TargetFeatureIsSafeInTarget, is_fn_unsafe_to_call, target_feature_is_safe_in_target}, }; #[derive(Debug, Default)] @@ -151,7 +150,6 @@ struct UnsafeVisitor<'db> { /// On some targets (WASM), calling safe functions with `#[target_feature]` is always safe, even when /// the target feature is not enabled. This flag encodes that. target_feature_is_safe: TargetFeatureIsSafeInTarget, - interner: DbInterner<'db>, } impl<'db> UnsafeVisitor<'db> { @@ -186,7 +184,6 @@ impl<'db> UnsafeVisitor<'db> { def_target_features, edition, target_feature_is_safe, - interner: DbInterner::new_with(db, None, None), } } @@ -289,12 +286,14 @@ impl<'db> UnsafeVisitor<'db> { let inside_assignment = mem::replace(&mut self.inside_assignment, false); match expr { &Expr::Call { callee, .. 
} => { - let callee = self.infer[callee].to_chalk(self.interner); - if let Some(func) = callee.as_fn_def(self.db) { + let callee = self.infer[callee]; + if let TyKind::FnDef(CallableIdWrapper(CallableDefId::FunctionId(func)), _) = + callee.kind() + { self.check_call(current, func); } - if let TyKind::Function(fn_ptr) = callee.kind(Interner) - && fn_ptr.sig.safety == chalk_ir::Safety::Unsafe + if let TyKind::FnPtr(_, hdr) = callee.kind() + && hdr.safety == Safety::Unsafe { self.on_unsafe_op(current.into(), UnsafetyReason::UnsafeFnCall); } @@ -342,7 +341,7 @@ impl<'db> UnsafeVisitor<'db> { } } Expr::UnaryOp { expr, op: UnaryOp::Deref } => { - if let TyKind::Raw(..) = &self.infer[*expr].to_chalk(self.interner).kind(Interner) { + if let TyKind::RawPtr(..) = self.infer[*expr].kind() { self.on_unsafe_op(current.into(), UnsafetyReason::RawPtrDeref); } } From aa711f622f96b74ac9e4f477884b2b018cab6c5e Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Sun, 12 Oct 2025 16:15:13 +0300 Subject: [PATCH 12/76] Make `lookup_impl_method()` query transparent The bulk of the work is trait solving and cached in the trait solver's cache, and this will save memory. --- src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs | 3 +-- src/tools/rust-analyzer/crates/hir-ty/src/db.rs | 1 + 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs index 002e0823b9d17..761d72243e9f8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs @@ -5,13 +5,12 @@ mod tests; use base_db::Crate; use hir_def::{ - EnumVariantId, GeneralConstId, + EnumVariantId, GeneralConstId, HasModule, StaticId, expr_store::{Body, HygieneId, path::Path}, hir::{Expr, ExprId}, resolver::{Resolver, ValueNs}, type_ref::LiteralConstRef, }; -use hir_def::{HasModule, StaticId}; use hir_expand::Lookup; use rustc_type_ir::{UnevaluatedConst, inherent::IntoKind}; use stdx::never; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs index 4b33c8a84a810..6c1d05ab1b50a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs @@ -96,6 +96,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { ) -> Result>; #[salsa::invoke(crate::method_resolution::lookup_impl_method_query)] + #[salsa::transparent] fn lookup_impl_method<'db>( &'db self, env: Arc>, From 2df61e02e4c258ba9aaa2b03d9d57c61e55d708f Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Sun, 12 Oct 2025 16:20:56 +0300 Subject: [PATCH 13/76] Remove unneeded queries --- src/tools/rust-analyzer/crates/hir-ty/src/db.rs | 17 ----------------- src/tools/rust-analyzer/crates/hir/src/lib.rs | 2 +- 2 files changed, 1 insertion(+), 18 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs index 6c1d05ab1b50a..80945adedbfdf 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs @@ -323,23 +323,6 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { )] fn variances_of(&self, def: GenericDefId) -> Option>; - #[salsa::invoke(crate::traits::normalize_projection_query)] - #[salsa::transparent] - fn normalize_projection( - &self, - projection: crate::ProjectionTy, - env: Arc>, - ) -> Ty; - - #[salsa::invoke(crate::traits::trait_solve_query)] - #[salsa::transparent] - fn 
trait_solve( - &self, - krate: Crate, - block: Option, - goal: crate::Canonical>, - ) -> NextTraitSolveResult; - // next trait solver #[salsa::invoke(crate::lower_nextsolver::const_param_ty_query)] diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index 55da27781db13..fc516a6764a5d 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -3692,7 +3692,7 @@ impl GenericDef { }; expr_store_diagnostics(db, acc, &source_map); - push_ty_diagnostics(db, acc, db.generic_defaults_with_diagnostics(def).1, &source_map); + push_ty_diagnostics(db, acc, db.generic_defaults_ns_with_diagnostics(def).1, &source_map); push_ty_diagnostics( db, acc, From bf531aca66a6f9444b26f7b18279a8818a7cdc10 Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Mon, 13 Oct 2025 00:15:55 +0900 Subject: [PATCH 14/76] minor: Fix creating `rust-analyzer/rust-analyzer` --- .../rust-analyzer/crates/rust-analyzer/src/flycheck.rs | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs index b545106fe1cfb..73a51bba3d9a9 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs @@ -12,7 +12,7 @@ use cargo_metadata::PackageId; use crossbeam_channel::{Receiver, Sender, select_biased, unbounded}; use ide_db::FxHashSet; use itertools::Itertools; -use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; +use paths::{AbsPath, AbsPathBuf, Utf8Path, Utf8PathBuf}; use rustc_hash::FxHashMap; use serde::Deserialize as _; use serde_derive::Deserialize; @@ -432,8 +432,10 @@ impl FlycheckActor { options .target_dir .as_deref() - .unwrap_or("target".as_ref()) - .join(format!("rust-analyzer/flycheck{}", self.id)), + .unwrap_or( + Utf8Path::new("target").join("rust-analyzer").as_path(), + ) + .join(format!("flycheck{}", self.id)), ), _ => None, }, From 55b73e6b06b930d1d357d15beda76c9c3ffa9ab4 Mon Sep 17 00:00:00 2001 From: Elliot Roberts Date: Sun, 12 Oct 2025 15:47:00 -0700 Subject: [PATCH 15/76] enable tt feature in crates/cfg tests --- src/tools/rust-analyzer/Cargo.lock | 1 + src/tools/rust-analyzer/crates/cfg/Cargo.toml | 3 +++ 2 files changed, 4 insertions(+) diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index 6625403572b0b..16a874ce5d97c 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -235,6 +235,7 @@ name = "cfg" version = "0.0.0" dependencies = [ "arbitrary", + "cfg", "expect-test", "intern", "oorandom", diff --git a/src/tools/rust-analyzer/crates/cfg/Cargo.toml b/src/tools/rust-analyzer/crates/cfg/Cargo.toml index af95f86c83521..e17969bd82d41 100644 --- a/src/tools/rust-analyzer/crates/cfg/Cargo.toml +++ b/src/tools/rust-analyzer/crates/cfg/Cargo.toml @@ -29,5 +29,8 @@ arbitrary = { version = "1.4.1", features = ["derive"] } syntax-bridge.workspace = true syntax.workspace = true +# tt is needed for testing +cfg = { path = ".", default-features = false, features = ["tt"] } + [lints] workspace = true From f35b6dfe9174d6e7e4f783bfbf676741e836d10b Mon Sep 17 00:00:00 2001 From: Michael Gruenewald Date: Tue, 7 Oct 2025 13:59:09 +0200 Subject: [PATCH 16/76] Add lodash --- src/tools/rust-analyzer/editors/code/package-lock.json | 8 ++++++++ src/tools/rust-analyzer/editors/code/package.json | 1 + 2 files changed, 9 insertions(+) diff --git 
a/src/tools/rust-analyzer/editors/code/package-lock.json b/src/tools/rust-analyzer/editors/code/package-lock.json index e35a159cbc3fd..6dd4485223793 100644 --- a/src/tools/rust-analyzer/editors/code/package-lock.json +++ b/src/tools/rust-analyzer/editors/code/package-lock.json @@ -21,6 +21,7 @@ "@stylistic/eslint-plugin": "^4.1.0", "@stylistic/eslint-plugin-js": "^4.1.0", "@tsconfig/strictest": "^2.0.5", + "@types/lodash": "^4.17.20", "@types/node": "~22.13.4", "@types/vscode": "~1.93.0", "@typescript-eslint/eslint-plugin": "^8.25.0", @@ -1388,6 +1389,13 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/lodash": { + "version": "4.17.20", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.20.tgz", + "integrity": "sha512-H3MHACvFUEiujabxhaI/ImO6gUrd8oOurg7LQtS7mbwIXA/cUqWrvBsaeJ23aZEPk1TAYkurjfMbSELfoCXlGA==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/node": { "version": "22.13.5", "resolved": "https://registry.npmjs.org/@types/node/-/node-22.13.5.tgz", diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json index 70687238c854a..d659421a0299b 100644 --- a/src/tools/rust-analyzer/editors/code/package.json +++ b/src/tools/rust-analyzer/editors/code/package.json @@ -58,6 +58,7 @@ "@stylistic/eslint-plugin": "^4.1.0", "@stylistic/eslint-plugin-js": "^4.1.0", "@tsconfig/strictest": "^2.0.5", + "@types/lodash": "^4.17.20", "@types/node": "~22.13.4", "@types/vscode": "~1.93.0", "@typescript-eslint/eslint-plugin": "^8.25.0", From 145677f699412386d4956cef1edb3e1b6f980367 Mon Sep 17 00:00:00 2001 From: Michael Gruenewald Date: Tue, 7 Oct 2025 13:55:49 +0200 Subject: [PATCH 17/76] Don't pretend to have a WorkspaceConfiguration if there isn't one --- .../rust-analyzer/editors/code/src/client.ts | 2 +- .../rust-analyzer/editors/code/src/config.ts | 20 ++++++++++--------- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/src/tools/rust-analyzer/editors/code/src/client.ts b/src/tools/rust-analyzer/editors/code/src/client.ts index 073ff2f4703f4..cb71a01138b3f 100644 --- a/src/tools/rust-analyzer/editors/code/src/client.ts +++ b/src/tools/rust-analyzer/editors/code/src/client.ts @@ -13,7 +13,7 @@ import { RaLanguageClient } from "./lang_client"; export async function createClient( traceOutputChannel: vscode.OutputChannel, outputChannel: vscode.OutputChannel, - initializationOptions: vscode.WorkspaceConfiguration, + initializationOptions: lc.LanguageClientOptions["initializationOptions"], serverOptions: lc.ServerOptions, config: Config, unlinkedFiles: vscode.Uri[], diff --git a/src/tools/rust-analyzer/editors/code/src/config.ts b/src/tools/rust-analyzer/editors/code/src/config.ts index 3b1b0768d3cf7..06e179eb0eb1e 100644 --- a/src/tools/rust-analyzer/editors/code/src/config.ts +++ b/src/tools/rust-analyzer/editors/code/src/config.ts @@ -5,6 +5,7 @@ import * as vscode from "vscode"; import { expectNotUndefined, log, normalizeDriveLetter, unwrapUndefinable } from "./util"; import type { Env } from "./util"; import type { Disposable } from "vscode"; +import { get } from "lodash"; export type RunnableEnvCfgItem = { mask?: string; @@ -12,6 +13,9 @@ export type RunnableEnvCfgItem = { platform?: string | string[]; }; +export type ConfigurationTree = { [key: string]: ConfigurationValue }; +export type ConfigurationValue = undefined | null | boolean | number | string | ConfigurationValue[] | ConfigurationTree; + type ShowStatusBar = "always" | "never" | { documentSelector: vscode.DocumentSelector }; 
export class Config { @@ -197,7 +201,7 @@ export class Config { * So this getter handles this quirk by not requiring the caller to use postfix `!` */ private get(path: string): T | undefined { - return prepareVSCodeConfig(this.cfg.get(path)); + return prepareVSCodeConfig(get(this.cfg, path)) as T; } get serverPath() { @@ -371,22 +375,20 @@ export class Config { } } -export function prepareVSCodeConfig(resp: T): T { +export function prepareVSCodeConfig(resp: ConfigurationValue): ConfigurationValue { if (Is.string(resp)) { - return substituteVSCodeVariableInString(resp) as T; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - } else if (resp && Is.array(resp)) { + return substituteVSCodeVariableInString(resp); + } else if (resp && Is.array(resp)) { return resp.map((val) => { return prepareVSCodeConfig(val); - }) as T; + }); } else if (resp && typeof resp === "object") { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const res: { [key: string]: any } = {}; + const res: ConfigurationTree = {}; for (const key in resp) { const val = resp[key]; res[key] = prepareVSCodeConfig(val); } - return res as T; + return res; } return resp; } From 8ffdc2f84fd6e686804ec00d55eda85b3381e0e8 Mon Sep 17 00:00:00 2001 From: Michael Gruenewald Date: Tue, 7 Oct 2025 14:06:30 +0200 Subject: [PATCH 18/76] Allow other extensions to override the configuration --- .../rust-analyzer/editors/code/src/config.ts | 30 +++++++++++++++---- .../rust-analyzer/editors/code/src/ctx.ts | 6 +++- .../rust-analyzer/editors/code/src/main.ts | 2 ++ 3 files changed, 31 insertions(+), 7 deletions(-) diff --git a/src/tools/rust-analyzer/editors/code/src/config.ts b/src/tools/rust-analyzer/editors/code/src/config.ts index 06e179eb0eb1e..340f107ebbabb 100644 --- a/src/tools/rust-analyzer/editors/code/src/config.ts +++ b/src/tools/rust-analyzer/editors/code/src/config.ts @@ -5,7 +5,7 @@ import * as vscode from "vscode"; import { expectNotUndefined, log, normalizeDriveLetter, unwrapUndefinable } from "./util"; import type { Env } from "./util"; import type { Disposable } from "vscode"; -import { get } from "lodash"; +import { cloneDeep, get, merge } from "lodash"; export type RunnableEnvCfgItem = { mask?: string; @@ -23,7 +23,7 @@ export class Config { configureLang: vscode.Disposable | undefined; readonly rootSection = "rust-analyzer"; - private readonly requiresServerReloadOpts = ["server", "files", "showSyntaxTree"].map( + private readonly requiresServerReloadOpts = ["cargo", "server", "files", "showSyntaxTree"].map( (opt) => `${this.rootSection}.${opt}`, ); @@ -31,6 +31,19 @@ export class Config { (opt) => `${this.rootSection}.${opt}`, ); + extensionConfigurations: Map> = new Map(); + + async addExtensionConfiguration(extensionId: string, configuration: Record): Promise { + this.extensionConfigurations.set(extensionId, configuration); + const prefix = `${this.rootSection}.`; + await this.onDidChangeConfiguration({ + affectsConfiguration(section: string, _scope?: vscode.ConfigurationScope): boolean { + // FIXME: questionable + return section.startsWith(prefix) && section.slice(prefix.length) in configuration; + }, + }); + } + constructor(disposables: Disposable[]) { vscode.workspace.onDidChangeConfiguration(this.onDidChangeConfiguration, this, disposables); this.refreshLogging(); @@ -180,10 +193,15 @@ export class Config { // We don't do runtime config validation here for simplicity. 
More on stackoverflow: // https://stackoverflow.com/questions/60135780/what-is-the-best-way-to-type-check-the-configuration-for-vscode-extension - private get cfg(): vscode.WorkspaceConfiguration { + private get rawCfg(): vscode.WorkspaceConfiguration { return vscode.workspace.getConfiguration(this.rootSection); } + public get cfg(): ConfigurationTree { + const vsCodeConfig = cloneDeep(this.rawCfg); + return merge(vsCodeConfig, ...this.extensionConfigurations.values()); + } + /** * Beware that postfix `!` operator erases both `null` and `undefined`. * This is why the following doesn't work as expected: @@ -227,7 +245,7 @@ export class Config { } async toggleCheckOnSave() { - const config = this.cfg.inspect("checkOnSave") ?? { key: "checkOnSave" }; + const config = this.rawCfg.inspect("checkOnSave") ?? { key: "checkOnSave" }; let overrideInLanguage; let target; let value; @@ -253,7 +271,7 @@ export class Config { overrideInLanguage = config.defaultLanguageValue; value = config.defaultValue || config.defaultLanguageValue; } - await this.cfg.update("checkOnSave", !(value || false), target || null, overrideInLanguage); + await this.rawCfg.update("checkOnSave", !(value || false), target || null, overrideInLanguage); } get problemMatcher(): string[] { @@ -371,7 +389,7 @@ export class Config { } async setAskBeforeUpdateTest(value: boolean) { - await this.cfg.update("runnables.askBeforeUpdateTest", value, true); + await this.rawCfg.update("runnables.askBeforeUpdateTest", value, true); } } diff --git a/src/tools/rust-analyzer/editors/code/src/ctx.ts b/src/tools/rust-analyzer/editors/code/src/ctx.ts index e55754fb9f048..dfbf5b1e47c3d 100644 --- a/src/tools/rust-analyzer/editors/code/src/ctx.ts +++ b/src/tools/rust-analyzer/editors/code/src/ctx.ts @@ -150,6 +150,10 @@ export class Ctx implements RustAnalyzerExtensionApi { }); } + async addConfiguration(extensionId: string, configuration: Record): Promise { + await this.config.addExtensionConfiguration(extensionId, configuration); + } + dispose() { this.config.dispose(); this.statusBar.dispose(); @@ -230,7 +234,7 @@ export class Ctx implements RustAnalyzerExtensionApi { debug: run, }; - let rawInitializationOptions = vscode.workspace.getConfiguration("rust-analyzer"); + let rawInitializationOptions = this.config.cfg; if (this.workspace.kind === "Detached Files") { rawInitializationOptions = { diff --git a/src/tools/rust-analyzer/editors/code/src/main.ts b/src/tools/rust-analyzer/editors/code/src/main.ts index 996298524f115..c126a0a105dc4 100644 --- a/src/tools/rust-analyzer/editors/code/src/main.ts +++ b/src/tools/rust-analyzer/editors/code/src/main.ts @@ -13,6 +13,8 @@ const RUST_PROJECT_CONTEXT_NAME = "inRustProject"; export interface RustAnalyzerExtensionApi { // FIXME: this should be non-optional readonly client?: lc.LanguageClient; + + addConfiguration(extensionId: string, configuration: Record): Promise; } export async function deactivate() { From c53b566b2afe7f5dbfc04b30873ae156c21808da Mon Sep 17 00:00:00 2001 From: Michael Gruenewald Date: Thu, 9 Oct 2025 14:21:16 +0200 Subject: [PATCH 19/76] Remember configuration overrides by extensions --- .../rust-analyzer/editors/code/src/config.ts | 50 ++++++++++++------- .../rust-analyzer/editors/code/src/ctx.ts | 2 +- 2 files changed, 32 insertions(+), 20 deletions(-) diff --git a/src/tools/rust-analyzer/editors/code/src/config.ts b/src/tools/rust-analyzer/editors/code/src/config.ts index 340f107ebbabb..3afda60082636 100644 --- a/src/tools/rust-analyzer/editors/code/src/config.ts +++ 
b/src/tools/rust-analyzer/editors/code/src/config.ts @@ -4,8 +4,7 @@ import * as path from "path"; import * as vscode from "vscode"; import { expectNotUndefined, log, normalizeDriveLetter, unwrapUndefinable } from "./util"; import type { Env } from "./util"; -import type { Disposable } from "vscode"; -import { cloneDeep, get, merge } from "lodash"; +import { cloneDeep, get, merge, pickBy } from "lodash"; export type RunnableEnvCfgItem = { mask?: string; @@ -20,6 +19,7 @@ type ShowStatusBar = "always" | "never" | { documentSelector: vscode.DocumentSel export class Config { readonly extensionId = "rust-lang.rust-analyzer"; + readonly workspaceState: vscode.Memento; configureLang: vscode.Disposable | undefined; readonly rootSection = "rust-analyzer"; @@ -31,29 +31,43 @@ export class Config { (opt) => `${this.rootSection}.${opt}`, ); - extensionConfigurations: Map> = new Map(); + constructor(ctx: vscode.ExtensionContext) { + this.workspaceState = ctx.workspaceState; + vscode.workspace.onDidChangeConfiguration(this.onDidChangeConfiguration, this, ctx.subscriptions); + this.refreshLogging(); + this.configureLanguage(); + } + + dispose() { + this.configureLang?.dispose(); + } + + /// Returns the rust-analyzer-specific workspace configuration, incl. any + /// configuration items overridden by (present) extensions. + get extensionConfigurations(): Record> { + return pickBy( + this.workspaceState.get>("extensionConfigurations", {}), + (_, extensionId) => vscode.extensions.getExtension(extensionId) !== undefined, + ); + } async addExtensionConfiguration(extensionId: string, configuration: Record): Promise { - this.extensionConfigurations.set(extensionId, configuration); + const oldConfiguration = this.cfg; + + const extCfgs = this.extensionConfigurations; + extCfgs[extensionId] = configuration; + await this.workspaceState.update("extensionConfigurations", extCfgs); + + const newConfiguration = this.cfg; const prefix = `${this.rootSection}.`; await this.onDidChangeConfiguration({ affectsConfiguration(section: string, _scope?: vscode.ConfigurationScope): boolean { - // FIXME: questionable - return section.startsWith(prefix) && section.slice(prefix.length) in configuration; + return section.startsWith(prefix) && + get(oldConfiguration, section.slice(prefix.length)) !== get(newConfiguration, section.slice(prefix.length)); }, }); } - constructor(disposables: Disposable[]) { - vscode.workspace.onDidChangeConfiguration(this.onDidChangeConfiguration, this, disposables); - this.refreshLogging(); - this.configureLanguage(); - } - - dispose() { - this.configureLang?.dispose(); - } - private refreshLogging() { log.info( "Extension version:", @@ -198,8 +212,7 @@ export class Config { } public get cfg(): ConfigurationTree { - const vsCodeConfig = cloneDeep(this.rawCfg); - return merge(vsCodeConfig, ...this.extensionConfigurations.values()); + return merge(cloneDeep(this.rawCfg), ...Object.values(this.extensionConfigurations)); } /** @@ -209,7 +222,6 @@ export class Config { * ```ts * const nullableNum = vscode * .workspace - * .getConfiguration * .getConfiguration("rust-analyzer") * .get(path)!; * diff --git a/src/tools/rust-analyzer/editors/code/src/ctx.ts b/src/tools/rust-analyzer/editors/code/src/ctx.ts index dfbf5b1e47c3d..69703efc69643 100644 --- a/src/tools/rust-analyzer/editors/code/src/ctx.ts +++ b/src/tools/rust-analyzer/editors/code/src/ctx.ts @@ -125,7 +125,7 @@ export class Ctx implements RustAnalyzerExtensionApi { extCtx.subscriptions.push(this); this.version = extCtx.extension.packageJSON.version ?? 
""; this._serverVersion = ""; - this.config = new Config(extCtx.subscriptions); + this.config = new Config(extCtx); this.statusBar = vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Left); this.updateStatusBarVisibility(vscode.window.activeTextEditor); this.statusBarActiveEditorListener = vscode.window.onDidChangeActiveTextEditor((editor) => From 6de179293805437f3c3359a99129b2468fc185c5 Mon Sep 17 00:00:00 2001 From: Michael Gruenewald Date: Mon, 13 Oct 2025 11:24:06 +0200 Subject: [PATCH 20/76] docs --- src/tools/rust-analyzer/editors/code/src/config.ts | 14 +++++++++++--- src/tools/rust-analyzer/editors/code/src/main.ts | 4 ++++ 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/src/tools/rust-analyzer/editors/code/src/config.ts b/src/tools/rust-analyzer/editors/code/src/config.ts index 3afda60082636..4c895fb3d09b3 100644 --- a/src/tools/rust-analyzer/editors/code/src/config.ts +++ b/src/tools/rust-analyzer/editors/code/src/config.ts @@ -19,10 +19,11 @@ type ShowStatusBar = "always" | "never" | { documentSelector: vscode.DocumentSel export class Config { readonly extensionId = "rust-lang.rust-analyzer"; - readonly workspaceState: vscode.Memento; + configureLang: vscode.Disposable | undefined; + workspaceState: vscode.Memento; - readonly rootSection = "rust-analyzer"; + private readonly rootSection = "rust-analyzer"; private readonly requiresServerReloadOpts = ["cargo", "server", "files", "showSyntaxTree"].map( (opt) => `${this.rootSection}.${opt}`, ); @@ -42,11 +43,14 @@ export class Config { this.configureLang?.dispose(); } + private readonly extensionConfigurationStateKey = "extensionConfigurations"; + /// Returns the rust-analyzer-specific workspace configuration, incl. any /// configuration items overridden by (present) extensions. get extensionConfigurations(): Record> { return pickBy( this.workspaceState.get>("extensionConfigurations", {}), + // ignore configurations from disabled/removed extensions (_, extensionId) => vscode.extensions.getExtension(extensionId) !== undefined, ); } @@ -56,7 +60,7 @@ export class Config { const extCfgs = this.extensionConfigurations; extCfgs[extensionId] = configuration; - await this.workspaceState.update("extensionConfigurations", extCfgs); + await this.workspaceState.update(this.extensionConfigurationStateKey, extCfgs); const newConfiguration = this.cfg; const prefix = `${this.rootSection}.`; @@ -207,10 +211,14 @@ export class Config { // We don't do runtime config validation here for simplicity. More on stackoverflow: // https://stackoverflow.com/questions/60135780/what-is-the-best-way-to-type-check-the-configuration-for-vscode-extension + // Returns the raw configuration for rust-analyzer as returned by vscode. This + // should only be used when modifications to the user/workspace configuration + // are required. private get rawCfg(): vscode.WorkspaceConfiguration { return vscode.workspace.getConfiguration(this.rootSection); } + // Returns the final configuration to use, with extension configuration overrides merged in. 
public get cfg(): ConfigurationTree { return merge(cloneDeep(this.rawCfg), ...Object.values(this.extensionConfigurations)); } diff --git a/src/tools/rust-analyzer/editors/code/src/main.ts b/src/tools/rust-analyzer/editors/code/src/main.ts index c126a0a105dc4..1b512696ac57f 100644 --- a/src/tools/rust-analyzer/editors/code/src/main.ts +++ b/src/tools/rust-analyzer/editors/code/src/main.ts @@ -14,6 +14,10 @@ export interface RustAnalyzerExtensionApi { // FIXME: this should be non-optional readonly client?: lc.LanguageClient; + // Allows adding a configuration override from another extension. + // `configuration` is a `rust-analyzer` subtree of the vscode configuration + // that gets merged with the workspace/user configuration. `extensionId` is + // used to only merge configuration override from present extensions. addConfiguration(extensionId: string, configuration: Record): Promise; } From e44c3c4f2607ca5971df5eb79615d10bc46350b2 Mon Sep 17 00:00:00 2001 From: Michael Gruenewald Date: Mon, 13 Oct 2025 14:29:31 +0200 Subject: [PATCH 21/76] Don't override users' settings --- .../rust-analyzer/editors/code/src/config.ts | 16 ++++++++++++++-- src/tools/rust-analyzer/editors/code/src/main.ts | 6 +++--- 2 files changed, 17 insertions(+), 5 deletions(-) diff --git a/src/tools/rust-analyzer/editors/code/src/config.ts b/src/tools/rust-analyzer/editors/code/src/config.ts index 4c895fb3d09b3..b97a47d5b984f 100644 --- a/src/tools/rust-analyzer/editors/code/src/config.ts +++ b/src/tools/rust-analyzer/editors/code/src/config.ts @@ -4,7 +4,7 @@ import * as path from "path"; import * as vscode from "vscode"; import { expectNotUndefined, log, normalizeDriveLetter, unwrapUndefinable } from "./util"; import type { Env } from "./util"; -import { cloneDeep, get, merge, pickBy } from "lodash"; +import { cloneDeep, get, pickBy, set } from "lodash"; export type RunnableEnvCfgItem = { mask?: string; @@ -220,7 +220,19 @@ export class Config { // Returns the final configuration to use, with extension configuration overrides merged in. public get cfg(): ConfigurationTree { - return merge(cloneDeep(this.rawCfg), ...Object.values(this.extensionConfigurations)); + const finalConfig = cloneDeep(this.rawCfg); + for (const [extensionId, items] of Object.entries(this.extensionConfigurations)) { + for (const [k, v] of Object.entries(items)) { + const i = this.rawCfg.inspect(k); + if (i?.workspaceValue !== undefined || i?.workspaceFolderValue !== undefined || i?.globalValue !== undefined) { + log.trace(`Ignoring configuration override for ${k} from extension ${extensionId}`); + continue; + } + log.trace(`Extension ${extensionId} overrides configuration ${k} to `, v); + set(finalConfig, k, v); + } + } + return finalConfig; } /** diff --git a/src/tools/rust-analyzer/editors/code/src/main.ts b/src/tools/rust-analyzer/editors/code/src/main.ts index 1b512696ac57f..190f5866d0ead 100644 --- a/src/tools/rust-analyzer/editors/code/src/main.ts +++ b/src/tools/rust-analyzer/editors/code/src/main.ts @@ -15,9 +15,9 @@ export interface RustAnalyzerExtensionApi { readonly client?: lc.LanguageClient; // Allows adding a configuration override from another extension. - // `configuration` is a `rust-analyzer` subtree of the vscode configuration - // that gets merged with the workspace/user configuration. `extensionId` is - // used to only merge configuration override from present extensions. + // `extensionId` is used to only merge configuration override from present + // extensions. 
`configuration` is map of rust-analyzer-specific setting + // overrides, e.g., `{"cargo.cfgs": ["foo", "bar"]}`. addConfiguration(extensionId: string, configuration: Record): Promise; } From 38c4d80c08b7d9a8e275656709e77b1310c68a28 Mon Sep 17 00:00:00 2001 From: Michael Gruenewald Date: Mon, 13 Oct 2025 14:45:01 +0200 Subject: [PATCH 22/76] Format fixes --- .../rust-analyzer/editors/code/src/config.ts | 49 +++++++++++++++---- .../rust-analyzer/editors/code/src/ctx.ts | 5 +- 2 files changed, 44 insertions(+), 10 deletions(-) diff --git a/src/tools/rust-analyzer/editors/code/src/config.ts b/src/tools/rust-analyzer/editors/code/src/config.ts index b97a47d5b984f..c0a1b3f02e36a 100644 --- a/src/tools/rust-analyzer/editors/code/src/config.ts +++ b/src/tools/rust-analyzer/editors/code/src/config.ts @@ -13,7 +13,14 @@ export type RunnableEnvCfgItem = { }; export type ConfigurationTree = { [key: string]: ConfigurationValue }; -export type ConfigurationValue = undefined | null | boolean | number | string | ConfigurationValue[] | ConfigurationTree; +export type ConfigurationValue = + | undefined + | null + | boolean + | number + | string + | ConfigurationValue[] + | ConfigurationTree; type ShowStatusBar = "always" | "never" | { documentSelector: vscode.DocumentSelector }; @@ -34,7 +41,11 @@ export class Config { constructor(ctx: vscode.ExtensionContext) { this.workspaceState = ctx.workspaceState; - vscode.workspace.onDidChangeConfiguration(this.onDidChangeConfiguration, this, ctx.subscriptions); + vscode.workspace.onDidChangeConfiguration( + this.onDidChangeConfiguration, + this, + ctx.subscriptions, + ); this.refreshLogging(); this.configureLanguage(); } @@ -49,13 +60,19 @@ export class Config { /// configuration items overridden by (present) extensions. get extensionConfigurations(): Record> { return pickBy( - this.workspaceState.get>("extensionConfigurations", {}), + this.workspaceState.get>( + "extensionConfigurations", + {}, + ), // ignore configurations from disabled/removed extensions (_, extensionId) => vscode.extensions.getExtension(extensionId) !== undefined, ); } - async addExtensionConfiguration(extensionId: string, configuration: Record): Promise { + async addExtensionConfiguration( + extensionId: string, + configuration: Record, + ): Promise { const oldConfiguration = this.cfg; const extCfgs = this.extensionConfigurations; @@ -66,8 +83,11 @@ export class Config { const prefix = `${this.rootSection}.`; await this.onDidChangeConfiguration({ affectsConfiguration(section: string, _scope?: vscode.ConfigurationScope): boolean { - return section.startsWith(prefix) && - get(oldConfiguration, section.slice(prefix.length)) !== get(newConfiguration, section.slice(prefix.length)); + return ( + section.startsWith(prefix) && + get(oldConfiguration, section.slice(prefix.length)) !== + get(newConfiguration, section.slice(prefix.length)) + ); }, }); } @@ -224,8 +244,14 @@ export class Config { for (const [extensionId, items] of Object.entries(this.extensionConfigurations)) { for (const [k, v] of Object.entries(items)) { const i = this.rawCfg.inspect(k); - if (i?.workspaceValue !== undefined || i?.workspaceFolderValue !== undefined || i?.globalValue !== undefined) { - log.trace(`Ignoring configuration override for ${k} from extension ${extensionId}`); + if ( + i?.workspaceValue !== undefined || + i?.workspaceFolderValue !== undefined || + i?.globalValue !== undefined + ) { + log.trace( + `Ignoring configuration override for ${k} from extension ${extensionId}`, + ); continue; } log.trace(`Extension 
${extensionId} overrides configuration ${k} to `, v); @@ -303,7 +329,12 @@ export class Config { overrideInLanguage = config.defaultLanguageValue; value = config.defaultValue || config.defaultLanguageValue; } - await this.rawCfg.update("checkOnSave", !(value || false), target || null, overrideInLanguage); + await this.rawCfg.update( + "checkOnSave", + !(value || false), + target || null, + overrideInLanguage, + ); } get problemMatcher(): string[] { diff --git a/src/tools/rust-analyzer/editors/code/src/ctx.ts b/src/tools/rust-analyzer/editors/code/src/ctx.ts index 69703efc69643..a7b7be03b5d88 100644 --- a/src/tools/rust-analyzer/editors/code/src/ctx.ts +++ b/src/tools/rust-analyzer/editors/code/src/ctx.ts @@ -150,7 +150,10 @@ export class Ctx implements RustAnalyzerExtensionApi { }); } - async addConfiguration(extensionId: string, configuration: Record): Promise { + async addConfiguration( + extensionId: string, + configuration: Record, + ): Promise { await this.config.addExtensionConfiguration(extensionId, configuration); } From 25ed1c5bfb9db3dce709ec7a579fa07b168e63d7 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Tue, 14 Oct 2025 21:30:34 +0300 Subject: [PATCH 23/76] Migrate `Display` impls to the next solver --- .../crates/hir-ty/src/builder.rs | 222 +--- .../crates/hir-ty/src/chalk_ext.rs | 44 +- .../rust-analyzer/crates/hir-ty/src/db.rs | 1 - .../hir-ty/src/diagnostics/match_check.rs | 29 +- .../crates/hir-ty/src/display.rs | 1141 ++++++----------- .../crates/hir-ty/src/generics.rs | 7 +- .../rust-analyzer/crates/hir-ty/src/infer.rs | 30 +- .../rust-analyzer/crates/hir-ty/src/lib.rs | 128 +- .../crates/hir-ty/src/lower_nextsolver.rs | 142 +- .../hir-ty/src/lower_nextsolver/path.rs | 29 +- .../crates/hir-ty/src/mir/pretty.rs | 6 +- .../crates/hir-ty/src/next_solver/consts.rs | 6 +- .../hir-ty/src/next_solver/predicate.rs | 20 + .../rust-analyzer/crates/hir-ty/src/traits.rs | 90 +- .../rust-analyzer/crates/hir-ty/src/utils.rs | 16 +- .../rust-analyzer/crates/hir/src/display.rs | 179 ++- .../crates/ide/src/hover/tests.rs | 2 +- .../rust-analyzer/crates/ide/src/moniker.rs | 2 +- .../crates/ide/src/navigation_target.rs | 2 +- 19 files changed, 682 insertions(+), 1414 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs index 706bbe856c67d..5c4eb8475bbc2 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs @@ -1,15 +1,14 @@ //! `TyBuilder`, a helper for building instances of `Ty` and related types. 
use chalk_ir::{ - AdtId, DebruijnIndex, Scalar, - cast::{Cast, CastTo, Caster}, + DebruijnIndex, Scalar, + cast::{Cast, Caster}, }; -use hir_def::{GenericDefId, GenericParamId, TraitId, TypeAliasId, builtin_type::BuiltinType}; +use hir_def::{GenericDefId, GenericParamId, TraitId, builtin_type::BuiltinType}; use smallvec::SmallVec; use crate::{ - BoundVar, CallableSig, GenericArg, GenericArgData, Interner, ProjectionTy, Substitution, - TraitRef, Ty, TyDefId, TyExt, TyKind, + BoundVar, GenericArg, GenericArgData, Interner, Substitution, TraitRef, Ty, TyKind, consteval::unknown_const_as_generic, db::HirDatabase, error_lifetime, @@ -19,18 +18,18 @@ use crate::{ DbInterner, EarlyBinder, mapping::{ChalkToNextSolver, NextSolverToChalk}, }, - primitive, to_assoc_type_id, to_chalk_trait_id, + primitive, to_chalk_trait_id, }; #[derive(Debug, Clone, PartialEq, Eq)] -pub enum ParamKind { +pub(crate) enum ParamKind { Type, Lifetime, Const(Ty), } /// This is a builder for `Ty` or anything that needs a `Substitution`. -pub struct TyBuilder { +pub(crate) struct TyBuilder { /// The `data` field is used to keep track of what we're building (e.g. an /// ADT, a `TraitRef`, ...). data: D, @@ -60,10 +59,6 @@ impl TyBuilder { Self { data, vec: SmallVec::with_capacity(param_kinds.len()), param_kinds, parent_subst } } - fn new_empty(data: D) -> Self { - TyBuilder::new(data, SmallVec::new(), None) - } - fn build_internal(self) -> (D, Substitution) { assert_eq!( self.vec.len(), @@ -83,35 +78,15 @@ impl TyBuilder { (self.data, subst) } - pub fn build_into_subst(self) -> Substitution { - self.build_internal().1 - } - - pub fn push(mut self, arg: impl CastTo) -> Self { - assert!(self.remaining() > 0); - let arg = arg.cast(Interner); - let expected_kind = &self.param_kinds[self.vec.len()]; - - let arg_kind = match arg.data(Interner) { - GenericArgData::Ty(_) => ParamKind::Type, - GenericArgData::Lifetime(_) => panic!("Got lifetime in TyBuilder::push"), - GenericArgData::Const(c) => { - let c = c.data(Interner); - ParamKind::Const(c.ty.clone()) - } - }; - assert_eq!(*expected_kind, arg_kind); - - self.vec.push(arg); - - self - } - - pub fn remaining(&self) -> usize { + pub(crate) fn remaining(&self) -> usize { self.param_kinds.len() - self.vec.len() } - pub fn fill_with_bound_vars(self, debruijn: DebruijnIndex, starting_from: usize) -> Self { + pub(crate) fn fill_with_bound_vars( + self, + debruijn: DebruijnIndex, + starting_from: usize, + ) -> Self { // self.fill is inlined to make borrow checker happy let mut this = self; let other = &this.param_kinds[this.vec.len()..]; @@ -129,22 +104,6 @@ impl TyBuilder { this } - pub fn fill_with_unknown(self) -> Self { - let interner = DbInterner::conjure(); - // self.fill is inlined to make borrow checker happy - let mut this = self; - let filler = this.param_kinds[this.vec.len()..].iter().map(|x| match x { - ParamKind::Type => TyKind::Error.intern(Interner).cast(Interner), - ParamKind::Const(ty) => { - unknown_const_as_generic(ty.to_nextsolver(interner)).to_chalk(interner) - } - ParamKind::Lifetime => error_lifetime().cast(Interner), - }); - this.vec.extend(filler.casted(Interner)); - assert_eq!(this.remaining(), 0); - this - } - #[tracing::instrument(skip_all)] pub(crate) fn fill_with_inference_vars(self, table: &mut InferenceTable<'_>) -> Self { self.fill(|x| { @@ -157,7 +116,7 @@ impl TyBuilder { }) } - pub fn fill(mut self, filler: impl FnMut(&ParamKind) -> GenericArg) -> Self { + pub(crate) fn fill(mut self, filler: impl FnMut(&ParamKind) -> GenericArg) -> Self { 
self.vec.extend(self.param_kinds[self.vec.len()..].iter().map(filler)); assert_eq!(self.remaining(), 0); self @@ -174,28 +133,11 @@ impl TyBuilder { } impl TyBuilder<()> { - pub fn unit() -> Ty { - TyKind::Tuple(0, Substitution::empty(Interner)).intern(Interner) - } - - // FIXME: rustc's ty is dependent on the adt type, maybe we need to do that as well - pub fn discr_ty() -> Ty { - TyKind::Scalar(chalk_ir::Scalar::Int(chalk_ir::IntTy::I128)).intern(Interner) - } - - pub fn bool() -> Ty { - TyKind::Scalar(chalk_ir::Scalar::Bool).intern(Interner) - } - - pub fn usize() -> Ty { + pub(crate) fn usize() -> Ty { TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize)).intern(Interner) } - pub fn fn_ptr(sig: CallableSig) -> Ty { - TyKind::Function(sig.to_fn_ptr()).intern(Interner) - } - - pub fn builtin(builtin: BuiltinType) -> Ty { + pub(crate) fn builtin(builtin: BuiltinType) -> Ty { match builtin { BuiltinType::Char => TyKind::Scalar(Scalar::Char).intern(Interner), BuiltinType::Bool => TyKind::Scalar(Scalar::Bool).intern(Interner), @@ -212,16 +154,10 @@ impl TyBuilder<()> { } } - pub fn slice(argument: Ty) -> Ty { - TyKind::Slice(argument).intern(Interner) - } - - pub fn placeholder_subst(db: &dyn HirDatabase, def: impl Into) -> Substitution { - let params = generics(db, def.into()); - params.placeholder_subst(db) - } - - pub fn unknown_subst(db: &dyn HirDatabase, def: impl Into) -> Substitution { + pub(crate) fn unknown_subst( + db: &dyn HirDatabase, + def: impl Into, + ) -> Substitution { let interner = DbInterner::conjure(); let params = generics(db, def.into()); Substitution::from_iter( @@ -239,7 +175,7 @@ impl TyBuilder<()> { } #[tracing::instrument(skip_all)] - pub fn subst_for_def( + pub(crate) fn subst_for_def( db: &dyn HirDatabase, def: impl Into, parent_subst: Option, @@ -257,114 +193,25 @@ impl TyBuilder<()> { TyBuilder::new((), params, parent_subst) } - pub fn build(self) -> Substitution { + pub(crate) fn build(self) -> Substitution { let ((), subst) = self.build_internal(); subst } } -impl TyBuilder { - pub fn adt(db: &dyn HirDatabase, def: hir_def::AdtId) -> TyBuilder { - TyBuilder::subst_for_def(db, def, None).with_data(def) - } - - pub fn fill_with_defaults( - mut self, - db: &dyn HirDatabase, - mut fallback: impl FnMut() -> Ty, - ) -> Self { - let interner = DbInterner::conjure(); - // Note that we're building ADT, so we never have parent generic parameters. - let defaults = db.generic_defaults(self.data.into()); - - if let Some(defaults) = defaults.get(self.vec.len()..) { - for default_ty in defaults { - // NOTE(skip_binders): we only check if the arg type is error type. - if let Some(x) = default_ty.skip_binders().ty(Interner) - && x.is_unknown() - { - self.vec.push(fallback().cast(Interner)); - continue; - } - // Each default can only depend on the previous parameters. - self.vec.push(default_ty.clone().substitute(Interner, &*self.vec).cast(Interner)); - } - } - - // The defaults may be missing if no param has default, so fill that. 
- let filler = self.param_kinds[self.vec.len()..].iter().map(|x| match x { - ParamKind::Type => fallback().cast(Interner), - ParamKind::Const(ty) => { - unknown_const_as_generic(ty.to_nextsolver(interner)).to_chalk(interner) - } - ParamKind::Lifetime => error_lifetime().cast(Interner), - }); - self.vec.extend(filler.casted(Interner)); - - self - } - - pub fn build(self) -> Ty { - let (adt, subst) = self.build_internal(); - TyKind::Adt(AdtId(adt), subst).intern(Interner) - } -} - -pub struct Tuple(usize); -impl TyBuilder { - pub fn tuple(size: usize) -> TyBuilder { - TyBuilder::new(Tuple(size), std::iter::repeat_n(ParamKind::Type, size).collect(), None) - } - - pub fn build(self) -> Ty { - let (Tuple(size), subst) = self.build_internal(); - TyKind::Tuple(size, subst).intern(Interner) - } - - pub fn tuple_with(elements: I) -> Ty - where - I: IntoIterator, - ::IntoIter: ExactSizeIterator, - { - let elements = elements.into_iter(); - let len = elements.len(); - let mut b = - TyBuilder::new(Tuple(len), std::iter::repeat_n(ParamKind::Type, len).collect(), None); - for e in elements { - b = b.push(e); - } - b.build() - } -} - impl TyBuilder { - pub fn trait_ref(db: &dyn HirDatabase, def: TraitId) -> TyBuilder { + pub(crate) fn trait_ref(db: &dyn HirDatabase, def: TraitId) -> TyBuilder { TyBuilder::subst_for_def(db, def, None).with_data(def) } - pub fn build(self) -> TraitRef { + pub(crate) fn build(self) -> TraitRef { let (trait_id, substitution) = self.build_internal(); TraitRef { trait_id: to_chalk_trait_id(trait_id), substitution } } } -impl TyBuilder { - pub fn assoc_type_projection( - db: &dyn HirDatabase, - def: TypeAliasId, - parent_subst: Option, - ) -> TyBuilder { - TyBuilder::subst_for_def(db, def, parent_subst).with_data(def) - } - - pub fn build(self) -> ProjectionTy { - let (type_alias, substitution) = self.build_internal(); - ProjectionTy { associated_ty_id: to_assoc_type_id(type_alias), substitution } - } -} - impl<'db, T: rustc_type_ir::TypeFoldable>> TyBuilder> { - pub fn build(self, interner: DbInterner<'db>) -> T { + pub(crate) fn build(self, interner: DbInterner<'db>) -> T { let (b, subst) = self.build_internal(); let args: crate::next_solver::GenericArgs<'db> = subst.to_nextsolver(interner); b.instantiate(interner, args) @@ -372,24 +219,7 @@ impl<'db, T: rustc_type_ir::TypeFoldable>> TyBuilder TyBuilder>> { - pub fn def_ty( - db: &'db dyn HirDatabase, - def: TyDefId, - parent_subst: Option, - ) -> TyBuilder>> { - let poly_ty = db.ty(def); - let id: GenericDefId = match def { - TyDefId::BuiltinType(_) => { - assert!(parent_subst.is_none()); - return TyBuilder::new_empty(poly_ty); - } - TyDefId::AdtId(id) => id.into(), - TyDefId::TypeAliasId(id) => id.into(), - }; - TyBuilder::subst_for_def(db, id, parent_subst).with_data(poly_ty) - } - - pub fn impl_self_ty( + pub(crate) fn impl_self_ty( db: &'db dyn HirDatabase, def: hir_def::ImplId, ) -> TyBuilder>> { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs index 6bab30b40cc15..a315f699ddaae 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs @@ -3,47 +3,11 @@ use hir_def::{ItemContainerId, Lookup, TraitId}; use crate::{ - Binders, CallableSig, DynTy, Interner, ProjectionTy, Substitution, TraitRef, Ty, TyKind, - db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, generics::generics, - to_chalk_trait_id, utils::ClosureSubst, + Binders, DynTy, Interner, ProjectionTy, 
Substitution, TraitRef, Ty, db::HirDatabase, + from_assoc_type_id, from_chalk_trait_id, generics::generics, to_chalk_trait_id, }; -pub(crate) trait TyExt { - fn is_unit(&self) -> bool; - fn is_unknown(&self) -> bool; - - fn as_tuple(&self) -> Option<&Substitution>; - - fn callable_sig(&self, db: &dyn HirDatabase) -> Option; -} - -impl TyExt for Ty { - fn is_unit(&self) -> bool { - matches!(self.kind(Interner), TyKind::Tuple(0, _)) - } - - fn is_unknown(&self) -> bool { - matches!(self.kind(Interner), TyKind::Error) - } - - fn as_tuple(&self) -> Option<&Substitution> { - match self.kind(Interner) { - TyKind::Tuple(_, substs) => Some(substs), - _ => None, - } - } - - fn callable_sig(&self, db: &dyn HirDatabase) -> Option { - match self.kind(Interner) { - TyKind::Function(fn_ptr) => Some(CallableSig::from_fn_ptr(fn_ptr)), - TyKind::FnDef(def, parameters) => Some(CallableSig::from_def(db, *def, parameters)), - TyKind::Closure(.., substs) => ClosureSubst(substs).sig_ty(db).callable_sig(db), - _ => None, - } - } -} - -pub trait ProjectionTyExt { +pub(crate) trait ProjectionTyExt { fn trait_ref(&self, db: &dyn HirDatabase) -> TraitRef; fn trait_(&self, db: &dyn HirDatabase) -> TraitId; fn self_type_parameter(&self, db: &dyn HirDatabase) -> Ty; @@ -88,7 +52,7 @@ impl DynTyExt for DynTy { } } -pub trait TraitRefExt { +pub(crate) trait TraitRefExt { fn hir_trait_id(&self) -> TraitId; } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs index 80945adedbfdf..7ad76f35b1f2f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs @@ -24,7 +24,6 @@ use crate::{ lower::{Diagnostics, GenericDefaults, GenericPredicates}, method_resolution::{InherentImpls, TraitImpls, TyFingerprint}, mir::{BorrowckResult, MirBody, MirLowerError}, - traits::NextTraitSolveResult, }; #[query_group::query_group] diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs index af6795e6018a5..80b65ace77cd7 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs @@ -18,7 +18,7 @@ use hir_def::{ use hir_expand::name::Name; use rustc_type_ir::inherent::{IntoKind, SliceLike}; use span::Edition; -use stdx::{always, never}; +use stdx::{always, never, variance::PhantomCovariantLifetime}; use crate::{ InferenceResult, @@ -299,8 +299,8 @@ impl<'a, 'db> PatCtxt<'a, 'db> { } } -impl HirDisplay for Pat<'_> { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for Pat<'db> { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { match &*self.kind { PatKind::Wild => write!(f, "_"), PatKind::Never => write!(f, "!"), @@ -356,7 +356,7 @@ impl HirDisplay for Pat<'_> { .filter(|p| !matches!(*p.pattern.kind, PatKind::Wild)) .map(|p| { printed += 1; - WriteWith(|f| { + WriteWith::new(|f| { write!( f, "{}: ", @@ -382,7 +382,7 @@ impl HirDisplay for Pat<'_> { if num_fields != 0 || variant.is_none() { write!(f, "(")?; let subpats = (0..num_fields).map(|i| { - WriteWith(move |f| { + WriteWith::new(move |f| { let fid = LocalFieldId::from_raw((i as u32).into()); if let Some(p) = subpatterns.get(i) && p.field == fid @@ -420,15 +420,24 @@ impl HirDisplay for Pat<'_> { } } -struct WriteWith(F) +struct WriteWith<'db, F>(F, PhantomCovariantLifetime<'db>) where - F: Fn(&mut 
HirFormatter<'_>) -> Result<(), HirDisplayError>; + F: Fn(&mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError>; -impl HirDisplay for WriteWith +impl<'db, F> WriteWith<'db, F> where - F: Fn(&mut HirFormatter<'_>) -> Result<(), HirDisplayError>, + F: Fn(&mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError>, { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { + fn new(f: F) -> Self { + Self(f, PhantomCovariantLifetime::new()) + } +} + +impl<'db, F> HirDisplay<'db> for WriteWith<'db, F> +where + F: Fn(&mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError>, +{ + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { (self.0)(f) } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs index bcd93c6699ccd..210e1ac52e58a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs @@ -8,7 +8,6 @@ use std::{ }; use base_db::Crate; -use chalk_ir::{BoundVar, Safety, TyKind}; use either::Either; use hir_def::{ FindPathConfig, GeneralConstId, GenericDefId, HasModule, LocalFieldId, Lookup, ModuleDefId, @@ -36,39 +35,33 @@ use rustc_apfloat::{ Float, ieee::{Half as f16, Quad as f128}, }; +use rustc_ast_ir::FloatTy; use rustc_hash::FxHashSet; use rustc_type_ir::{ - AliasTyKind, CoroutineArgsParts, RegionKind, - inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike}, + AliasTyKind, CoroutineArgsParts, RegionKind, Upcast, + inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike, Term as _, Ty as _, Tys as _}, }; use smallvec::SmallVec; use span::Edition; use stdx::never; use triomphe::Arc; -use crate::next_solver::infer::traits::ObligationCause; -use crate::next_solver::{infer::DbInternerInferExt, mapping::NextSolverToChalk}; use crate::{ - AliasEq, AliasTy, Binders, CallableDefId, CallableSig, ConcreteConst, Const, ConstScalar, - ConstValue, DomainGoal, FnAbi, GenericArg, ImplTraitId, Interner, Lifetime, LifetimeData, - LifetimeOutlives, MemoryMap, OpaqueTy, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, - TraitEnvironment, TraitRef, TraitRefExt, Ty, TyExt, WhereClause, consteval, + CallableDefId, FnAbi, ImplTraitId, MemoryMap, TraitEnvironment, consteval, db::{HirDatabase, InternedClosure}, - from_assoc_type_id, from_placeholder_idx, generics::generics, - infer::normalize, layout::Layout, - lt_from_placeholder_idx, mir::pad16, next_solver::{ - BoundExistentialPredicate, DbInterner, GenericArgs, SolverDefId, - mapping::{ - ChalkToNextSolver, convert_args_for_result, convert_const_for_result, - convert_region_for_result, convert_ty_for_result, - }, + AliasTy, Clause, ClauseKind, Const, ConstKind, DbInterner, EarlyBinder, + ExistentialPredicate, FnSig, GenericArg, GenericArgs, PolyFnSig, Region, SolverDefId, Term, + TraitRef, Ty, TyKind, TypingMode, + abi::Safety, + infer::{DbInternerInferExt, traits::ObligationCause}, + mapping::ChalkToNextSolver, }, - primitive, to_assoc_type_id, - utils::{self, ClosureSubst, detect_variant_from_bytes}, + primitive, + utils::{self, detect_variant_from_bytes}, }; pub trait HirWrite: fmt::Write { @@ -82,9 +75,10 @@ impl HirWrite for String {} // `core::Formatter` will ignore metadata impl HirWrite for fmt::Formatter<'_> {} -pub struct HirFormatter<'a> { +pub struct HirFormatter<'a, 'db> { /// The database handle - pub db: &'a dyn HirDatabase, + pub db: &'db dyn HirDatabase, + pub interner: DbInterner<'db>, /// The sink to write into fmt: &'a mut dyn HirWrite, /// 
A buffer to intercept writes with, this allows us to track the overall size of the formatted output. @@ -103,7 +97,7 @@ pub struct HirFormatter<'a> { display_lifetimes: DisplayLifetime, display_kind: DisplayKind, display_target: DisplayTarget, - bounds_formatting_ctx: BoundsFormattingCtx, + bounds_formatting_ctx: BoundsFormattingCtx<'db>, } // FIXME: To consider, ref and dyn trait lifetimes can be omitted if they are `'_`, path args should @@ -121,7 +115,7 @@ pub enum DisplayLifetime { } #[derive(Default)] -enum BoundsFormattingCtx { +enum BoundsFormattingCtx<'db> { Entered { /// We can have recursive bounds like the following case: /// ```ignore @@ -131,14 +125,14 @@ enum BoundsFormattingCtx { /// ``` /// So, record the projection types met while formatting bounds and //. prevent recursing into their bounds to avoid infinite loops. - projection_tys_met: FxHashSet, + projection_tys_met: FxHashSet>, }, #[default] Exited, } -impl BoundsFormattingCtx { - fn contains(&mut self, proj: &ProjectionTy) -> bool { +impl<'db> BoundsFormattingCtx<'db> { + fn contains(&self, proj: &AliasTy<'db>) -> bool { match self { BoundsFormattingCtx::Entered { projection_tys_met } => { projection_tys_met.contains(proj) @@ -148,7 +142,7 @@ impl BoundsFormattingCtx { } } -impl HirFormatter<'_> { +impl<'db> HirFormatter<'_, 'db> { fn start_location_link(&mut self, location: ModuleDefId) { self.fmt.start_location_link(location); } @@ -159,7 +153,7 @@ impl HirFormatter<'_> { fn format_bounds_with T>( &mut self, - target: ProjectionTy, + target: AliasTy<'db>, format_bounds: F, ) -> T { match self.bounds_formatting_ctx { @@ -181,52 +175,28 @@ impl HirFormatter<'_> { } } - fn render_lifetime(&self, lifetime: &Lifetime) -> bool { + fn render_region(&self, lifetime: Region<'db>) -> bool { match self.display_lifetimes { DisplayLifetime::Always => true, - DisplayLifetime::OnlyStatic => matches!(***lifetime.interned(), LifetimeData::Static), + DisplayLifetime::OnlyStatic => matches!(lifetime.kind(), RegionKind::ReStatic), DisplayLifetime::OnlyNamed => { - matches!(***lifetime.interned(), LifetimeData::Placeholder(_)) - } - DisplayLifetime::OnlyNamedOrStatic => matches!( - ***lifetime.interned(), - LifetimeData::Static | LifetimeData::Placeholder(_) - ), - DisplayLifetime::Never => false, - } - } - - fn render_region(&self, lifetime: crate::next_solver::Region<'_>) -> bool { - match self.display_lifetimes { - DisplayLifetime::Always => true, - DisplayLifetime::OnlyStatic => { - matches!(lifetime.kind(), rustc_type_ir::RegionKind::ReStatic) + matches!(lifetime.kind(), RegionKind::ReEarlyParam(_)) } - DisplayLifetime::OnlyNamed => { - matches!( - lifetime.kind(), - rustc_type_ir::RegionKind::RePlaceholder(_) - | rustc_type_ir::RegionKind::ReEarlyParam(_) - ) + DisplayLifetime::OnlyNamedOrStatic => { + matches!(lifetime.kind(), RegionKind::ReStatic | RegionKind::ReEarlyParam(_)) } - DisplayLifetime::OnlyNamedOrStatic => matches!( - lifetime.kind(), - rustc_type_ir::RegionKind::ReStatic - | rustc_type_ir::RegionKind::RePlaceholder(_) - | rustc_type_ir::RegionKind::ReEarlyParam(_) - ), DisplayLifetime::Never => false, } } } -pub trait HirDisplay { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError>; +pub trait HirDisplay<'db> { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError>; /// Returns a `Display`able type that is human-readable. 
fn into_displayable<'a>( &'a self, - db: &'a dyn HirDatabase, + db: &'db dyn HirDatabase, max_size: Option, limited_size: Option, omit_verbose_types: bool, @@ -234,7 +204,7 @@ pub trait HirDisplay { display_kind: DisplayKind, closure_style: ClosureStyle, show_container_bounds: bool, - ) -> HirDisplayWrapper<'a, Self> + ) -> HirDisplayWrapper<'a, 'db, Self> where Self: Sized, { @@ -260,9 +230,9 @@ pub trait HirDisplay { /// Use this for showing types to the user (e.g. diagnostics) fn display<'a>( &'a self, - db: &'a dyn HirDatabase, + db: &'db dyn HirDatabase, display_target: DisplayTarget, - ) -> HirDisplayWrapper<'a, Self> + ) -> HirDisplayWrapper<'a, 'db, Self> where Self: Sized, { @@ -284,10 +254,10 @@ pub trait HirDisplay { /// Use this for showing types to the user where space is constrained (e.g. doc popups) fn display_truncated<'a>( &'a self, - db: &'a dyn HirDatabase, + db: &'db dyn HirDatabase, max_size: Option, display_target: DisplayTarget, - ) -> HirDisplayWrapper<'a, Self> + ) -> HirDisplayWrapper<'a, 'db, Self> where Self: Sized, { @@ -309,10 +279,10 @@ pub trait HirDisplay { /// Use this for showing definitions which may contain too many items, like `trait`, `struct`, `enum` fn display_limited<'a>( &'a self, - db: &'a dyn HirDatabase, + db: &'db dyn HirDatabase, limited_size: Option, display_target: DisplayTarget, - ) -> HirDisplayWrapper<'a, Self> + ) -> HirDisplayWrapper<'a, 'db, Self> where Self: Sized, { @@ -334,13 +304,16 @@ pub trait HirDisplay { /// Use this when generating code (e.g. assists) fn display_source_code<'a>( &'a self, - db: &'a dyn HirDatabase, + db: &'db dyn HirDatabase, module_id: ModuleId, allow_opaque: bool, ) -> Result { let mut result = String::new(); + let interner = + DbInterner::new_with(db, Some(module_id.krate()), module_id.containing_block()); match self.hir_fmt(&mut HirFormatter { db, + interner, fmt: &mut result, buf: String::with_capacity(20), curr_size: 0, @@ -364,9 +337,9 @@ pub trait HirDisplay { /// Returns a String representation of `self` for test purposes fn display_test<'a>( &'a self, - db: &'a dyn HirDatabase, + db: &'db dyn HirDatabase, display_target: DisplayTarget, - ) -> HirDisplayWrapper<'a, Self> + ) -> HirDisplayWrapper<'a, 'db, Self> where Self: Sized, { @@ -388,10 +361,10 @@ pub trait HirDisplay { /// the container for functions fn display_with_container_bounds<'a>( &'a self, - db: &'a dyn HirDatabase, + db: &'db dyn HirDatabase, show_container_bounds: bool, display_target: DisplayTarget, - ) -> HirDisplayWrapper<'a, Self> + ) -> HirDisplayWrapper<'a, 'db, Self> where Self: Sized, { @@ -410,7 +383,7 @@ pub trait HirDisplay { } } -impl HirFormatter<'_> { +impl<'db> HirFormatter<'_, 'db> { pub fn krate(&self) -> Crate { self.display_target.krate } @@ -419,7 +392,7 @@ impl HirFormatter<'_> { self.display_target.edition } - pub fn write_joined( + pub fn write_joined>( &mut self, iter: impl IntoIterator, sep: &str, @@ -536,8 +509,8 @@ impl From for HirDisplayError { } } -pub struct HirDisplayWrapper<'a, T> { - db: &'a dyn HirDatabase, +pub struct HirDisplayWrapper<'a, 'db, T> { + db: &'db dyn HirDatabase, t: &'a T, max_size: Option, limited_size: Option, @@ -564,10 +537,17 @@ pub enum ClosureStyle { Hide, } -impl HirDisplayWrapper<'_, T> { +impl<'db, T: HirDisplay<'db>> HirDisplayWrapper<'_, 'db, T> { pub fn write_to(&self, f: &mut F) -> Result<(), HirDisplayError> { + let krate = self.display_target.krate; + let block = match self.display_kind { + DisplayKind::SourceCode { target_module_id, .. 
} => target_module_id.containing_block(), + DisplayKind::Diagnostics | DisplayKind::Test => None, + }; + let interner = DbInterner::new_with(self.db, Some(krate), block); self.t.hir_fmt(&mut HirFormatter { db: self.db, + interner, fmt: f, buf: String::with_capacity(self.max_size.unwrap_or(20)), curr_size: 0, @@ -594,9 +574,9 @@ impl HirDisplayWrapper<'_, T> { } } -impl fmt::Display for HirDisplayWrapper<'_, T> +impl<'db, T> fmt::Display for HirDisplayWrapper<'_, 'db, T> where - T: HirDisplay, + T: HirDisplay<'db>, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self.write_to(f) { @@ -614,196 +594,146 @@ where const TYPE_HINT_TRUNCATION: &str = "…"; -impl HirDisplay for &T { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db, T: HirDisplay<'db>> HirDisplay<'db> for &T { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { HirDisplay::hir_fmt(*self, f) } } -impl HirDisplay for Interned { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db, T: HirDisplay<'db> + Internable> HirDisplay<'db> for Interned { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { HirDisplay::hir_fmt(self.as_ref(), f) } } -impl HirDisplay for ProjectionTy { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - if f.should_truncate() { - return write!(f, "{TYPE_HINT_TRUNCATION}"); - } - let trait_ref = self.trait_ref(f.db); - let self_ty = trait_ref.self_type_parameter(Interner); - - // if we are projection on a type parameter, check if the projection target has bounds - // itself, if so, we render them directly as `impl Bound` instead of the less useful - // `::Assoc` - if !f.display_kind.is_source_code() - && let TyKind::Placeholder(idx) = self_ty.kind(Interner) - && !f.bounds_formatting_ctx.contains(self) - { - let db = f.db; - let id = from_placeholder_idx(db, *idx).0; - let generics = generics(db, id.parent); - - let substs = generics.placeholder_subst(db); - let bounds = db - .generic_predicates(id.parent) - .iter() - .map(|pred| pred.clone().substitute(Interner, &substs)) +fn write_projection<'db>( + f: &mut HirFormatter<'_, 'db>, + alias: &AliasTy<'db>, +) -> Result<(), HirDisplayError> { + if f.should_truncate() { + return write!(f, "{TYPE_HINT_TRUNCATION}"); + } + let trait_ref = alias.trait_ref(f.interner); + let self_ty = trait_ref.self_ty(); + + // if we are projection on a type parameter, check if the projection target has bounds + // itself, if so, we render them directly as `impl Bound` instead of the less useful + // `::Assoc` + if !f.display_kind.is_source_code() + && let TyKind::Param(param) = self_ty.kind() + && !f.bounds_formatting_ctx.contains(alias) + { + // FIXME: We shouldn't use `param.id`, it should be removed. We should know the + // `GenericDefId` from the formatted type (store it inside the `HirFormatter`). 
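        // e.g. (illustrative names): given `where T::Assoc: OtherTrait`, the bounds gathered
        // below let `T::Assoc` render as `impl OtherTrait` rather than `<T as Trait>::Assoc`.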
+ let bounds = + f.db.generic_predicates_ns(param.id.parent()) + .instantiate_identity() + .into_iter() + .flatten() .filter(|wc| { - let ty = match wc.skip_binders() { - WhereClause::Implemented(tr) => tr.self_type_parameter(Interner), - WhereClause::TypeOutlives(t) => t.ty.clone(), - // We shouldn't be here if these exist - WhereClause::AliasEq(_) | WhereClause::LifetimeOutlives(_) => { - return false; - } + let ty = match wc.kind().skip_binder() { + ClauseKind::Trait(tr) => tr.self_ty(), + ClauseKind::TypeOutlives(t) => t.0, + _ => return false, }; - let TyKind::Alias(AliasTy::Projection(proj)) = ty.kind(Interner) else { + let TyKind::Alias(AliasTyKind::Projection, a) = ty.kind() else { return false; }; - proj == self + a == *alias }) .collect::>(); - if !bounds.is_empty() { - return f.format_bounds_with(self.clone(), |f| { - write_bounds_like_dyn_trait_with_prefix( - f, - "impl", - Either::Left( - &TyKind::Alias(AliasTy::Projection(self.clone())).intern(Interner), - ), - &bounds, - SizedByDefault::NotSized, - ) - }); - } - } - - write!(f, "<")?; - self_ty.hir_fmt(f)?; - write!(f, " as ")?; - trait_ref.hir_fmt(f)?; - write!( - f, - ">::{}", - f.db.type_alias_signature(from_assoc_type_id(self.associated_ty_id)) - .name - .display(f.db, f.edition()) - )?; - let proj_params = - &self.substitution.as_slice(Interner)[trait_ref.substitution.len(Interner)..]; - hir_fmt_generics(f, proj_params, None, None) - } -} - -impl HirDisplay for OpaqueTy { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - if f.should_truncate() { - return write!(f, "{TYPE_HINT_TRUNCATION}"); + if !bounds.is_empty() { + return f.format_bounds_with(*alias, |f| { + write_bounds_like_dyn_trait_with_prefix( + f, + "impl", + Either::Left(Ty::new_alias(f.interner, AliasTyKind::Projection, *alias)), + &bounds, + SizedByDefault::NotSized, + ) + }); } - - self.substitution.at(Interner, 0).hir_fmt(f) } -} -impl HirDisplay for GenericArg { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - match self.interned() { - crate::GenericArgData::Ty(ty) => ty.hir_fmt(f), - crate::GenericArgData::Lifetime(lt) => lt.hir_fmt(f), - crate::GenericArgData::Const(c) => c.hir_fmt(f), - } - } + write!(f, "<")?; + self_ty.hir_fmt(f)?; + write!(f, " as ")?; + trait_ref.hir_fmt(f)?; + write!( + f, + ">::{}", + f.db.type_alias_signature(alias.def_id.expect_type_alias()).name.display(f.db, f.edition()) + )?; + let proj_params = &alias.args.as_slice()[trait_ref.args.len()..]; + hir_fmt_generics(f, proj_params, None, None) } -impl<'db> HirDisplay for crate::next_solver::GenericArg<'db> { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - match self.kind() { - rustc_type_ir::GenericArgKind::Type(ty) => ty.hir_fmt(f), - rustc_type_ir::GenericArgKind::Lifetime(lt) => lt.hir_fmt(f), - rustc_type_ir::GenericArgKind::Const(c) => c.hir_fmt(f), +impl<'db> HirDisplay<'db> for GenericArg<'db> { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { + match self { + GenericArg::Ty(ty) => ty.hir_fmt(f), + GenericArg::Lifetime(lt) => lt.hir_fmt(f), + GenericArg::Const(c) => c.hir_fmt(f), } } } -impl HirDisplay for Const { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - let c = self.to_nextsolver(DbInterner::new_with(f.db, None, None)); - c.hir_fmt(f) - } -} - -impl<'db> HirDisplay for crate::next_solver::Const<'db> { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> 
HirDisplay<'db> for Const<'db> { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { match self.kind() { - rustc_type_ir::ConstKind::Placeholder(_) => write!(f, ""), - rustc_type_ir::ConstKind::Bound(db, bound_const) => { + ConstKind::Placeholder(_) => write!(f, ""), + ConstKind::Bound(db, bound_const) => { write!(f, "?{}.{}", db.as_u32(), bound_const.var.as_u32()) } - rustc_type_ir::ConstKind::Infer(..) => write!(f, "#c#"), - rustc_type_ir::ConstKind::Param(param) => { + ConstKind::Infer(..) => write!(f, "#c#"), + ConstKind::Param(param) => { let generics = generics(f.db, param.id.parent()); let param_data = &generics[param.id.local_id()]; write!(f, "{}", param_data.name().unwrap().display(f.db, f.edition()))?; Ok(()) } - rustc_type_ir::ConstKind::Value(const_bytes) => render_const_scalar_ns( + ConstKind::Value(const_bytes) => render_const_scalar( f, &const_bytes.value.inner().memory, &const_bytes.value.inner().memory_map, const_bytes.ty, ), - rustc_type_ir::ConstKind::Unevaluated(unev) => { + ConstKind::Unevaluated(unev) => { let c = match unev.def { SolverDefId::ConstId(id) => GeneralConstId::ConstId(id), SolverDefId::StaticId(id) => GeneralConstId::StaticId(id), _ => unreachable!(), }; write!(f, "{}", c.name(f.db))?; - hir_fmt_generics_ns(f, unev.args.as_slice(), c.generic_def(f.db), None)?; + hir_fmt_generics(f, unev.args.as_slice(), c.generic_def(f.db), None)?; Ok(()) } - rustc_type_ir::ConstKind::Error(..) => f.write_char('_'), - rustc_type_ir::ConstKind::Expr(..) => write!(f, ""), + ConstKind::Error(..) => f.write_char('_'), + ConstKind::Expr(..) => write!(f, ""), } } } -fn render_const_scalar( - f: &mut HirFormatter<'_>, +fn render_const_scalar<'db>( + f: &mut HirFormatter<'_, 'db>, b: &[u8], - memory_map: &MemoryMap<'_>, - ty: &Ty, + memory_map: &MemoryMap<'db>, + ty: Ty<'db>, ) -> Result<(), HirDisplayError> { let trait_env = TraitEnvironment::empty(f.krate()); - let interner = DbInterner::new_with(f.db, Some(trait_env.krate), trait_env.block); - let ty = normalize(f.db, trait_env.clone(), ty.clone()); - let ty = ty.to_nextsolver(interner); - render_const_scalar_inner(f, b, memory_map, ty, trait_env) -} - -fn render_const_scalar_ns( - f: &mut HirFormatter<'_>, - b: &[u8], - memory_map: &MemoryMap<'_>, - ty: crate::next_solver::Ty<'_>, -) -> Result<(), HirDisplayError> { - let trait_env = TraitEnvironment::empty(f.krate()); - let interner = DbInterner::new_with(f.db, Some(trait_env.krate), trait_env.block); - let infcx = interner.infer_ctxt().build(rustc_type_ir::TypingMode::PostAnalysis); + let infcx = f.interner.infer_ctxt().build(TypingMode::PostAnalysis); let ty = infcx.at(&ObligationCause::new(), trait_env.env).deeply_normalize(ty).unwrap_or(ty); render_const_scalar_inner(f, b, memory_map, ty, trait_env) } fn render_const_scalar_inner<'db>( - f: &mut HirFormatter<'_>, + f: &mut HirFormatter<'_, 'db>, b: &[u8], - memory_map: &MemoryMap<'_>, - ty: crate::next_solver::Ty<'db>, + memory_map: &MemoryMap<'db>, + ty: Ty<'db>, trait_env: Arc>, ) -> Result<(), HirDisplayError> { - use rustc_type_ir::TyKind; + use TyKind; match ty.kind() { TyKind::Bool => write!(f, "{}", b[0] != 0), TyKind::Char => { @@ -822,7 +752,7 @@ fn render_const_scalar_inner<'db>( write!(f, "{it}") } TyKind::Float(fl) => match fl { - rustc_type_ir::FloatTy::F16 => { + FloatTy::F16 => { // FIXME(#17451): Replace with builtins once they are stabilised. 
let it = f16::from_bits(u16::from_le_bytes(b.try_into().unwrap()).into()); let s = it.to_string(); @@ -833,15 +763,15 @@ fn render_const_scalar_inner<'db>( write!(f, "{s}") } } - rustc_type_ir::FloatTy::F32 => { + FloatTy::F32 => { let it = f32::from_le_bytes(b.try_into().unwrap()); write!(f, "{it:?}") } - rustc_type_ir::FloatTy::F64 => { + FloatTy::F64 => { let it = f64::from_le_bytes(b.try_into().unwrap()); write!(f, "{it:?}") } - rustc_type_ir::FloatTy::F128 => { + FloatTy::F128 => { // FIXME(#17451): Replace with builtins once they are stabilised. let it = f128::from_bits(u128::from_le_bytes(b.try_into().unwrap())); let s = it.to_string(); @@ -890,7 +820,7 @@ fn render_const_scalar_inner<'db>( f.write_str(", ")?; } let offset = size_one * i; - render_const_scalar_ns(f, &bytes[offset..offset + size_one], memory_map, ty)?; + render_const_scalar(f, &bytes[offset..offset + size_one], memory_map, ty)?; } f.write_str("]") } @@ -908,7 +838,7 @@ fn render_const_scalar_inner<'db>( return f.write_str(""); }; f.write_str("&")?; - render_const_scalar_ns(f, bytes, memory_map, t) + render_const_scalar(f, bytes, memory_map, t) } TyKind::Adt(adt, _) if b.len() == 2 * size_of::() => match adt.def_id().0 { hir_def::AdtId::StructId(s) => { @@ -938,7 +868,7 @@ fn render_const_scalar_inner<'db>( return f.write_str(""); }; f.write_str("&")?; - render_const_scalar_ns(f, bytes, memory_map, t) + render_const_scalar(f, bytes, memory_map, t) } }, TyKind::Tuple(tys) => { @@ -959,7 +889,7 @@ fn render_const_scalar_inner<'db>( continue; }; let size = layout.size.bytes_usize(); - render_const_scalar_ns(f, &b[offset..offset + size], memory_map, ty)?; + render_const_scalar(f, &b[offset..offset + size], memory_map, ty)?; } f.write_str(")") } @@ -972,7 +902,7 @@ fn render_const_scalar_inner<'db>( hir_def::AdtId::StructId(s) => { let data = f.db.struct_signature(s); write!(f, "{}", data.name.display(f.db, f.edition()))?; - let field_types = f.db.field_types(s.into()); + let field_types = f.db.field_types_ns(s.into()); render_variant_after_name( s.fields(f.db), f, @@ -1004,7 +934,7 @@ fn render_const_scalar_inner<'db>( .1 .display(f.db, f.edition()) )?; - let field_types = f.db.field_types(var_id.into()); + let field_types = f.db.field_types_ns(var_id.into()); render_variant_after_name( var_id.fields(f.db), f, @@ -1041,7 +971,7 @@ fn render_const_scalar_inner<'db>( f.write_str(", ")?; } let offset = size_one * i; - render_const_scalar_ns(f, &b[offset..offset + size_one], memory_map, ty)?; + render_const_scalar(f, &b[offset..offset + size_one], memory_map, ty)?; } f.write_str("]") } @@ -1067,28 +997,24 @@ fn render_const_scalar_inner<'db>( fn render_variant_after_name<'db>( data: &VariantFields, - f: &mut HirFormatter<'_>, - field_types: &ArenaMap>, + f: &mut HirFormatter<'_, 'db>, + field_types: &ArenaMap>>, trait_env: Arc>, layout: &Layout, - args: GenericArgs<'_>, + args: GenericArgs<'db>, b: &[u8], - memory_map: &MemoryMap<'_>, + memory_map: &MemoryMap<'db>, ) -> Result<(), HirDisplayError> { - let interner = DbInterner::new_with(f.db, Some(trait_env.krate), trait_env.block); match data.shape { FieldsShape::Record | FieldsShape::Tuple => { - let render_field = |f: &mut HirFormatter<'_>, id: LocalFieldId| { + let render_field = |f: &mut HirFormatter<'_, 'db>, id: LocalFieldId| { let offset = layout.fields.offset(u32::from(id.into_raw()) as usize).bytes_usize(); - let ty = field_types[id] - .clone() - .substitute(Interner, &convert_args_for_result(interner, args.as_slice())); - let Ok(layout) = 
f.db.layout_of_ty(ty.to_nextsolver(interner), trait_env.clone()) - else { + let ty = field_types[id].instantiate(f.interner, args); + let Ok(layout) = f.db.layout_of_ty(ty, trait_env.clone()) else { return f.write_str(""); }; let size = layout.size.bytes_usize(); - render_const_scalar(f, &b[offset..offset + size], memory_map, &ty) + render_const_scalar(f, &b[offset..offset + size], memory_map, ty) }; let mut it = data.fields().iter(); if matches!(data.shape, FieldsShape::Record) { @@ -1120,33 +1046,17 @@ fn render_variant_after_name<'db>( } } -impl HirDisplay for BoundVar { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - write!(f, "?{}.{}", self.debruijn.depth(), self.index) - } -} - -impl HirDisplay for Ty { +impl<'db> HirDisplay<'db> for Ty<'db> { fn hir_fmt( &self, - f @ &mut HirFormatter { db, .. }: &mut HirFormatter<'_>, + f @ &mut HirFormatter { db, .. }: &mut HirFormatter<'_, 'db>, ) -> Result<(), HirDisplayError> { - let ty = self.to_nextsolver(DbInterner::new_with(db, None, None)); - ty.hir_fmt(f) - } -} - -impl<'db> HirDisplay for crate::next_solver::Ty<'db> { - fn hir_fmt( - &self, - f @ &mut HirFormatter { db, .. }: &mut HirFormatter<'_>, - ) -> Result<(), HirDisplayError> { - let interner = DbInterner::new_with(db, None, None); + let interner = f.interner; if f.should_truncate() { return write!(f, "{TYPE_HINT_TRUNCATION}"); } - use rustc_type_ir::TyKind; + use TyKind; match self.kind() { TyKind::Never => write!(f, "!")?, TyKind::Str => write!(f, "str")?, @@ -1164,14 +1074,14 @@ impl<'db> HirDisplay for crate::next_solver::Ty<'db> { write!(f, "[")?; t.hir_fmt(f)?; write!(f, "; ")?; - convert_const_for_result(interner, c).hir_fmt(f)?; + c.hir_fmt(f)?; write!(f, "]")?; } kind @ (TyKind::RawPtr(t, m) | TyKind::Ref(_, t, m)) => { if let TyKind::Ref(l, _, _) = kind { f.write_char('&')?; if f.render_region(l) { - convert_region_for_result(interner, l).hir_fmt(f)?; + l.hir_fmt(f)?; f.write_char(' ')?; } match m { @@ -1190,32 +1100,18 @@ impl<'db> HirDisplay for crate::next_solver::Ty<'db> { } // FIXME: all this just to decide whether to use parentheses... 
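                // e.g. `&(dyn Fn(usize) + Send)` needs the parentheses, while a lone
                // `&dyn Fn(usize)` or `&dyn Send` does not.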
- let contains_impl_fn = |bounds: &[QuantifiedWhereClause]| { - bounds.iter().any(|bound| { - if let WhereClause::Implemented(trait_ref) = bound.skip_binders() { - let trait_ = trait_ref.hir_trait_id(); - fn_traits(db, trait_).any(|it| it == trait_) - } else { - false - } - }) - }; - let contains_impl_fn_ns = |bounds: &[BoundExistentialPredicate<'_>]| { - bounds.iter().any(|bound| match bound.skip_binder() { - rustc_type_ir::ExistentialPredicate::Trait(trait_ref) => { - let trait_ = trait_ref.def_id.0; - fn_traits(db, trait_).any(|it| it == trait_) - } - _ => false, - }) - }; let (preds_to_print, has_impl_fn_pred) = match t.kind() { TyKind::Dynamic(bounds, region) => { + let contains_impl_fn = + bounds.iter().any(|bound| match bound.skip_binder() { + ExistentialPredicate::Trait(trait_ref) => { + let trait_ = trait_ref.def_id.0; + fn_traits(db, trait_).any(|it| it == trait_) + } + _ => false, + }); let render_lifetime = f.render_region(region); - ( - bounds.len() + render_lifetime as usize, - contains_impl_fn_ns(bounds.as_slice()), - ) + (bounds.len() + render_lifetime as usize, contains_impl_fn) } TyKind::Alias(AliasTyKind::Opaque, ty) => { let opaque_ty_id = match ty.def_id { @@ -1225,28 +1121,25 @@ impl<'db> HirDisplay for crate::next_solver::Ty<'db> { let impl_trait_id = db.lookup_intern_impl_trait_id(opaque_ty_id); if let ImplTraitId::ReturnTypeImplTrait(func, idx) = impl_trait_id { let datas = db - .return_type_impl_traits(func) + .return_type_impl_traits_ns(func) .expect("impl trait id without data"); - let data = - (*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone()); - let bounds = data.substitute( - Interner, - &convert_args_for_result(interner, ty.args.as_slice()), - ); - let mut len = bounds.skip_binders().len(); + let data = (*datas).as_ref().map_bound(|rpit| { + &rpit.impl_traits[idx.to_nextsolver(interner)].predicates + }); + let bounds = + || data.iter_instantiated_copied(f.interner, ty.args.as_slice()); + let mut len = bounds().count(); // Don't count Sized but count when it absent // (i.e. when explicit ?Sized bound is set). let default_sized = SizedByDefault::Sized { anchor: func.krate(db) }; - let sized_bounds = bounds - .skip_binders() - .iter() + let sized_bounds = bounds() .filter(|b| { matches!( - b.skip_binders(), - WhereClause::Implemented(trait_ref) + b.kind().skip_binder(), + ClauseKind::Trait(trait_ref) if default_sized.is_sized_trait( - trait_ref.hir_trait_id(), + trait_ref.def_id().0, db, ), ) @@ -1259,7 +1152,15 @@ impl<'db> HirDisplay for crate::next_solver::Ty<'db> { } } - (len, contains_impl_fn(bounds.skip_binders())) + let contains_impl_fn = bounds().any(|bound| { + if let ClauseKind::Trait(trait_ref) = bound.kind().skip_binder() { + let trait_ = trait_ref.def_id().0; + fn_traits(db, trait_).any(|it| it == trait_) + } else { + false + } + }); + (len, contains_impl_fn) } else { (0, false) } @@ -1291,31 +1192,28 @@ impl<'db> HirDisplay for crate::next_solver::Ty<'db> { } } TyKind::FnPtr(sig, header) => { - let sig = CallableSig::from_fn_sig_and_header(interner, sig, header); + let sig = sig.with(header); sig.hir_fmt(f)?; } TyKind::FnDef(def, args) => { let def = def.0; - let sig = db - .callable_item_signature(def) - .instantiate(interner, args) - .skip_binder() - .to_chalk(interner); + let sig = db.callable_item_signature(def).instantiate(interner, args); if f.display_kind.is_source_code() { // `FnDef` is anonymous and there's no surface syntax for it. Show it as a // function pointer type. 
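                    // e.g. (illustrative): for `fn foo(x: u32) -> u32`, source-code mode prints
                    // the pointer type `fn(u32) -> u32` instead of the named
                    // `fn foo(u32) -> u32` form produced below.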
return sig.hir_fmt(f); } - if let Safety::Unsafe = sig.safety { + if let Safety::Unsafe = sig.safety() { write!(f, "unsafe ")?; } - if !matches!(sig.abi, FnAbi::Rust | FnAbi::RustCall) { + if !matches!(sig.abi(), FnAbi::Rust | FnAbi::RustCall) { f.write_str("extern \"")?; - f.write_str(sig.abi.as_str())?; + f.write_str(sig.abi().as_str())?; f.write_str("\" ")?; } + let sig = sig.skip_binder(); write!(f, "fn ")?; f.start_location_link(def.into()); match def { @@ -1338,13 +1236,12 @@ impl<'db> HirDisplay for crate::next_solver::Ty<'db> { }; f.end_location_link(); - let parameters = convert_args_for_result(interner, args.as_slice()); - if parameters.len(Interner) > 0 { + if args.len() > 0 { let generic_def_id = GenericDefId::from_callable(db, def); let generics = generics(db, generic_def_id); let (parent_len, self_param, type_, const_, impl_, lifetime) = generics.provenance_split(); - let parameters = parameters.as_slice(Interner); + let parameters = args.as_slice(); debug_assert_eq!( parameters.len(), parent_len + self_param as usize + type_ + const_ + impl_ + lifetime @@ -1389,9 +1286,9 @@ impl<'db> HirDisplay for crate::next_solver::Ty<'db> { } } write!(f, "(")?; - f.write_joined(sig.params(), ", ")?; + f.write_joined(sig.inputs(), ", ")?; write!(f, ")")?; - let ret = sig.ret(); + let ret = sig.output(); if !ret.is_unit() { write!(f, " -> ")?; ret.hir_fmt(f)?; @@ -1434,27 +1331,9 @@ impl<'db> HirDisplay for crate::next_solver::Ty<'db> { } f.end_location_link(); - hir_fmt_generics( - f, - convert_args_for_result(interner, parameters.as_slice()).as_slice(Interner), - Some(def.def_id().0.into()), - None, - )?; - } - TyKind::Alias(AliasTyKind::Projection, alias_ty) => { - let type_alias = match alias_ty.def_id { - SolverDefId::TypeAliasId(id) => id, - _ => unreachable!(), - }; - let parameters = convert_args_for_result(interner, alias_ty.args.as_slice()); - - let projection_ty = ProjectionTy { - associated_ty_id: to_assoc_type_id(type_alias), - substitution: parameters.clone(), - }; - - projection_ty.hir_fmt(f)?; + hir_fmt_generics(f, parameters.as_slice(), Some(def.def_id().0.into()), None)?; } + TyKind::Alias(AliasTyKind::Projection, alias_ty) => write_projection(f, &alias_ty)?, TyKind::Foreign(alias) => { let type_alias = db.type_alias_signature(alias.0); f.start_location_link(alias.0.into()); @@ -1466,7 +1345,6 @@ impl<'db> HirDisplay for crate::next_solver::Ty<'db> { SolverDefId::InternedOpaqueTyId(id) => id, _ => unreachable!(), }; - let parameters = convert_args_for_result(interner, alias_ty.args.as_slice()); if !f.display_kind.allows_opaque() { return Err(HirDisplayError::DisplaySourceCodeError( DisplaySourceCodeError::OpaqueType, @@ -1475,32 +1353,41 @@ impl<'db> HirDisplay for crate::next_solver::Ty<'db> { let impl_trait_id = db.lookup_intern_impl_trait_id(opaque_ty_id); match impl_trait_id { ImplTraitId::ReturnTypeImplTrait(func, idx) => { - let datas = - db.return_type_impl_traits(func).expect("impl trait id without data"); - let data = - (*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone()); - let bounds = data.substitute(Interner, ¶meters); + let datas = db + .return_type_impl_traits_ns(func) + .expect("impl trait id without data"); + let data = (*datas).as_ref().map_bound(|rpit| { + &rpit.impl_traits[idx.to_nextsolver(interner)].predicates + }); + let bounds = data + .iter_instantiated_copied(interner, alias_ty.args.as_slice()) + .collect::>(); let krate = func.krate(db); write_bounds_like_dyn_trait_with_prefix( f, "impl", - 
Either::Left(&convert_ty_for_result(interner, *self)), - bounds.skip_binders(), + Either::Left(*self), + &bounds, SizedByDefault::Sized { anchor: krate }, )?; // FIXME: it would maybe be good to distinguish this from the alias type (when debug printing), and to show the substitution } ImplTraitId::TypeAliasImplTrait(alias, idx) => { - let datas = - db.type_alias_impl_traits(alias).expect("impl trait id without data"); - let data = (*datas).as_ref().map(|it| it.impl_traits[idx].bounds.clone()); - let bounds = data.substitute(Interner, ¶meters); + let datas = db + .type_alias_impl_traits_ns(alias) + .expect("impl trait id without data"); + let data = (*datas).as_ref().map_bound(|rpit| { + &rpit.impl_traits[idx.to_nextsolver(interner)].predicates + }); + let bounds = data + .iter_instantiated_copied(interner, alias_ty.args.as_slice()) + .collect::>(); let krate = alias.krate(db); write_bounds_like_dyn_trait_with_prefix( f, "impl", - Either::Left(&convert_ty_for_result(interner, *self)), - bounds.skip_binders(), + Either::Left(*self), + &bounds, SizedByDefault::Sized { anchor: krate }, )?; } @@ -1528,14 +1415,13 @@ impl<'db> HirDisplay for crate::next_solver::Ty<'db> { f.end_location_link(); } write!(f, " = ")?; - parameters.at(Interner, 0).hir_fmt(f)?; + alias_ty.args.type_at(0).hir_fmt(f)?; write!(f, ">")?; } } } TyKind::Closure(id, substs) => { let id = id.0; - let substs = convert_args_for_result(interner, substs.as_slice()); if f.display_kind.is_source_code() { if !f.display_kind.allows_opaque() { return Err(HirDisplayError::DisplaySourceCodeError( @@ -1556,12 +1442,16 @@ impl<'db> HirDisplay for crate::next_solver::Ty<'db> { } ClosureStyle::ClosureWithSubst => { write!(f, "{{closure#{:?}}}", salsa::plumbing::AsId::as_id(&id).index())?; - return hir_fmt_generics(f, substs.as_slice(Interner), None, None); + return hir_fmt_generics(f, substs.as_slice(), None, None); } _ => (), } - let sig = ClosureSubst(&substs).sig_ty(db).callable_sig(db); + let sig = substs + .split_closure_args_untupled() + .closure_sig_as_fn_ptr_ty + .callable_sig(interner); if let Some(sig) = sig { + let sig = sig.skip_binder(); let InternedClosure(def, _) = db.lookup_intern_closure(id); let infer = db.infer(def); let (_, kind) = infer.closure_info(id); @@ -1570,22 +1460,22 @@ impl<'db> HirDisplay for crate::next_solver::Ty<'db> { ClosureStyle::RANotation => write!(f, "|")?, _ => unreachable!(), } - if sig.params().is_empty() { + if sig.inputs().is_empty() { } else if f.should_truncate() { write!(f, "{TYPE_HINT_TRUNCATION}")?; } else { - f.write_joined(sig.params(), ", ")?; + f.write_joined(sig.inputs(), ", ")?; }; match f.closure_style { ClosureStyle::ImplFn => write!(f, ")")?, ClosureStyle::RANotation => write!(f, "|")?, _ => unreachable!(), } - if f.closure_style == ClosureStyle::RANotation || !sig.ret().is_unit() { + if f.closure_style == ClosureStyle::RANotation || !sig.output().is_unit() { write!(f, " -> ")?; // FIXME: We display `AsyncFn` as `-> impl Future`, but this is hard to fix because // we don't have a trait environment here, required to normalize `::Output`. - sig.ret().hir_fmt(f)?; + sig.output().hir_fmt(f)?; } } else { write!(f, "{{closure}}")?; @@ -1593,6 +1483,8 @@ impl<'db> HirDisplay for crate::next_solver::Ty<'db> { } TyKind::Placeholder(_) => write!(f, "{{placeholder}}")?, TyKind::Param(param) => { + // FIXME: We should not access `param.id`, it should be removed, and we should know the + // parent from the formatted type. 
let generics = generics(db, param.id.parent()); let param_data = &generics[param.id.local_id()]; match param_data { @@ -1608,35 +1500,23 @@ impl<'db> HirDisplay for crate::next_solver::Ty<'db> { )? } TypeParamProvenance::ArgumentImplTrait => { - let substs = generics.placeholder_subst(db); let bounds = db - .generic_predicates(param.id.parent()) - .iter() - .map(|pred| pred.clone().substitute(Interner, &substs)) - .filter(|wc| match wc.skip_binders() { - WhereClause::Implemented(tr) => { - tr.self_type_parameter(Interner) - == convert_ty_for_result(interner, *self) - } - WhereClause::AliasEq(AliasEq { - alias: AliasTy::Projection(proj), - ty: _, - }) => { - proj.self_type_parameter(db) - == convert_ty_for_result(interner, *self) - } - WhereClause::AliasEq(_) => false, - WhereClause::TypeOutlives(to) => { - to.ty == convert_ty_for_result(interner, *self) - } - WhereClause::LifetimeOutlives(_) => false, + .generic_predicates_ns(param.id.parent()) + .instantiate_identity() + .into_iter() + .flatten() + .filter(|wc| match wc.kind().skip_binder() { + ClauseKind::Trait(tr) => tr.self_ty() == *self, + ClauseKind::Projection(proj) => proj.self_ty() == *self, + ClauseKind::TypeOutlives(to) => to.0 == *self, + _ => false, }) .collect::>(); let krate = param.id.parent().module(db).krate(); write_bounds_like_dyn_trait_with_prefix( f, "impl", - Either::Left(&convert_ty_for_result(interner, *self)), + Either::Left(*self), &bounds, SizedByDefault::Sized { anchor: krate }, )?; @@ -1647,42 +1527,34 @@ impl<'db> HirDisplay for crate::next_solver::Ty<'db> { } } } - TyKind::Bound(debruijn_index, ty) => { - let idx = chalk_ir::BoundVar { - debruijn: chalk_ir::DebruijnIndex::new(debruijn_index.as_u32()), - index: ty.var.as_usize(), - }; - idx.hir_fmt(f)? - } - TyKind::Dynamic(..) => { - let ty = convert_ty_for_result(interner, *self); - let chalk_ir::TyKind::Dyn(dyn_ty) = ty.kind(Interner) else { unreachable!() }; - // Reorder bounds to satisfy `write_bounds_like_dyn_trait()`'s expectation. - // FIXME: `Iterator::partition_in_place()` or `Vec::extract_if()` may make it - // more efficient when either of them hits stable. - let mut bounds: SmallVec<[_; 4]> = - dyn_ty.bounds.skip_binders().iter(Interner).cloned().collect(); - let (auto_traits, others): (SmallVec<[_; 4]>, _) = - bounds.drain(1..).partition(|b| b.skip_binders().trait_id().is_some()); - bounds.extend(others); - bounds.extend(auto_traits); - - if f.render_lifetime(&dyn_ty.lifetime) { - // we skip the binders in `write_bounds_like_dyn_trait_with_prefix` - bounds.push(Binders::empty( - Interner, - chalk_ir::WhereClause::TypeOutlives(chalk_ir::TypeOutlives { - ty: ty.clone(), - lifetime: dyn_ty.lifetime.clone(), - }), - )); + TyKind::Bound(debruijn, ty) => { + write!(f, "?{}.{}", debruijn.as_usize(), ty.var.as_usize())? + } + TyKind::Dynamic(bounds, region) => { + // We want to put auto traits after principal traits, regardless of their written order. 
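A rough sketch of that reordering with stand-in types (not the real `ExistentialPredicate` machinery): principal and projection bounds keep their written order, and auto traits are appended at the end.

```rust
// Illustrative only: model the bound reordering for `dyn Trait` display.
#[derive(Debug, Clone)]
enum Bound {
    Trait(&'static str),      // principal trait, e.g. `Iterator`
    Projection(&'static str), // e.g. `Item = u32`
    AutoTrait(&'static str),  // e.g. `Send`
}

fn reorder_for_display(bounds: &[Bound]) -> Vec<Bound> {
    let mut principal = Vec::new();
    let mut auto = Vec::new();
    for b in bounds {
        match b {
            Bound::AutoTrait(_) => auto.push(b.clone()),
            _ => principal.push(b.clone()),
        }
    }
    principal.extend(auto); // auto traits always come last
    principal
}

fn main() {
    let written = [
        Bound::AutoTrait("Send"),
        Bound::Trait("Iterator"),
        Bound::Projection("Item = u32"),
    ];
    // Renders in the order `Iterator`, `Item = u32`, `Send`,
    // i.e. `dyn Iterator<Item = u32> + Send`.
    println!("{:?}", reorder_for_display(&written));
}
```

The hunk additionally pushes an outlives bound for the region when it should be rendered; that part is omitted from the sketch.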
+ let mut bounds_to_display = SmallVec::<[_; 4]>::new(); + let mut auto_trait_bounds = SmallVec::<[_; 4]>::new(); + for bound in bounds.iter() { + let clause = bound.with_self_ty(interner, *self); + match bound.skip_binder() { + ExistentialPredicate::Trait(_) | ExistentialPredicate::Projection(_) => { + bounds_to_display.push(clause); + } + ExistentialPredicate::AutoTrait(_) => auto_trait_bounds.push(clause), + } + } + bounds_to_display.append(&mut auto_trait_bounds); + + if f.render_region(region) { + bounds_to_display + .push(rustc_type_ir::OutlivesPredicate(*self, region).upcast(interner)); } write_bounds_like_dyn_trait_with_prefix( f, "dyn", - Either::Left(&ty), - &bounds, + Either::Left(*self), + &bounds_to_display, SizedByDefault::NotSized, )?; } @@ -1722,11 +1594,11 @@ impl<'db> HirDisplay for crate::next_solver::Ty<'db> { } } -fn hir_fmt_generics( - f: &mut HirFormatter<'_>, - parameters: &[GenericArg], +fn hir_fmt_generics<'db>( + f: &mut HirFormatter<'_, 'db>, + parameters: &[GenericArg<'db>], generic_def: Option, - self_: Option<&Ty>, + self_: Option>, ) -> Result<(), HirDisplayError> { if parameters.is_empty() { return Ok(()); @@ -1743,70 +1615,23 @@ fn hir_fmt_generics( Ok(()) } -fn hir_fmt_generics_ns<'db>( - f: &mut HirFormatter<'_>, - parameters: &[crate::next_solver::GenericArg<'db>], +fn generic_args_sans_defaults<'ga, 'db>( + f: &mut HirFormatter<'_, 'db>, generic_def: Option, - self_: Option>, -) -> Result<(), HirDisplayError> { - if parameters.is_empty() { - return Ok(()); - } - - let parameters_to_write = generic_args_sans_defaults_ns(f, generic_def, parameters); - - if !parameters_to_write.is_empty() { - write!(f, "<")?; - hir_fmt_generic_arguments_ns(f, parameters_to_write, self_)?; - write!(f, ">")?; - } - - Ok(()) -} - -fn generic_args_sans_defaults<'ga>( - f: &mut HirFormatter<'_>, - generic_def: Option, - parameters: &'ga [GenericArg], -) -> &'ga [GenericArg] { + parameters: &'ga [GenericArg<'db>], +) -> &'ga [GenericArg<'db>] { if f.display_kind.is_source_code() || f.omit_verbose_types() { - match generic_def - .map(|generic_def_id| f.db.generic_defaults(generic_def_id)) - .filter(|it| !it.is_empty()) - { + match generic_def.map(|generic_def_id| f.db.generic_defaults_ns(generic_def_id)) { None => parameters, Some(default_parameters) => { - let should_show = |arg: &GenericArg, i: usize| { - let is_err = |arg: &GenericArg| match arg.data(Interner) { - chalk_ir::GenericArgData::Lifetime(it) => { - *it.data(Interner) == LifetimeData::Error - } - chalk_ir::GenericArgData::Ty(it) => *it.kind(Interner) == TyKind::Error, - chalk_ir::GenericArgData::Const(it) => matches!( - it.data(Interner).value, - ConstValue::Concrete(ConcreteConst { - interned: ConstScalar::Unknown, - .. 
- }) - ), - }; - // if the arg is error like, render it to inform the user - if is_err(arg) { - return true; - } - // otherwise, if the arg is equal to the param default, hide it (unless the - // default is an error which can happen for the trait Self type) - match default_parameters.get(i) { - None => true, - Some(default_parameter) => { - // !is_err(default_parameter.skip_binders()) - // && - arg != &default_parameter.clone().substitute(Interner, ¶meters[..i]) - } + let should_show = |arg: GenericArg<'db>, i: usize| match default_parameters.get(i) { + None => true, + Some(default_parameter) => { + arg != default_parameter.instantiate(f.interner, ¶meters[..i]) } }; let mut default_from = 0; - for (i, parameter) in parameters.iter().enumerate() { + for (i, ¶meter) in parameters.iter().enumerate() { if should_show(parameter, i) { default_from = i + 1; } @@ -1820,93 +1645,33 @@ fn generic_args_sans_defaults<'ga>( } fn hir_fmt_generic_args<'db>( - f: &mut HirFormatter<'_>, - parameters: &[crate::next_solver::GenericArg<'db>], + f: &mut HirFormatter<'_, 'db>, + parameters: &[GenericArg<'db>], generic_def: Option, - self_: Option>, + self_: Option>, ) -> Result<(), HirDisplayError> { if parameters.is_empty() { return Ok(()); } - let parameters_to_write = generic_args_sans_defaults_ns(f, generic_def, parameters); + let parameters_to_write = generic_args_sans_defaults(f, generic_def, parameters); if !parameters_to_write.is_empty() { write!(f, "<")?; - hir_fmt_generic_arguments_ns(f, parameters_to_write, self_)?; + hir_fmt_generic_arguments(f, parameters_to_write, self_)?; write!(f, ">")?; } Ok(()) } -fn generic_args_sans_defaults_ns<'ga, 'db>( - f: &mut HirFormatter<'_>, - generic_def: Option, - parameters: &'ga [crate::next_solver::GenericArg<'db>], -) -> &'ga [crate::next_solver::GenericArg<'db>] { - let interner = DbInterner::new_with(f.db, Some(f.krate()), None); - if f.display_kind.is_source_code() || f.omit_verbose_types() { - match generic_def - .map(|generic_def_id| f.db.generic_defaults(generic_def_id)) - .filter(|it| !it.is_empty()) - { - None => parameters, - Some(default_parameters) => { - let should_show = |arg: &crate::next_solver::GenericArg<'db>, i: usize| { - let is_err = |arg: &crate::next_solver::GenericArg<'db>| match arg.kind() { - rustc_type_ir::GenericArgKind::Lifetime(it) => { - matches!(it.kind(), RegionKind::ReError(..)) - } - rustc_type_ir::GenericArgKind::Type(it) => { - matches!(it.kind(), rustc_type_ir::TyKind::Error(..)) - } - rustc_type_ir::GenericArgKind::Const(it) => { - matches!(it.kind(), rustc_type_ir::ConstKind::Error(..),) - } - }; - // if the arg is error like, render it to inform the user - if is_err(arg) { - return true; - } - // otherwise, if the arg is equal to the param default, hide it (unless the - // default is an error which can happen for the trait Self type) - match default_parameters.get(i) { - None => true, - Some(default_parameter) => { - // !is_err(default_parameter.skip_binders()) - // && - arg != &default_parameter - .clone() - .substitute( - Interner, - &convert_args_for_result(interner, ¶meters[..i]), - ) - .to_nextsolver(interner) - } - } - }; - let mut default_from = 0; - for (i, parameter) in parameters.iter().enumerate() { - if should_show(parameter, i) { - default_from = i + 1; - } - } - ¶meters[0..default_from] - } - } - } else { - parameters - } -} - -fn hir_fmt_generic_arguments( - f: &mut HirFormatter<'_>, - parameters: &[GenericArg], - self_: Option<&Ty>, +fn hir_fmt_generic_arguments<'db>( + f: &mut HirFormatter<'_, 'db>, + 
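The rewritten `generic_args_sans_defaults` keeps only a leading run of arguments, cutting everything after the last argument that differs from its (instantiated) default. A toy model with plain strings instead of `GenericArg`s, and with defaults precomputed rather than instantiated:

```rust
// Illustrative only: trim trailing generic arguments equal to their defaults,
// so e.g. `HashMap<String, u32, RandomState>` displays as `HashMap<String, u32>`.
fn args_sans_defaults<'a>(args: &'a [&'a str], defaults: &[Option<&str>]) -> &'a [&'a str] {
    let mut keep = 0;
    for (i, arg) in args.iter().enumerate() {
        let is_default = defaults.get(i).and_then(|d| *d) == Some(*arg);
        if !is_default {
            keep = i + 1; // keep everything up to the last non-default argument
        }
    }
    &args[..keep]
}

fn main() {
    let args = ["String", "u32", "RandomState"];
    let defaults = [None, None, Some("RandomState")];
    assert_eq!(args_sans_defaults(&args, &defaults), ["String", "u32"]);
}
```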
parameters: &[GenericArg<'db>], + self_: Option>, ) -> Result<(), HirDisplayError> { let mut first = true; - let lifetime_offset = parameters.iter().position(|arg| arg.lifetime(Interner).is_some()); + let lifetime_offset = parameters.iter().position(|arg| arg.region().is_some()); let (ty_or_const, lifetimes) = match lifetime_offset { Some(offset) => parameters.split_at(offset), @@ -1917,40 +1682,35 @@ fn hir_fmt_generic_arguments( write!(f, ", ")?; } match self_ { - self_ @ Some(_) if generic_arg.ty(Interner) == self_ => write!(f, "Self")?, + self_ @ Some(_) if generic_arg.ty() == self_ => write!(f, "Self")?, _ => generic_arg.hir_fmt(f)?, } } Ok(()) } -fn hir_fmt_generic_arguments_ns<'db>( - f: &mut HirFormatter<'_>, - parameters: &[crate::next_solver::GenericArg<'db>], - self_: Option>, +fn hir_fmt_tys<'db>( + f: &mut HirFormatter<'_, 'db>, + tys: &[Ty<'db>], + self_: Option>, ) -> Result<(), HirDisplayError> { let mut first = true; - let lifetime_offset = parameters.iter().position(|arg| arg.region().is_some()); - let (ty_or_const, lifetimes) = match lifetime_offset { - Some(offset) => parameters.split_at(offset), - None => (parameters, &[][..]), - }; - for generic_arg in lifetimes.iter().chain(ty_or_const) { + for ty in tys { if !mem::take(&mut first) { write!(f, ", ")?; } match self_ { - self_ @ Some(_) if generic_arg.ty() == self_ => write!(f, "Self")?, - _ => generic_arg.hir_fmt(f)?, + Some(self_) if *ty == self_ => write!(f, "Self")?, + _ => ty.hir_fmt(f)?, } } Ok(()) } -impl HirDisplay for CallableSig { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - let CallableSig { params_and_return: _, is_varargs, safety, abi: _ } = *self; +impl<'db> HirDisplay<'db> for PolyFnSig<'db> { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { + let FnSig { inputs_and_output, c_variadic, safety, abi: _ } = self.skip_binder(); if let Safety::Unsafe = safety { write!(f, "unsafe ")?; } @@ -1961,16 +1721,16 @@ impl HirDisplay for CallableSig { // f.write_str("\" ")?; // } write!(f, "fn(")?; - f.write_joined(self.params(), ", ")?; - if is_varargs { - if self.params().is_empty() { + f.write_joined(inputs_and_output.inputs(), ", ")?; + if c_variadic { + if inputs_and_output.inputs().is_empty() { write!(f, "...")?; } else { write!(f, ", ...")?; } } write!(f, ")")?; - let ret = self.ret(); + let ret = inputs_and_output.output(); if !ret.is_unit() { write!(f, " -> ")?; ret.hir_fmt(f)?; @@ -1979,6 +1739,15 @@ impl HirDisplay for CallableSig { } } +impl<'db> HirDisplay<'db> for Term<'db> { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { + match self { + Term::Ty(it) => it.hir_fmt(f), + Term::Const(it) => it.hir_fmt(f), + } + } +} + fn fn_traits(db: &dyn DefDatabase, trait_: TraitId) -> impl Iterator + '_ { let krate = trait_.lookup(db).container.krate(); utils::fn_traits(db, krate) @@ -2002,11 +1771,11 @@ impl SizedByDefault { } } -pub fn write_bounds_like_dyn_trait_with_prefix( - f: &mut HirFormatter<'_>, +pub fn write_bounds_like_dyn_trait_with_prefix<'db>( + f: &mut HirFormatter<'_, 'db>, prefix: &str, - this: Either<&Ty, &Lifetime>, - predicates: &[QuantifiedWhereClause], + this: Either, Region<'db>>, + predicates: &[Clause<'db>], default_sized: SizedByDefault, ) -> Result<(), HirDisplayError> { write!(f, "{prefix}")?; @@ -2020,10 +1789,10 @@ pub fn write_bounds_like_dyn_trait_with_prefix( } } -fn write_bounds_like_dyn_trait( - f: &mut HirFormatter<'_>, - this: Either<&Ty, &Lifetime>, - predicates: 
&[QuantifiedWhereClause], +fn write_bounds_like_dyn_trait<'db>( + f: &mut HirFormatter<'_, 'db>, + this: Either, Region<'db>>, + predicates: &[Clause<'db>], default_sized: SizedByDefault, ) -> Result<(), HirDisplayError> { // Note: This code is written to produce nice results (i.e. @@ -2036,10 +1805,10 @@ fn write_bounds_like_dyn_trait( let mut angle_open = false; let mut is_fn_trait = false; let mut is_sized = false; - for p in predicates.iter() { - match p.skip_binders() { - WhereClause::Implemented(trait_ref) => { - let trait_ = trait_ref.hir_trait_id(); + for p in predicates { + match p.kind().skip_binder() { + ClauseKind::Trait(trait_ref) => { + let trait_ = trait_ref.def_id().0; if default_sized.is_sized_trait(trait_, f.db) { is_sized = true; if matches!(default_sized, SizedByDefault::Sized { .. }) { @@ -2064,31 +1833,30 @@ fn write_bounds_like_dyn_trait( write!(f, "{}", f.db.trait_signature(trait_).name.display(f.db, f.edition()))?; f.end_location_link(); if is_fn_trait { - if let [self_, params @ ..] = trait_ref.substitution.as_slice(Interner) - && let Some(args) = - params.first().and_then(|it| it.assert_ty_ref(Interner).as_tuple()) + if let [_self, params @ ..] = trait_ref.trait_ref.args.as_slice() + && let Some(args) = params.first().and_then(|it| it.ty()?.as_tuple()) { write!(f, "(")?; - hir_fmt_generic_arguments(f, args.as_slice(Interner), self_.ty(Interner))?; + hir_fmt_tys(f, args.as_slice(), Some(trait_ref.trait_ref.self_ty()))?; write!(f, ")")?; } } else { let params = generic_args_sans_defaults( f, Some(trait_.into()), - trait_ref.substitution.as_slice(Interner), + trait_ref.trait_ref.args.as_slice(), ); - if let [self_, params @ ..] = params + if let [_self, params @ ..] = params && !params.is_empty() { write!(f, "<")?; - hir_fmt_generic_arguments(f, params, self_.ty(Interner))?; + hir_fmt_generic_arguments(f, params, Some(trait_ref.trait_ref.self_ty()))?; // there might be assoc type bindings, so we leave the angle brackets open angle_open = true; } } } - WhereClause::TypeOutlives(to) if Either::Left(&to.ty) == this => { + ClauseKind::TypeOutlives(to) if Either::Left(to.0) == this => { if !is_fn_trait && angle_open { write!(f, ">")?; angle_open = false; @@ -2096,10 +1864,9 @@ fn write_bounds_like_dyn_trait( if !first { write!(f, " + ")?; } - to.lifetime.hir_fmt(f)?; + to.1.hir_fmt(f)?; } - WhereClause::TypeOutlives(_) => {} - WhereClause::LifetimeOutlives(lo) if Either::Right(&lo.a) == this => { + ClauseKind::RegionOutlives(lo) if Either::Right(lo.0) == this => { if !is_fn_trait && angle_open { write!(f, ">")?; angle_open = false; @@ -2107,17 +1874,16 @@ fn write_bounds_like_dyn_trait( if !first { write!(f, " + ")?; } - lo.b.hir_fmt(f)?; + lo.1.hir_fmt(f)?; } - WhereClause::LifetimeOutlives(_) => {} - WhereClause::AliasEq(alias_eq) if is_fn_trait => { + ClauseKind::Projection(projection) if is_fn_trait => { is_fn_trait = false; - if !alias_eq.ty.is_unit() { + if !projection.term.as_type().is_some_and(|it| it.is_unit()) { write!(f, " -> ")?; - alias_eq.ty.hir_fmt(f)?; + projection.term.hir_fmt(f)?; } } - WhereClause::AliasEq(AliasEq { ty, alias }) => { + ClauseKind::Projection(projection) => { // in types in actual Rust, these will always come // after the corresponding Implemented predicate if angle_open { @@ -2126,28 +1892,22 @@ fn write_bounds_like_dyn_trait( write!(f, "<")?; angle_open = true; } - if let AliasTy::Projection(proj) = alias { - let assoc_ty_id = from_assoc_type_id(proj.associated_ty_id); - let type_alias = f.db.type_alias_signature(assoc_ty_id); - 
f.start_location_link(assoc_ty_id.into()); - write!(f, "{}", type_alias.name.display(f.db, f.edition()))?; - f.end_location_link(); - - let proj_arg_count = generics(f.db, assoc_ty_id.into()).len_self(); - let parent_len = proj.substitution.len(Interner) - proj_arg_count; - if proj_arg_count > 0 { - write!(f, "<")?; - hir_fmt_generic_arguments( - f, - &proj.substitution.as_slice(Interner)[parent_len..], - None, - )?; - write!(f, ">")?; - } - write!(f, " = ")?; + let assoc_ty_id = projection.def_id().expect_type_alias(); + let type_alias = f.db.type_alias_signature(assoc_ty_id); + f.start_location_link(assoc_ty_id.into()); + write!(f, "{}", type_alias.name.display(f.db, f.edition()))?; + f.end_location_link(); + + let own_args = projection.projection_term.own_args(f.interner); + if !own_args.is_empty() { + write!(f, "<")?; + hir_fmt_generic_arguments(f, own_args.as_slice(), None)?; + write!(f, ">")?; } - ty.hir_fmt(f)?; + write!(f, " = ")?; + projection.term.hir_fmt(f)?; } + _ => {} } first = false; } @@ -2177,154 +1937,49 @@ fn write_bounds_like_dyn_trait( Ok(()) } -impl HirDisplay for TraitRef { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - let trait_ = self.hir_trait_id(); - f.start_location_link(trait_.into()); - write!(f, "{}", f.db.trait_signature(trait_).name.display(f.db, f.edition()))?; - f.end_location_link(); - let substs = self.substitution.as_slice(Interner); - hir_fmt_generics(f, &substs[1..], None, substs[0].ty(Interner)) - } -} - -impl<'db> HirDisplay for crate::next_solver::TraitRef<'db> { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for TraitRef<'db> { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { let trait_ = self.def_id.0; f.start_location_link(trait_.into()); write!(f, "{}", f.db.trait_signature(trait_).name.display(f.db, f.edition()))?; f.end_location_link(); let substs = self.args.as_slice(); - hir_fmt_generic_args(f, &substs[1..], None, substs[0].ty()) - } -} - -impl HirDisplay for WhereClause { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - if f.should_truncate() { - return write!(f, "{TYPE_HINT_TRUNCATION}"); - } - - match self { - WhereClause::Implemented(trait_ref) => { - trait_ref.self_type_parameter(Interner).hir_fmt(f)?; - write!(f, ": ")?; - trait_ref.hir_fmt(f)?; - } - WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection_ty), ty }) => { - write!(f, "<")?; - let trait_ref = &projection_ty.trait_ref(f.db); - trait_ref.self_type_parameter(Interner).hir_fmt(f)?; - write!(f, " as ")?; - trait_ref.hir_fmt(f)?; - write!(f, ">::",)?; - let type_alias = from_assoc_type_id(projection_ty.associated_ty_id); - f.start_location_link(type_alias.into()); - write!( - f, - "{}", - f.db.type_alias_signature(type_alias).name.display(f.db, f.edition()), - )?; - f.end_location_link(); - write!(f, " = ")?; - ty.hir_fmt(f)?; - } - WhereClause::AliasEq(_) => write!(f, "{{error}}")?, - - // FIXME implement these - WhereClause::TypeOutlives(..) => {} - WhereClause::LifetimeOutlives(..) 
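For the `Fn`-family sugar handled above, the inputs are printed as a parenthesized tuple and a unit output is suppressed; the projection arm only adds `-> T` for a non-unit `Output`. Roughly, with strings standing in for types:

```rust
// Illustrative only: render an `Fn`-trait bound as `Name(inputs) -> output`,
// omitting the arrow when the output is `()`.
fn render_fn_bound(trait_name: &str, inputs: &[&str], output: &str) -> String {
    let mut s = format!("{trait_name}({})", inputs.join(", "));
    if output != "()" {
        s.push_str(" -> ");
        s.push_str(output);
    }
    s
}

fn main() {
    assert_eq!(render_fn_bound("FnOnce", &["u32", "u32"], "bool"), "FnOnce(u32, u32) -> bool");
    assert_eq!(render_fn_bound("FnMut", &["&str"], "()"), "FnMut(&str)");
}
```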
=> {} - } - Ok(()) - } -} - -impl HirDisplay for LifetimeOutlives { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - self.a.hir_fmt(f)?; - write!(f, ": ")?; - self.b.hir_fmt(f) - } -} - -impl HirDisplay for Lifetime { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - self.interned().hir_fmt(f) + hir_fmt_generic_args(f, &substs[1..], None, Some(self.self_ty())) } } -impl HirDisplay for LifetimeData { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - match self { - LifetimeData::Placeholder(idx) => { - let id = lt_from_placeholder_idx(f.db, *idx).0; - let generics = generics(f.db, id.parent); - let param_data = &generics[id.local_id]; - write!(f, "{}", param_data.name.display(f.db, f.edition()))?; - Ok(()) - } - LifetimeData::BoundVar(idx) => idx.hir_fmt(f), - LifetimeData::InferenceVar(_) => write!(f, "_"), - LifetimeData::Static => write!(f, "'static"), - LifetimeData::Error => { - if cfg!(test) { - write!(f, "'?") - } else { - write!(f, "'_") - } - } - LifetimeData::Erased => write!(f, "'"), - LifetimeData::Phantom(void, _) => match *void {}, - } - } -} - -impl<'db> HirDisplay for crate::next_solver::Region<'db> { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for Region<'db> { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { match self.kind() { - rustc_type_ir::RegionKind::ReEarlyParam(param) => { + RegionKind::ReEarlyParam(param) => { let generics = generics(f.db, param.id.parent); let param_data = &generics[param.id.local_id]; write!(f, "{}", param_data.name.display(f.db, f.edition()))?; Ok(()) } - rustc_type_ir::RegionKind::ReBound(db, idx) => { + RegionKind::ReBound(db, idx) => { write!(f, "?{}.{}", db.as_u32(), idx.var.as_u32()) } - rustc_type_ir::RegionKind::ReVar(_) => write!(f, "_"), - rustc_type_ir::RegionKind::ReStatic => write!(f, "'static"), - rustc_type_ir::RegionKind::ReError(..) => { + RegionKind::ReVar(_) => write!(f, "_"), + RegionKind::ReStatic => write!(f, "'static"), + RegionKind::ReError(..) 
=> { if cfg!(test) { write!(f, "'?") } else { write!(f, "'_") } } - rustc_type_ir::RegionKind::ReErased => write!(f, "'"), - rustc_type_ir::RegionKind::RePlaceholder(_) => write!(f, ""), - rustc_type_ir::RegionKind::ReLateParam(_) => write!(f, ""), + RegionKind::ReErased => write!(f, "'"), + RegionKind::RePlaceholder(_) => write!(f, ""), + RegionKind::ReLateParam(_) => write!(f, ""), } } } -impl HirDisplay for DomainGoal { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - match self { - DomainGoal::Holds(wc) => { - write!(f, "Holds(")?; - wc.hir_fmt(f)?; - write!(f, ")")?; - } - _ => write!(f, "_")?, - } - Ok(()) - } -} - -pub fn write_visibility( +pub fn write_visibility<'db>( module_id: ModuleId, vis: Visibility, - f: &mut HirFormatter<'_>, + f: &mut HirFormatter<'_, 'db>, ) -> Result<(), HirDisplayError> { match vis { Visibility::Public => write!(f, "pub "), @@ -2346,28 +2001,30 @@ pub fn write_visibility( } } -pub trait HirDisplayWithExpressionStore { +pub trait HirDisplayWithExpressionStore<'db> { fn hir_fmt( &self, - f: &mut HirFormatter<'_>, + f: &mut HirFormatter<'_, 'db>, store: &ExpressionStore, ) -> Result<(), HirDisplayError>; } -impl HirDisplayWithExpressionStore for &'_ T { +impl<'db, T: ?Sized + HirDisplayWithExpressionStore<'db>> HirDisplayWithExpressionStore<'db> + for &'_ T +{ fn hir_fmt( &self, - f: &mut HirFormatter<'_>, + f: &mut HirFormatter<'_, 'db>, store: &ExpressionStore, ) -> Result<(), HirDisplayError> { T::hir_fmt(&**self, f, store) } } -pub fn hir_display_with_store<'a, T: HirDisplayWithExpressionStore + 'a>( +pub fn hir_display_with_store<'a, 'db, T: HirDisplayWithExpressionStore<'db> + 'a>( value: T, store: &'a ExpressionStore, -) -> impl HirDisplay + 'a { +) -> impl HirDisplay<'db> + 'a { ExpressionStoreAdapter(value, store) } @@ -2379,15 +2036,15 @@ impl<'a, T> ExpressionStoreAdapter<'a, T> { } } -impl HirDisplay for ExpressionStoreAdapter<'_, T> { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db, T: HirDisplayWithExpressionStore<'db>> HirDisplay<'db> for ExpressionStoreAdapter<'_, T> { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { T::hir_fmt(&self.0, f, self.1) } } -impl HirDisplayWithExpressionStore for LifetimeRefId { +impl<'db> HirDisplayWithExpressionStore<'db> for LifetimeRefId { fn hir_fmt( &self, - f: &mut HirFormatter<'_>, + f: &mut HirFormatter<'_, 'db>, store: &ExpressionStore, ) -> Result<(), HirDisplayError> { match &store[*self] { @@ -2407,10 +2064,10 @@ impl HirDisplayWithExpressionStore for LifetimeRefId { } } -impl HirDisplayWithExpressionStore for TypeRefId { +impl<'db> HirDisplayWithExpressionStore<'db> for TypeRefId { fn hir_fmt( &self, - f: &mut HirFormatter<'_>, + f: &mut HirFormatter<'_, 'db>, store: &ExpressionStore, ) -> Result<(), HirDisplayError> { match &store[*self] { @@ -2536,10 +2193,10 @@ impl HirDisplayWithExpressionStore for TypeRefId { } } -impl HirDisplayWithExpressionStore for ConstRef { +impl<'db> HirDisplayWithExpressionStore<'db> for ConstRef { fn hir_fmt( &self, - f: &mut HirFormatter<'_>, + f: &mut HirFormatter<'_, 'db>, _store: &ExpressionStore, ) -> Result<(), HirDisplayError> { // FIXME @@ -2549,10 +2206,10 @@ impl HirDisplayWithExpressionStore for ConstRef { } } -impl HirDisplayWithExpressionStore for TypeBound { +impl<'db> HirDisplayWithExpressionStore<'db> for TypeBound { fn hir_fmt( &self, - f: &mut HirFormatter<'_>, + f: &mut HirFormatter<'_, 'db>, store: &ExpressionStore, ) -> Result<(), 
HirDisplayError> { match self { @@ -2593,10 +2250,10 @@ impl HirDisplayWithExpressionStore for TypeBound { } } -impl HirDisplayWithExpressionStore for Path { +impl<'db> HirDisplayWithExpressionStore<'db> for Path { fn hir_fmt( &self, - f: &mut HirFormatter<'_>, + f: &mut HirFormatter<'_, 'db>, store: &ExpressionStore, ) -> Result<(), HirDisplayError> { match (self.type_anchor(), self.kind()) { @@ -2745,10 +2402,10 @@ impl HirDisplayWithExpressionStore for Path { } } -impl HirDisplayWithExpressionStore for hir_def::expr_store::path::GenericArg { +impl<'db> HirDisplayWithExpressionStore<'db> for hir_def::expr_store::path::GenericArg { fn hir_fmt( &self, - f: &mut HirFormatter<'_>, + f: &mut HirFormatter<'_, 'db>, store: &ExpressionStore, ) -> Result<(), HirDisplayError> { match self { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs index e179e41b1cbe2..2053a099ed781 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs @@ -130,11 +130,16 @@ impl Generics { /// Returns total number of generic parameters in scope, including those from parent. pub(crate) fn len(&self) -> usize { - let parent = self.parent_generics().map_or(0, Generics::len); + let parent = self.len_parent(); let child = self.params.len(); parent + child } + #[inline] + pub(crate) fn len_parent(&self) -> usize { + self.parent_generics().map_or(0, Generics::len) + } + /// Returns numbers of generic parameters excluding those from parent. pub(crate) fn len_self(&self) -> usize { self.params.len() diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index 72498681aca50..041799be9602e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -55,8 +55,7 @@ use stdx::never; use triomphe::Arc; use crate::{ - ImplTraitId, IncorrectGenericsLenKind, Interner, PathLoweringDiagnostic, TargetFeatures, - TraitEnvironment, + ImplTraitId, IncorrectGenericsLenKind, PathLoweringDiagnostic, TargetFeatures, db::{HirDatabase, InternedClosureId, InternedOpaqueTyId}, generics::Generics, infer::{ @@ -77,7 +76,7 @@ use crate::{ DefineOpaqueTypes, traits::{Obligation, ObligationCause}, }, - mapping::{ChalkToNextSolver, NextSolverToChalk}, + mapping::ChalkToNextSolver, }, traits::FnTrait, utils::TargetFeatureIsSafeInTarget, @@ -166,31 +165,6 @@ pub(crate) fn infer_cycle_result( }) } -/// Fully normalize all the types found within `ty` in context of `owner` body definition. -/// -/// This is appropriate to use only after type-check: it assumes -/// that normalization will succeed, for example. -#[tracing::instrument(level = "debug", skip(db))] -pub(crate) fn normalize( - db: &dyn HirDatabase, - trait_env: Arc>, - ty: crate::Ty, -) -> crate::Ty { - // FIXME: TypeFlags::HAS_CT_PROJECTION is not implemented in chalk, so TypeFlags::HAS_PROJECTION only - // works for the type case, so we check array unconditionally. Remove the array part - // when the bug in chalk becomes fixed. 
- if !ty.data(Interner).flags.intersects(crate::TypeFlags::HAS_PROJECTION) - && !matches!(ty.kind(Interner), crate::TyKind::Array(..)) - { - return ty; - } - let mut table = unify::InferenceTable::new(db, trait_env); - - let ty_with_vars = table.normalize_associated_types_in(ty.to_nextsolver(table.interner())); - table.select_obligations_where_possible(); - table.resolve_completely(ty_with_vars).to_chalk(table.interner()) -} - /// Binding modes inferred for patterns. /// #[derive(Copy, Clone, Debug, Eq, PartialEq, Default)] diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs index eb01ef104b61a..7277617bce8a1 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs @@ -81,22 +81,17 @@ use syntax::ast::{ConstArg, make}; use traits::FnTrait; use triomphe::Arc; -#[cfg(not(debug_assertions))] -use crate::next_solver::ErrorGuaranteed; use crate::{ + builder::{ParamKind, TyBuilder}, + chalk_ext::*, db::HirDatabase, display::{DisplayTarget, HirDisplay}, generics::Generics, infer::unify::InferenceTable, - next_solver::{ - DbInterner, - mapping::{ChalkToNextSolver, NextSolverToChalk, convert_ty_for_result}, - }, + next_solver::DbInterner, }; pub use autoderef::autoderef; -pub use builder::{ParamKind, TyBuilder}; -pub use chalk_ext::*; pub use infer::{ Adjust, Adjustment, AutoBorrow, BindingMode, InferenceDiagnostic, InferenceResult, InferenceTyDiagnosticSource, OverloadedDeref, PointerCast, @@ -156,7 +151,6 @@ pub(crate) type GenericArgData = chalk_ir::GenericArgData; pub(crate) type Ty = chalk_ir::Ty; pub type TyKind = chalk_ir::TyKind; -pub(crate) type TypeFlags = chalk_ir::TypeFlags; pub(crate) type DynTy = chalk_ir::DynTy; pub(crate) type FnPointer = chalk_ir::FnPointer; pub(crate) use chalk_ir::FnSubst; // a re-export so we don't lose the tuple constructor @@ -174,7 +168,6 @@ pub(crate) type ConstValue = chalk_ir::ConstValue; pub(crate) type Const = chalk_ir::Const; pub(crate) type ConstData = chalk_ir::ConstData; -pub(crate) type ConcreteConst = chalk_ir::ConcreteConst; pub(crate) type TraitRef = chalk_ir::TraitRef; pub(crate) type QuantifiedWhereClause = Binders; @@ -382,7 +375,7 @@ pub(crate) fn variable_kinds_from_iter( /// A function signature as seen by type inference: Several parameter types and /// one return type. #[derive(Clone, PartialEq, Eq, Debug)] -pub struct CallableSig { +pub(crate) struct CallableSig { params_and_return: Arc<[Ty]>, is_varargs: bool, safety: Safety, @@ -534,112 +527,6 @@ impl FnAbi { } } -/// A polymorphic function signature. 
-pub type PolyFnSig = Binders; - -impl CallableSig { - pub fn from_params_and_return( - params: impl Iterator, - ret: Ty, - is_varargs: bool, - safety: Safety, - abi: FnAbi, - ) -> CallableSig { - let mut params_and_return = Vec::with_capacity(params.size_hint().0 + 1); - params_and_return.extend(params); - params_and_return.push(ret); - CallableSig { params_and_return: params_and_return.into(), is_varargs, safety, abi } - } - - pub fn from_def(db: &dyn HirDatabase, def: FnDefId, substs: &Substitution) -> CallableSig { - let callable_def = ToChalk::from_chalk(db, def); - let interner = DbInterner::new_with(db, None, None); - let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner); - let sig = db.callable_item_signature(callable_def); - sig.instantiate(interner, args).skip_binder().to_chalk(interner) - } - pub fn from_fn_ptr(fn_ptr: &FnPointer) -> CallableSig { - CallableSig { - // FIXME: what to do about lifetime params? -> return PolyFnSig - params_and_return: Arc::from_iter( - fn_ptr - .substitution - .clone() - .shifted_out_to(Interner, DebruijnIndex::ONE) - .expect("unexpected lifetime vars in fn ptr") - .0 - .as_slice(Interner) - .iter() - .map(|arg| arg.assert_ty_ref(Interner).clone()), - ), - is_varargs: fn_ptr.sig.variadic, - safety: fn_ptr.sig.safety, - abi: fn_ptr.sig.abi, - } - } - pub fn from_fn_sig_and_header<'db>( - interner: DbInterner<'db>, - sig: crate::next_solver::Binder<'db, rustc_type_ir::FnSigTys>>, - header: rustc_type_ir::FnHeader>, - ) -> CallableSig { - CallableSig { - // FIXME: what to do about lifetime params? -> return PolyFnSig - params_and_return: Arc::from_iter( - sig.skip_binder() - .inputs_and_output - .iter() - .map(|t| convert_ty_for_result(interner, t)), - ), - is_varargs: header.c_variadic, - safety: match header.safety { - next_solver::abi::Safety::Safe => chalk_ir::Safety::Safe, - next_solver::abi::Safety::Unsafe => chalk_ir::Safety::Unsafe, - }, - abi: header.abi, - } - } - - pub fn to_fn_ptr(&self) -> FnPointer { - FnPointer { - num_binders: 0, - sig: FnSig { abi: self.abi, safety: self.safety, variadic: self.is_varargs }, - substitution: FnSubst(Substitution::from_iter( - Interner, - self.params_and_return.iter().cloned(), - )), - } - } - - pub fn abi(&self) -> FnAbi { - self.abi - } - - pub fn params(&self) -> &[Ty] { - &self.params_and_return[0..self.params_and_return.len() - 1] - } - - pub fn ret(&self) -> &Ty { - &self.params_and_return[self.params_and_return.len() - 1] - } -} - -impl TypeFoldable for CallableSig { - fn try_fold_with( - self, - folder: &mut dyn chalk_ir::fold::FallibleTypeFolder, - outer_binder: DebruijnIndex, - ) -> Result { - let vec = self.params_and_return.to_vec(); - let folded = vec.try_fold_with(folder, outer_binder)?; - Ok(CallableSig { - params_and_return: folded.into(), - is_varargs: self.is_varargs, - safety: self.safety, - abi: self.abi, - }) - } -} - #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] pub enum ImplTraitId { ReturnTypeImplTrait(hir_def::FunctionId, ImplTraitIdx), // FIXME(next-solver): Should be crate::nextsolver::ImplTraitIdx. 
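The deleted `CallableSig` stored the parameter types and the return type in one `params_and_return` slice; the next-solver `FnSig` keeps essentially the same layout under the name `inputs_and_output`, which is why call sites switch from `params()`/`ret()` to `inputs()`/`output()`. A minimal model of that layout (toy struct, not the real type):

```rust
// Illustrative only: one slice where the last element is the return type.
struct Sig<'a> {
    inputs_and_output: &'a [&'a str],
}

impl<'a> Sig<'a> {
    fn inputs(&self) -> &[&'a str] {
        &self.inputs_and_output[..self.inputs_and_output.len() - 1]
    }
    fn output(&self) -> &'a str {
        self.inputs_and_output[self.inputs_and_output.len() - 1]
    }
}

fn main() {
    let sig = Sig { inputs_and_output: &["u32", "&str", "bool"] };
    assert_eq!(sig.inputs(), ["u32", "&str"]);
    assert_eq!(sig.output(), "bool");
}
```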
@@ -764,7 +651,12 @@ where #[cfg(debug_assertions)] let error = || Err(()); #[cfg(not(debug_assertions))] - let error = || Ok(crate::next_solver::Ty::new_error(self.interner, ErrorGuaranteed)); + let error = || { + Ok(crate::next_solver::Ty::new_error( + self.interner, + crate::next_solver::ErrorGuaranteed, + )) + }; match t.kind() { crate::next_solver::TyKind::Error(_) => { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver.rs index abca6b6bb9e52..aced46bf806bb 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver.rs @@ -17,17 +17,20 @@ use std::{ use base_db::Crate; use either::Either; -use hir_def::hir::generics::GenericParamDataRef; -use hir_def::item_tree::FieldsShape; use hir_def::{ - AdtId, AssocItemId, CallableDefId, ConstParamId, DefWithBodyId, EnumVariantId, FunctionId, - GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LocalFieldId, Lookup, - StructId, TraitId, TypeAliasId, TypeOrConstParamId, VariantId, + AdtId, AssocItemId, CallableDefId, ConstId, ConstParamId, DefWithBodyId, EnumVariantId, + FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LifetimeParamId, + LocalFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId, + TypeParamId, VariantId, expr_store::{ ExpressionStore, path::{GenericArg, Path}, }, - hir::generics::{TypeOrConstParamData, WherePredicate}, + hir::generics::{ + GenericParamDataRef, TypeOrConstParamData, TypeParamData, TypeParamProvenance, + WherePredicate, + }, + item_tree::FieldsShape, lang_item::LangItem, resolver::{HasResolver, LifetimeNs, Resolver, TypeNs}, signatures::{FunctionSignature, TraitFlags, TypeAliasFlags}, @@ -36,7 +39,6 @@ use hir_def::{ TraitRef as HirTraitRef, TypeBound, TypeRef, TypeRefId, }, }; -use hir_def::{ConstId, LifetimeParamId, StaticId, TypeParamId}; use hir_expand::name::Name; use intern::{Symbol, sym}; use la_arena::{Arena, ArenaMap, Idx}; @@ -48,20 +50,17 @@ use rustc_type_ir::{ AliasTyKind, ConstKind, DebruijnIndex, ExistentialPredicate, ExistentialProjection, ExistentialTraitRef, FnSig, OutlivesPredicate, TyKind::{self}, - TypeVisitableExt, + TypeFoldable, TypeFolder, TypeVisitableExt, Upcast, inherent::{GenericArg as _, GenericArgs as _, IntoKind as _, Region as _, SliceLike, Ty as _}, }; -use rustc_type_ir::{TypeFoldable, TypeFolder, Upcast}; use salsa::plumbing::AsId; use smallvec::{SmallVec, smallvec}; use stdx::never; use triomphe::Arc; -use crate::ValueTyDefId; -use crate::next_solver::ParamConst; use crate::{ FnAbi, ImplTraitId, Interner, ParamKind, TraitEnvironment, TyDefId, TyLoweringDiagnostic, - TyLoweringDiagnosticKind, + TyLoweringDiagnosticKind, ValueTyDefId, consteval::{intern_const_ref, path_to_const, unknown_const_as_generic}, db::HirDatabase, generics::{Generics, generics, trait_self_param_idx}, @@ -69,8 +68,8 @@ use crate::{ next_solver::{ AdtDef, AliasTy, Binder, BoundExistentialPredicates, BoundRegionKind, BoundTyKind, BoundVarKind, BoundVarKinds, Clause, Clauses, Const, DbInterner, EarlyBinder, - EarlyParamRegion, ErrorGuaranteed, GenericArgs, ParamEnv, PolyFnSig, Predicate, Region, - SolverDefId, TraitPredicate, TraitRef, Ty, Tys, + EarlyParamRegion, ErrorGuaranteed, GenericArgs, ParamConst, ParamEnv, PolyFnSig, Predicate, + Region, SolverDefId, TraitPredicate, TraitRef, Ty, Tys, abi::Safety, mapping::{ChalkToNextSolver, convert_ty_for_result}, }, @@ -187,8 +186,9 @@ 
pub struct TyLoweringContext<'db, 'a> { pub(crate) unsized_types: FxHashSet>, pub(crate) diagnostics: Vec, lifetime_elision: LifetimeElisionKind<'db>, - /// We disallow referencing generic parameters that have an index greater than or equal to this number. - disallow_params_after: u32, + /// When lowering the defaults for generic params, this contains the index of the currently lowered param. + /// We disallow referring to later params, or to ADT's `Self`. + lowering_param_default: Option, } impl<'db, 'a> TyLoweringContext<'db, 'a> { @@ -213,7 +213,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { unsized_types: FxHashSet::default(), diagnostics: Vec::new(), lifetime_elision, - disallow_params_after: u32::MAX, + lowering_param_default: None, } } @@ -249,8 +249,8 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { self } - pub(crate) fn disallow_params_after(&mut self, after: u32) { - self.disallow_params_after = after; + pub(crate) fn lowering_param_default(&mut self, index: u32) { + self.lowering_param_default = Some(index); } pub(crate) fn push_diagnostic(&mut self, type_ref: TypeRefId, kind: TyLoweringDiagnosticKind) { @@ -333,8 +333,13 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { self.generics.get_or_init(|| generics(self.db, self.def)) } + fn param_index_is_disallowed(&self, index: u32) -> bool { + self.lowering_param_default + .is_some_and(|disallow_params_after| index >= disallow_params_after) + } + fn type_param(&mut self, id: TypeParamId, index: u32, name: Symbol) -> Ty<'db> { - if index >= self.disallow_params_after { + if self.param_index_is_disallowed(index) { // FIXME: Report an error. Ty::new_error(self.interner, ErrorGuaranteed) } else { @@ -343,7 +348,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { } fn const_param(&mut self, id: ConstParamId, index: u32) -> Const<'db> { - if index >= self.disallow_params_after { + if self.param_index_is_disallowed(index) { // FIXME: Report an error. Const::error(self.interner) } else { @@ -352,7 +357,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { } fn region_param(&mut self, id: LifetimeParamId, index: u32) -> Region<'db> { - if index >= self.disallow_params_after { + if self.param_index_is_disallowed(index) { // FIXME: Report an error. Region::error(self.interner) } else { @@ -394,7 +399,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { type_data .name .as_ref() - .map_or_else(|| sym::MISSING_NAME.clone(), |d| d.symbol().clone()), + .map_or_else(|| sym::MISSING_NAME, |d| d.symbol().clone()), ) } &TypeRef::RawPtr(inner, mutability) => { @@ -1603,8 +1608,6 @@ where for pred in maybe_parent_generics.where_predicates() { tracing::debug!(?pred); if filter(maybe_parent_generics.def()) { - // We deliberately use `generics` and not `maybe_parent_generics` here. This is not a mistake! 
- // If we use the parent generics predicates.extend(ctx.lower_where_predicate( pred, false, @@ -1619,49 +1622,53 @@ where let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate()); if let Some(sized_trait) = sized_trait { - let (mut generics, mut def_id) = - (crate::next_solver::generics::generics(db, def.into()), def); - loop { - if filter(def_id) { - let self_idx = trait_self_param_idx(db, def_id); - for (idx, p) in generics.own_params.iter().enumerate() { - if let Some(self_idx) = self_idx - && p.index() as usize == self_idx - { - continue; - } - let GenericParamId::TypeParamId(param_id) = p.id else { - continue; - }; - let idx = idx as u32 + generics.parent_count as u32; - let param_ty = Ty::new_param(interner, param_id, idx, p.name.clone()); - if explicitly_unsized_tys.contains(¶m_ty) { - continue; - } - let trait_ref = TraitRef::new_from_args( - interner, - sized_trait.into(), - GenericArgs::new_from_iter(interner, [param_ty.into()]), - ); - let clause = Clause(Predicate::new( - interner, - Binder::dummy(rustc_type_ir::PredicateKind::Clause( - rustc_type_ir::ClauseKind::Trait(TraitPredicate { - trait_ref, - polarity: rustc_type_ir::PredicatePolarity::Positive, - }), - )), - )); - predicates.push(clause); - } + let mut add_sized_clause = |param_idx, param_id, param_data| { + let ( + GenericParamId::TypeParamId(param_id), + GenericParamDataRef::TypeParamData(param_data), + ) = (param_id, param_data) + else { + return; + }; + + if param_data.provenance == TypeParamProvenance::TraitSelf { + return; } - if let Some(g) = generics.parent { - generics = crate::next_solver::generics::generics(db, g.into()); - def_id = g; - } else { - break; + let param_name = param_data + .name + .as_ref() + .map_or_else(|| sym::MISSING_NAME, |name| name.symbol().clone()); + let param_ty = Ty::new_param(interner, param_id, param_idx, param_name); + if explicitly_unsized_tys.contains(¶m_ty) { + return; } + let trait_ref = TraitRef::new_from_args( + interner, + sized_trait.into(), + GenericArgs::new_from_iter(interner, [param_ty.into()]), + ); + let clause = Clause(Predicate::new( + interner, + Binder::dummy(rustc_type_ir::PredicateKind::Clause( + rustc_type_ir::ClauseKind::Trait(TraitPredicate { + trait_ref, + polarity: rustc_type_ir::PredicatePolarity::Positive, + }), + )), + )); + predicates.push(clause); + }; + if generics.parent_generics().is_some_and(|parent| filter(parent.def())) { + generics.iter_parent().enumerate().for_each(|(param_idx, (param_id, param_data))| { + add_sized_clause(param_idx as u32, param_id, param_data); + }); + } + if filter(def) { + let parent_params_len = generics.len_parent(); + generics.iter_self().enumerate().for_each(|(param_idx, (param_id, param_data))| { + add_sized_clause((param_idx + parent_params_len) as u32, param_id, param_data); + }); } } @@ -1860,10 +1867,7 @@ pub(crate) fn generic_defaults_with_diagnostics_query( p: GenericParamDataRef<'_>, generic_params: &Generics, ) -> (Option>>, bool) { - // Each default can only refer to previous parameters. - // Type variable default referring to parameter coming - // after it is forbidden. 
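For the `add_sized_clause` helper above: it synthesizes the implicit `Sized` predicate for each type parameter, skipping a trait's `Self` parameter and anything the user relaxed explicitly. The surface rule it models, in plain Rust (nothing rust-analyzer specific):

```rust
use std::fmt::Debug;

// Every type parameter gets an implicit `T: Sized` bound...
fn takes_sized<T: Debug>(value: T) {
    println!("{value:?}");
}

// ...unless it is explicitly relaxed with `?Sized` (the "explicitly unsized" set above).
fn takes_unsized<T: Debug + ?Sized>(value: &T) {
    println!("{value:?}");
}

fn main() {
    takes_sized(42u32);
    takes_unsized("str is !Sized, so it must stay behind a reference");
}
```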
- ctx.disallow_params_after(idx as u32); + ctx.lowering_param_default(idx as u32); match p { GenericParamDataRef::TypeParamData(p) => { let ty = p.default.map(|ty| ctx.lower_ty(ty)); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver/path.rs index ef2c392f08616..6bfe266b460c7 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver/path.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver/path.rs @@ -314,7 +314,9 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> { self.lower_ty_relative_path(ty, Some(resolution), infer_args) } - fn handle_type_ns_resolution(&mut self, resolution: &TypeNs) { + /// This returns whether to keep the resolution (`true`) of throw it (`false`). + #[must_use] + fn handle_type_ns_resolution(&mut self, resolution: &TypeNs) -> bool { let mut prohibit_generics_on_resolved = |reason| { if self.current_or_prev_segment.args_and_bindings.is_some() { let segment = self.current_segment_u32(); @@ -333,7 +335,13 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> { prohibit_generics_on_resolved(GenericArgsProhibitedReason::TyParam) } TypeNs::AdtSelfType(_) => { - prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy) + prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy); + + if self.ctx.lowering_param_default.is_some() { + // Generic defaults are not allowed to refer to `Self`. + // FIXME: Emit an error. + return false; + } } TypeNs::BuiltinType(_) => { prohibit_generics_on_resolved(GenericArgsProhibitedReason::PrimitiveTy) @@ -346,6 +354,8 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> { | TypeNs::TypeAliasId(_) | TypeNs::TraitId(_) => {} } + + true } pub(crate) fn resolve_path_in_type_ns_fully(&mut self) -> Option { @@ -379,11 +389,6 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> { self.current_or_prev_segment = segments.get(resolved_segment_idx).expect("should have resolved segment"); - if matches!(self.path, Path::BarePath(..)) { - // Bare paths cannot have generics, so skip them as an optimization. 
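The `lowering_param_default` mechanism above enforces the language rules for generic parameter defaults: a default may only mention parameters declared before it, and (per the new check in `handle_type_ns_resolution`) it may not mention an ADT's `Self`. In surface Rust:

```rust
// Accepted: `U`'s default refers to the earlier parameter `T`.
struct Pair<T, U = T>(T, U);

// Rejected by rustc (left as comments so this example compiles):
// struct Forward<T = U, U>(T, U); // error: default refers to a later parameter
// struct UsesSelf<T = Self>(T);   // error: defaults may not refer to `Self`

fn main() {
    let _p: Pair<u8> = Pair(1, 2); // `U` defaults to `T`, i.e. `u8`
}
```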
- return Some((resolution, remaining_index)); - } - for (i, mod_segment) in module_segments.iter().enumerate() { if mod_segment.args_and_bindings.is_some() { self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { @@ -403,7 +408,9 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> { }); } - self.handle_type_ns_resolution(&resolution); + if !self.handle_type_ns_resolution(&resolution) { + return None; + } Some((resolution, remaining_index)) } @@ -475,7 +482,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> { match resolution { ValueNs::ImplSelf(_) => { - prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy) + prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy); } // FIXME: rustc generates E0107 (incorrect number of generic arguments) and not // E0109 (generic arguments provided for a type that doesn't accept them) for @@ -499,7 +506,9 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> { } } ResolveValueResult::Partial(resolution, _, _) => { - self.handle_type_ns_resolution(resolution); + if !self.handle_type_ns_resolution(resolution) { + return None; + } } }; Some(res) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs index e46edb8159189..0c5a64935e498 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs @@ -118,10 +118,10 @@ enum LocalName<'db> { Binding(Name, LocalId<'db>), } -impl<'db> HirDisplay for LocalName<'db> { +impl<'db> HirDisplay<'db> for LocalName<'db> { fn hir_fmt( &self, - f: &mut crate::display::HirFormatter<'_>, + f: &mut crate::display::HirFormatter<'_, 'db>, ) -> Result<(), crate::display::HirDisplayError> { match self { LocalName::Unknown(l) => write!(f, "_{}", u32::from(l.into_raw())), @@ -489,7 +489,7 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> { } } - fn hir_display<'b, T: HirDisplay>(&self, ty: &'b T) -> impl Display + use<'a, 'b, 'db, T> + fn hir_display<'b, T: HirDisplay<'db>>(&self, ty: &'b T) -> impl Display + use<'a, 'b, 'db, T> where 'db: 'b, { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/consts.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/consts.rs index 2fc1fc4f45a5c..c5a1e7d315465 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/consts.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/consts.rs @@ -82,7 +82,11 @@ impl<'db> Const<'db> { } pub fn is_ct_infer(&self) -> bool { - matches!(&self.inner().internee, ConstKind::Infer(_)) + matches!(self.kind(), ConstKind::Infer(_)) + } + + pub fn is_error(&self) -> bool { + matches!(self.kind(), ConstKind::Error(_)) } pub fn is_trivially_wf(self) -> bool { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs index 9dda9d06da276..6a0a07705a8c0 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs @@ -647,6 +647,26 @@ impl<'db> UpcastFrom, ty::OutlivesPredicate, Reg PredicateKind::Clause(ClauseKind::RegionOutlives(from)).upcast(interner) } } +impl<'db> UpcastFrom, ty::OutlivesPredicate, Ty<'db>>> + for Clause<'db> +{ + fn upcast_from( + from: ty::OutlivesPredicate, Ty<'db>>, + interner: DbInterner<'db>, + ) -> Self { + Clause(from.upcast(interner)) + } +} +impl<'db> UpcastFrom, ty::OutlivesPredicate, Region<'db>>> + for Clause<'db> +{ + fn upcast_from( + from: 
ty::OutlivesPredicate, Region<'db>>, + interner: DbInterner<'db>, + ) -> Self { + Clause(from.upcast(interner)) + } +} impl<'db> UpcastFrom, PolyRegionOutlivesPredicate<'db>> for Predicate<'db> { fn upcast_from(from: PolyRegionOutlivesPredicate<'db>, tcx: DbInterner<'db>) -> Self { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs index cd125f3af8645..35c8a197f52cb 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs @@ -12,22 +12,20 @@ use intern::sym; use rustc_next_trait_solver::solve::{HasChanged, SolverDelegateEvalExt}; use rustc_type_ir::{ InferCtxtLike, TypingMode, - inherent::{IntoKind, SliceLike, Span as _, Ty as _}, + inherent::{IntoKind, SliceLike, Span as _}, solve::Certainty, }; use span::Edition; -use stdx::never; use triomphe::Arc; use crate::{ - AliasEq, AliasTy, Canonical, DomainGoal, Goal, InEnvironment, Interner, ProjectionTy, - ProjectionTyExt, TraitRefExt, Ty, TyKind, TypeFlags, WhereClause, + AliasEq, AliasTy, Canonical, DomainGoal, Goal, InEnvironment, Interner, ProjectionTyExt, + TraitRefExt, TyKind, WhereClause, db::HirDatabase, - from_assoc_type_id, next_solver::{ DbInterner, GenericArg, ParamEnv, Predicate, SolverContext, Span, infer::{DbInternerInferExt, InferCtxt, traits::ObligationCause}, - mapping::{ChalkToNextSolver, NextSolverToChalk, convert_canonical_args_for_result}, + mapping::{ChalkToNextSolver, convert_canonical_args_for_result}, obligation_ctxt::ObligationCtxt, util::mini_canonicalize, }, @@ -94,47 +92,6 @@ pub fn structurally_normalize_ty<'db>( ty.replace_infer_with_error(infcx.interner) } -pub(crate) fn normalize_projection_query<'db>( - db: &'db dyn HirDatabase, - projection: ProjectionTy, - env: Arc>, -) -> Ty { - if projection.substitution.iter(Interner).any(|arg| { - arg.ty(Interner) - .is_some_and(|ty| ty.data(Interner).flags.intersects(TypeFlags::HAS_TY_INFER)) - }) { - never!( - "Invoking `normalize_projection_query` with a projection type containing inference var" - ); - return TyKind::Error.intern(Interner); - } - - let interner = DbInterner::new_with(db, Some(env.krate), env.block); - // FIXME(next-solver): I believe this should use `PostAnalysis` (this is only used for IDE things), - // but this causes some bug because of our incorrect impl of `type_of_opaque_hir_typeck()` for TAIT - // and async blocks. - let infcx = interner.infer_ctxt().build(TypingMode::Analysis { - defining_opaque_types_and_generators: crate::next_solver::SolverDefIds::new_from_iter( - interner, - [], - ), - }); - let alias_ty = crate::next_solver::Ty::new_alias( - interner, - rustc_type_ir::AliasTyKind::Projection, - crate::next_solver::AliasTy::new( - interner, - from_assoc_type_id(projection.associated_ty_id).into(), - >>::to_nextsolver(&projection.substitution, interner), - ), - ); - let mut ctxt = crate::next_solver::obligation_ctxt::ObligationCtxt::new(&infcx); - let normalized = ctxt - .structurally_normalize_ty(&ObligationCause::dummy(), env.env, alias_ty) - .unwrap_or(alias_ty); - normalized.replace_infer_with_error(interner).to_chalk(interner) -} - fn identity_subst( binders: chalk_ir::CanonicalVarKinds, ) -> chalk_ir::Canonical> { @@ -165,45 +122,6 @@ fn identity_subst( chalk_ir::Canonical { binders, value: identity_subst } } -/// Solve a trait goal using next trait solver. 
-pub(crate) fn trait_solve_query( - db: &dyn HirDatabase, - krate: Crate, - block: Option, - goal: Canonical>, -) -> NextTraitSolveResult { - let _p = tracing::info_span!("trait_solve_query", detail = ?match &goal.value.goal.data(Interner) { - GoalData::DomainGoal(DomainGoal::Holds(WhereClause::Implemented(it))) => db - .trait_signature(it.hir_trait_id()) - .name - .display(db, Edition::LATEST) - .to_string(), - GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_owned(), - _ => "??".to_owned(), - }) - .entered(); - - if let GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(AliasEq { - alias: AliasTy::Projection(projection_ty), - .. - }))) = &goal.value.goal.data(Interner) - && let TyKind::BoundVar(_) = projection_ty.self_type_parameter(db).kind(Interner) - { - // Hack: don't ask Chalk to normalize with an unknown self type, it'll say that's impossible - return NextTraitSolveResult::Uncertain(identity_subst(goal.binders.clone())); - } - - // Chalk see `UnevaluatedConst` as a unique concrete value, but we see it as an alias for another const. So - // we should get rid of it when talking to chalk. - let goal = goal - .try_fold_with(&mut UnevaluatedConstEvaluatorFolder { db }, DebruijnIndex::INNERMOST) - .unwrap(); - - // We currently don't deal with universes (I think / hope they're not yet - // relevant for our use cases?) - next_trait_solve(db, krate, block, goal) -} - fn solve_nextsolver<'db>( db: &'db dyn HirDatabase, krate: Crate, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs index e989e4c006fff..15359922c80e7 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs @@ -25,8 +25,7 @@ use smallvec::{SmallVec, smallvec}; use span::Edition; use crate::{ - ChalkTraitId, Const, ConstScalar, Interner, Substitution, TargetFeatures, TraitRef, - TraitRefExt, Ty, + ChalkTraitId, Const, ConstScalar, Interner, TargetFeatures, TraitRef, TraitRefExt, consteval::unknown_const, db::HirDatabase, layout::{Layout, TagEncoding}, @@ -192,19 +191,6 @@ pub(super) fn associated_type_by_name_including_super_traits( }) } -pub(crate) struct ClosureSubst<'a>(pub(crate) &'a Substitution); - -impl<'a> ClosureSubst<'a> { - pub(crate) fn sig_ty(&self, db: &dyn HirDatabase) -> Ty { - let interner = DbInterner::new_with(db, None, None); - let subst = - >>::to_nextsolver( - self.0, interner, - ); - subst.split_closure_args_untupled().closure_sig_as_fn_ptr_ty.to_chalk(interner) - } -} - #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum Unsafety { Safe, diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs index 49bf843367d37..b31bb248e8397 100644 --- a/src/tools/rust-analyzer/crates/hir/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir/src/display.rs @@ -11,14 +11,15 @@ use hir_def::{ type_ref::{TypeBound, TypeRef, TypeRefId}, }; use hir_ty::{ - AliasEq, AliasTy, Interner, ProjectionTyExt, TraitRefExt, TyBuilder, TyKind, WhereClause, db::HirDatabase, display::{ HirDisplay, HirDisplayError, HirDisplayWithExpressionStore, HirFormatter, SizedByDefault, hir_display_with_store, write_bounds_like_dyn_trait_with_prefix, write_visibility, }, + next_solver::ClauseKind, }; use itertools::Itertools; +use rustc_type_ir::inherent::IntoKind; use crate::{ Adt, AsAssocItem, AssocItem, AssocItemContainer, Const, ConstParam, Crate, Enum, @@ -27,8 +28,8 @@ use crate::{ TypeAlias, TypeNs, 
TypeOrConstParam, TypeParam, Union, Variant, }; -impl HirDisplay for Function { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for Function { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { let db = f.db; let data = db.function_signature(self.id); let container = self.as_assoc_item(db).map(|it| it.container(db)); @@ -184,7 +185,10 @@ impl HirDisplay for Function { } } -fn write_impl_header(impl_: &Impl, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +fn write_impl_header<'db>( + impl_: &Impl, + f: &mut HirFormatter<'_, 'db>, +) -> Result<(), HirDisplayError> { let db = f.db; f.write_str("impl")?; @@ -202,8 +206,8 @@ fn write_impl_header(impl_: &Impl, f: &mut HirFormatter<'_>) -> Result<(), HirDi Ok(()) } -impl HirDisplay for SelfParam { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for SelfParam { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { let data = f.db.function_signature(self.func); let param = *data.params.first().unwrap(); match &data.store[param] { @@ -228,8 +232,8 @@ impl HirDisplay for SelfParam { } } -impl HirDisplay for Adt { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for Adt { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { match self { Adt::Struct(it) => it.hir_fmt(f), Adt::Union(it) => it.hir_fmt(f), @@ -238,8 +242,8 @@ impl HirDisplay for Adt { } } -impl HirDisplay for Struct { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for Struct { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { let module_id = self.module(f.db).id; // FIXME: Render repr if its set explicitly? 
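Most of the churn in this file is mechanical: `HirDisplay` and `HirFormatter` gain a `'db` lifetime parameter so that values built from the next-solver types can borrow data that lives for `'db` while being formatted. A toy model of that threading pattern (names are stand-ins, not the real trait):

```rust
use std::marker::PhantomData;

struct Formatter<'a, 'db> {
    out: &'a mut String,
    _db: PhantomData<&'db ()>, // stands in for the `'db` database/interner handle
}

trait DisplayDb<'db> {
    fn fmt(&self, f: &mut Formatter<'_, 'db>);
}

// A value that borrows from the database for `'db`, like `Ty<'db>`.
struct Name<'db>(&'db str);

impl<'db> DisplayDb<'db> for Name<'db> {
    fn fmt(&self, f: &mut Formatter<'_, 'db>) {
        f.out.push_str(self.0); // the borrow is tied to `'db`, which the trait now tracks
    }
}

fn main() {
    let db_owned = String::from("Foo");
    let value = Name(&db_owned);
    let mut out = String::new();
    let mut f = Formatter { out: &mut out, _db: PhantomData };
    value.fmt(&mut f);
    assert_eq!(out, "Foo");
}
```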
write_visibility(module_id, self.visibility(f.db), f)?; @@ -279,8 +283,8 @@ impl HirDisplay for Struct { } } -impl HirDisplay for Enum { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for Enum { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { write_visibility(self.module(f.db).id, self.visibility(f.db), f)?; f.write_str("enum ")?; write!(f, "{}", self.name(f.db).display(f.db, f.edition()))?; @@ -296,8 +300,8 @@ impl HirDisplay for Enum { } } -impl HirDisplay for Union { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for Union { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { write_visibility(self.module(f.db).id, self.visibility(f.db), f)?; f.write_str("union ")?; write!(f, "{}", self.name(f.db).display(f.db, f.edition()))?; @@ -312,12 +316,12 @@ impl HirDisplay for Union { } } -fn write_fields( +fn write_fields<'db>( fields: &[Field], has_where_clause: bool, limit: usize, in_line: bool, - f: &mut HirFormatter<'_>, + f: &mut HirFormatter<'_, 'db>, ) -> Result<(), HirDisplayError> { let count = fields.len().min(limit); let (indent, separator) = if in_line { ("", ' ') } else { (" ", '\n') }; @@ -346,11 +350,11 @@ fn write_fields( Ok(()) } -fn write_variants( +fn write_variants<'db>( variants: &[Variant], has_where_clause: bool, limit: usize, - f: &mut HirFormatter<'_>, + f: &mut HirFormatter<'_, 'db>, ) -> Result<(), HirDisplayError> { let count = variants.len().min(limit); f.write_char(if !has_where_clause { ' ' } else { '\n' })?; @@ -386,23 +390,23 @@ fn write_variants( Ok(()) } -impl HirDisplay for Field { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for Field { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { write_visibility(self.parent.module(f.db).id, self.visibility(f.db), f)?; write!(f, "{}: ", self.name(f.db).display(f.db, f.edition()))?; self.ty(f.db).hir_fmt(f) } } -impl HirDisplay for TupleField { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for TupleField { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { write!(f, "pub {}: ", self.name().display(f.db, f.edition()))?; self.ty(f.db).hir_fmt(f) } } -impl HirDisplay for Variant { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for Variant { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { write!(f, "{}", self.name(f.db).display(f.db, f.edition()))?; let data = self.id.fields(f.db); match data.shape { @@ -431,20 +435,20 @@ impl HirDisplay for Variant { } } -impl HirDisplay for Type<'_> { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for Type<'db> { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { self.ty.hir_fmt(f) } } -impl HirDisplay for TypeNs<'_> { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for TypeNs<'db> { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { self.ty.hir_fmt(f) } } -impl HirDisplay for ExternCrateDecl { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for ExternCrateDecl { + fn hir_fmt(&self, f: &mut 
HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { write_visibility(self.module(f.db).id, self.visibility(f.db), f)?; f.write_str("extern crate ")?; write!(f, "{}", self.name(f.db).display(f.db, f.edition()))?; @@ -455,8 +459,8 @@ impl HirDisplay for ExternCrateDecl { } } -impl HirDisplay for GenericParam { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for GenericParam { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { match self { GenericParam::TypeParam(it) => it.hir_fmt(f), GenericParam::ConstParam(it) => it.hir_fmt(f), @@ -465,8 +469,8 @@ impl HirDisplay for GenericParam { } } -impl HirDisplay for TypeOrConstParam { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for TypeOrConstParam { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { match self.split(f.db) { either::Either::Left(it) => it.hir_fmt(f), either::Either::Right(it) => it.hir_fmt(f), @@ -474,27 +478,22 @@ impl HirDisplay for TypeOrConstParam { } } -impl HirDisplay for TypeParam { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for TypeParam { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { let params = f.db.generic_params(self.id.parent()); let param_data = ¶ms[self.id.local_id()]; - let substs = TyBuilder::placeholder_subst(f.db, self.id.parent()); let krate = self.id.parent().krate(f.db).id; - let ty = TyKind::Placeholder(hir_ty::to_placeholder_idx_no_index(f.db, self.id.into())) - .intern(Interner); - let predicates = f.db.generic_predicates(self.id.parent()); + let ty = self.ty(f.db).ty; + let predicates = f.db.generic_predicates_ns(self.id.parent()); let predicates = predicates - .iter() - .cloned() - .map(|pred| pred.substitute(Interner, &substs)) - .filter(|wc| match wc.skip_binders() { - WhereClause::Implemented(tr) => tr.self_type_parameter(Interner) == ty, - WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(proj), ty: _ }) => { - proj.self_type_parameter(f.db) == ty - } - WhereClause::AliasEq(_) => false, - WhereClause::TypeOutlives(to) => to.ty == ty, - WhereClause::LifetimeOutlives(_) => false, + .instantiate_identity() + .into_iter() + .flatten() + .filter(|wc| match wc.kind().skip_binder() { + ClauseKind::Trait(tr) => tr.self_ty() == ty, + ClauseKind::Projection(proj) => proj.self_ty() == ty, + ClauseKind::TypeOutlives(to) => to.0 == ty, + _ => false, }) .collect::>(); @@ -507,7 +506,7 @@ impl HirDisplay for TypeParam { return write_bounds_like_dyn_trait_with_prefix( f, "impl", - Either::Left(&ty), + Either::Left(ty), &predicates, SizedByDefault::Sized { anchor: krate }, ); @@ -523,23 +522,18 @@ impl HirDisplay for TypeParam { } let sized_trait = LangItem::Sized.resolve_trait(f.db, krate); - let has_only_sized_bound = predicates.iter().all(move |pred| match pred.skip_binders() { - WhereClause::Implemented(it) => Some(it.hir_trait_id()) == sized_trait, - _ => false, - }); + let has_only_sized_bound = + predicates.iter().all(move |pred| match pred.kind().skip_binder() { + ClauseKind::Trait(it) => Some(it.def_id().0) == sized_trait, + _ => false, + }); let has_only_not_sized_bound = predicates.is_empty(); if !has_only_sized_bound || has_only_not_sized_bound { let default_sized = SizedByDefault::Sized { anchor: krate }; write_bounds_like_dyn_trait_with_prefix( f, ":", - Either::Left( - 
&hir_ty::TyKind::Placeholder(hir_ty::to_placeholder_idx_no_index( - f.db, - self.id.into(), - )) - .intern(Interner), - ), + Either::Left(ty), &predicates, default_sized, )?; @@ -548,22 +542,22 @@ impl HirDisplay for TypeParam { } } -impl HirDisplay for LifetimeParam { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for LifetimeParam { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { write!(f, "{}", self.name(f.db).display(f.db, f.edition())) } } -impl HirDisplay for ConstParam { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for ConstParam { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { write!(f, "const {}: ", self.name(f.db).display(f.db, f.edition()))?; self.ty(f.db).hir_fmt(f) } } -fn write_generic_params( +fn write_generic_params<'db>( def: GenericDefId, - f: &mut HirFormatter<'_>, + f: &mut HirFormatter<'_, 'db>, ) -> Result<(), HirDisplayError> { let (params, store) = f.db.generic_params_and_store(def); if params.iter_lt().next().is_none() @@ -578,7 +572,7 @@ fn write_generic_params( f.write_char('<')?; let mut first = true; - let mut delim = |f: &mut HirFormatter<'_>| { + let mut delim = |f: &mut HirFormatter<'_, 'db>| { if first { first = false; Ok(()) @@ -622,9 +616,9 @@ fn write_generic_params( Ok(()) } -fn write_where_clause( +fn write_where_clause<'db>( def: GenericDefId, - f: &mut HirFormatter<'_>, + f: &mut HirFormatter<'_, 'db>, ) -> Result { let (params, store) = f.db.generic_params_and_store(def); if !has_disaplayable_predicates(f.db, ¶ms, &store) { @@ -653,10 +647,10 @@ fn has_disaplayable_predicates( }) } -fn write_where_predicates( +fn write_where_predicates<'db>( params: &GenericParams, store: &ExpressionStore, - f: &mut HirFormatter<'_>, + f: &mut HirFormatter<'_, 'db>, ) -> Result<(), HirDisplayError> { use WherePredicate::*; @@ -717,8 +711,8 @@ fn write_where_predicates( Ok(()) } -impl HirDisplay for Const { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for Const { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { let db = f.db; let container = self.as_assoc_item(db).map(|it| it.container(db)); let mut module = self.module(db); @@ -738,8 +732,8 @@ impl HirDisplay for Const { } } -impl HirDisplay for Static { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for Static { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { write_visibility(self.module(f.db).id, self.visibility(f.db), f)?; let data = f.db.static_signature(self.id); f.write_str("static ")?; @@ -752,14 +746,14 @@ impl HirDisplay for Static { } } -impl HirDisplay for TraitRef<'_> { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for TraitRef<'db> { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { self.trait_ref.hir_fmt(f) } } -impl HirDisplay for Trait { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for Trait { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { // FIXME(trait-alias) needs special handling to print the equal sign write_trait_header(self, f)?; let def_id = GenericDefId::TraitId(self.id); @@ -798,7 +792,10 @@ impl HirDisplay for Trait { } } 
-fn write_trait_header(trait_: &Trait, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +fn write_trait_header<'db>( + trait_: &Trait, + f: &mut HirFormatter<'_, 'db>, +) -> Result<(), HirDisplayError> { write_visibility(trait_.module(f.db).id, trait_.visibility(f.db), f)?; let data = f.db.trait_signature(trait_.id); if data.flags.contains(TraitFlags::UNSAFE) { @@ -812,8 +809,8 @@ fn write_trait_header(trait_: &Trait, f: &mut HirFormatter<'_>) -> Result<(), Hi Ok(()) } -impl HirDisplay for TypeAlias { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for TypeAlias { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { write_visibility(self.module(f.db).id, self.visibility(f.db), f)?; let data = f.db.type_alias_signature(self.id); write!(f, "type {}", data.name.display(f.db, f.edition()))?; @@ -835,8 +832,8 @@ impl HirDisplay for TypeAlias { } } -impl HirDisplay for Module { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for Module { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { match self.parent(f.db) { Some(m) => write_visibility(m.id, self.visibility(f.db), f)?, None => { @@ -853,8 +850,8 @@ impl HirDisplay for Module { } } -impl HirDisplay for Crate { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for Crate { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { match self.display_name(f.db) { Some(name) => write!(f, "extern crate {name}"), None => f.write_str("extern crate {unknown}"), @@ -862,8 +859,8 @@ impl HirDisplay for Crate { } } -impl HirDisplay for Macro { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl<'db> HirDisplay<'db> for Macro { + fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> { match self.id { hir_def::MacroId::Macro2Id(_) => f.write_str("macro"), hir_def::MacroId::MacroRulesId(_) => f.write_str("macro_rules!"), diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs index df1800616803e..6c4a074d46d68 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs @@ -9392,7 +9392,7 @@ fn main(a$0: T) {} *a* ```rust - a: T + a: T ``` --- diff --git a/src/tools/rust-analyzer/crates/ide/src/moniker.rs b/src/tools/rust-analyzer/crates/ide/src/moniker.rs index f1aa03c8f2672..4aa9eb98a1e15 100644 --- a/src/tools/rust-analyzer/crates/ide/src/moniker.rs +++ b/src/tools/rust-analyzer/crates/ide/src/moniker.rs @@ -384,7 +384,7 @@ fn def_to_non_local_moniker( }) } -fn display(db: &RootDatabase, module: hir::Module, it: T) -> String { +fn display<'db, T: HirDisplay<'db>>(db: &'db RootDatabase, module: hir::Module, it: T) -> String { match it.display_source_code(db, module.into(), true) { Ok(result) => result, // Fallback on display variant that always succeeds diff --git a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs index db1298385b113..93090e2a0203b 100644 --- a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs +++ b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs @@ -382,7 +382,7 @@ impl ToNavFromAst for hir::Trait { impl TryToNav for D where - D: HasSource + ToNavFromAst + Copy + HasDocs + HirDisplay + 
HasCrate, + D: HasSource + ToNavFromAst + Copy + HasDocs + for<'db> HirDisplay<'db> + HasCrate, D::Ast: ast::HasName, { fn try_to_nav( From b2566ff07bcdf70ac368ebc1e1a1a03ed257ef0d Mon Sep 17 00:00:00 2001 From: A4-Tacks Date: Wed, 15 Oct 2025 19:26:59 +0800 Subject: [PATCH 24/76] Migrate `add_braces` assist, because edit_in_place uses ted - And fix indent Example --- ```rust fn foo() { { match n { Some(n) $0=> foo( 29, 30, ), _ => () }; } } ``` **Before this PR**: ```rust fn main() { { match n { Some(n) => { foo( 29, 30, ) }, _ => () }; } } ``` **After this PR**: ```rust fn foo() { { match n { Some(n) => { foo( 29, 30, ) }, _ => () }; } } ``` --- .../ide-assists/src/handlers/add_braces.rs | 43 +++++++++++++++++-- 1 file changed, 39 insertions(+), 4 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_braces.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_braces.rs index 5af622eaf28b0..d855fb771846a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_braces.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_braces.rs @@ -1,7 +1,7 @@ use either::Either; use syntax::{ AstNode, - ast::{self, edit_in_place::Indent, syntax_factory::SyntaxFactory}, + ast::{self, edit::AstNodeEdit, syntax_factory::SyntaxFactory}, }; use crate::{AssistContext, AssistId, Assists}; @@ -43,10 +43,10 @@ pub(crate) fn add_braces(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<( let make = SyntaxFactory::with_mappings(); let mut editor = builder.make_editor(expr.syntax()); - let block_expr = make.block_expr(None, Some(expr.clone())); - block_expr.indent(expr.indent_level()); + let new_expr = expr.reset_indent().indent(1.into()); + let block_expr = make.block_expr(None, Some(new_expr)); - editor.replace(expr.syntax(), block_expr.syntax()); + editor.replace(expr.syntax(), block_expr.indent(expr.indent_level()).syntax()); editor.add_mappings(make.finish_with_mappings()); builder.add_file_edits(ctx.vfs_file_id(), editor); @@ -171,6 +171,41 @@ fn foo() { ); } + #[test] + fn multiple_indent() { + check_assist( + add_braces, + r#" +fn foo() { + { + match n { + Some(n) $0=> foo( + 29, + 30, + ), + _ => () + }; + } +} +"#, + r#" +fn foo() { + { + match n { + Some(n) => { + foo( + 29, + 30, + ) + }, + _ => () + }; + } +} +"#, + ); + } + #[test] fn no_assist_for_match_with_braces() { check_assist_not_applicable( From d41a190c687137ce9ec5495e198dc839c2b071be Mon Sep 17 00:00:00 2001 From: A4-Tacks Date: Thu, 16 Oct 2025 14:29:36 +0800 Subject: [PATCH 25/76] Migrate `add_missing_match_arms` assist, because edit_in_place uses ted - And fix indentations Example --- ```rust fn main() { match None$0 { None => { foo( "foo", "bar", ); } } } ``` **Before this PR**: ```rust fn main() { match None { None => { foo( "foo", "bar", ); } Some(_) => todo!(), } } ``` **After this PR**: ```rust fn main() { match None { None => { foo( "foo", "bar", ); } Some(${1:_}) => ${2:todo!()},$0 } } ``` --- .../src/handlers/add_missing_match_arms.rs | 39 +++++++++++++++++-- 1 file changed, 36 insertions(+), 3 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs index 8802a54e7f240..7843ab9e8f25b 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs @@ -8,8 +8,7 @@ use ide_db::syntax_helpers::suggest_name; 
use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast}; use itertools::Itertools; use syntax::ToSmolStr; -use syntax::ast::edit::IndentLevel; -use syntax::ast::edit_in_place::Indent; +use syntax::ast::edit::{AstNodeEdit, IndentLevel}; use syntax::ast::syntax_factory::SyntaxFactory; use syntax::ast::{self, AstNode, MatchArmList, MatchExpr, Pat, make}; @@ -261,6 +260,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) true } }) + .map(|arm| arm.reset_indent().indent(IndentLevel(1))) .collect(); let first_new_arm_idx = arms.len(); @@ -300,7 +300,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) }; let mut editor = builder.make_editor(&old_place); - new_match_arm_list.indent(IndentLevel::from_node(&old_place)); + let new_match_arm_list = new_match_arm_list.indent(IndentLevel::from_node(&old_place)); editor.replace(old_place, new_match_arm_list.syntax()); if let Some(cap) = ctx.config.snippet_cap { @@ -917,6 +917,39 @@ fn main() { ); } + #[test] + fn partial_fill_option_with_indentation() { + check_assist( + add_missing_match_arms, + r#" +//- minicore: option +fn main() { + match None$0 { + None => { + foo( + "foo", + "bar", + ); + } + } +} +"#, + r#" +fn main() { + match None { + None => { + foo( + "foo", + "bar", + ); + } + Some(${1:_}) => ${2:todo!()},$0 + } +} +"#, + ); + } + #[test] fn partial_fill_or_pat() { check_assist( From e9bba4f598c423125d3608effcfd2fd026b460ef Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Thu, 16 Oct 2025 13:08:27 +0300 Subject: [PATCH 26/76] Do not use `force-always-assert` in `xtask install` by default But add a flag to do so. --- src/tools/rust-analyzer/xtask/src/flags.rs | 11 ++++++++++- src/tools/rust-analyzer/xtask/src/install.rs | 16 ++++++++++++++-- 2 files changed, 24 insertions(+), 3 deletions(-) diff --git a/src/tools/rust-analyzer/xtask/src/flags.rs b/src/tools/rust-analyzer/xtask/src/flags.rs index 72f6215d4c3ff..8f70a1861893a 100644 --- a/src/tools/rust-analyzer/xtask/src/flags.rs +++ b/src/tools/rust-analyzer/xtask/src/flags.rs @@ -49,6 +49,9 @@ xflags::xflags! { /// build in release with debug info set to 2. optional --dev-rel + /// Make `never!()`, `always!()` etc. panic instead of just logging an error. 
+ optional --force-always-assert + /// Apply PGO optimizations optional --pgo pgo: PgoTrainingCrate } @@ -124,6 +127,7 @@ pub struct Install { pub jemalloc: bool, pub proc_macro_server: bool, pub dev_rel: bool, + pub force_always_assert: bool, pub pgo: Option, } @@ -300,7 +304,12 @@ impl Install { } else { Malloc::System }; - Some(ServerOpt { malloc, dev_rel: self.dev_rel, pgo: self.pgo.clone() }) + Some(ServerOpt { + malloc, + dev_rel: self.dev_rel, + pgo: self.pgo.clone(), + force_always_assert: self.force_always_assert, + }) } pub(crate) fn proc_macro_server(&self) -> Option { if !self.proc_macro_server { diff --git a/src/tools/rust-analyzer/xtask/src/install.rs b/src/tools/rust-analyzer/xtask/src/install.rs index b794f53e761e9..975e361ba50b5 100644 --- a/src/tools/rust-analyzer/xtask/src/install.rs +++ b/src/tools/rust-analyzer/xtask/src/install.rs @@ -39,6 +39,18 @@ pub(crate) struct ServerOpt { pub(crate) malloc: Malloc, pub(crate) dev_rel: bool, pub(crate) pgo: Option, + pub(crate) force_always_assert: bool, +} + +impl ServerOpt { + fn to_features(&self) -> Vec<&'static str> { + let mut features = Vec::new(); + features.extend(self.malloc.to_features()); + if self.force_always_assert { + features.extend(["--features", "force-always-assert"]); + } + features + } } pub(crate) struct ProcMacroServerOpt { @@ -136,7 +148,7 @@ fn install_client(sh: &Shell, client_opt: ClientOpt) -> anyhow::Result<()> { } fn install_server(sh: &Shell, opts: ServerOpt) -> anyhow::Result<()> { - let features = opts.malloc.to_features(); + let features = &opts.to_features(); let profile = if opts.dev_rel { "dev-rel" } else { "release" }; let mut install_cmd = cmd!( @@ -148,7 +160,7 @@ fn install_server(sh: &Shell, opts: ServerOpt) -> anyhow::Result<()> { let target = detect_target(sh); let build_cmd = cmd!( sh, - "cargo build --manifest-path ./crates/rust-analyzer/Cargo.toml --bin rust-analyzer --target {target} --profile={profile} --locked --features force-always-assert {features...}" + "cargo build --manifest-path ./crates/rust-analyzer/Cargo.toml --bin rust-analyzer --target {target} --profile={profile} --locked {features...}" ); let profile = crate::pgo::gather_pgo_profile(sh, build_cmd, &target, train_crate)?; From 468150578ff2e9f9eb9d6a7cadd16f76c98b8285 Mon Sep 17 00:00:00 2001 From: Ed Page Date: Thu, 16 Oct 2025 10:58:31 -0500 Subject: [PATCH 27/76] refactor(parser): Push higher level content --- src/tools/rust-analyzer/crates/parser/src/lexed_str.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs index edc3f406a67e8..c2d26b5dacf2e 100644 --- a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs +++ b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs @@ -38,8 +38,7 @@ impl<'a> LexedStr<'a> { let _p = tracing::info_span!("LexedStr::new").entered(); let mut conv = Converter::new(edition, text); if let Some(shebang_len) = rustc_lexer::strip_shebang(text) { - conv.res.push(SHEBANG, conv.offset); - conv.offset = shebang_len; + conv.push(SHEBANG, shebang_len, Vec::new()); }; // Re-create the tokenizer from scratch every token because `GuardedStrPrefix` is one token in the lexer From 223e141085c6e866752763fe54809cb9bdd40aa0 Mon Sep 17 00:00:00 2001 From: Ed Page Date: Thu, 16 Oct 2025 11:06:41 -0500 Subject: [PATCH 28/76] test(parser): Show current frontmatter behavior --- .../test_data/lexer/ok/frontmatter.rast | 29 ++++++++++++++++++ 
.../parser/test_data/lexer/ok/frontmatter.rs | 8 +++++ .../lexer/ok/shebang_frontmatter.rast | 30 +++++++++++++++++++ .../test_data/lexer/ok/shebang_frontmatter.rs | 9 ++++++ 4 files changed, 76 insertions(+) create mode 100644 src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/frontmatter.rast create mode 100644 src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/frontmatter.rs create mode 100644 src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/shebang_frontmatter.rast create mode 100644 src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/shebang_frontmatter.rs diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/frontmatter.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/frontmatter.rast new file mode 100644 index 0000000000000..94fbd3ebefe6b --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/frontmatter.rast @@ -0,0 +1,29 @@ +WHITESPACE "\n" +MINUS "-" +MINUS "-" +MINUS "-" +WHITESPACE "\n" +L_BRACK "[" +IDENT "dependencies" +R_BRACK "]" +WHITESPACE "\n" +IDENT "clap" +WHITESPACE " " +EQ "=" +WHITESPACE " " +STRING "\"4\"" +WHITESPACE "\n" +MINUS "-" +MINUS "-" +MINUS "-" +WHITESPACE "\n\n" +FN_KW "fn" +WHITESPACE " " +IDENT "main" +L_PAREN "(" +R_PAREN ")" +WHITESPACE " " +L_CURLY "{" +WHITESPACE "\n" +R_CURLY "}" +WHITESPACE "\n" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/frontmatter.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/frontmatter.rs new file mode 100644 index 0000000000000..be7bf74fdba22 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/frontmatter.rs @@ -0,0 +1,8 @@ + +--- +[dependencies] +clap = "4" +--- + +fn main() { +} diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/shebang_frontmatter.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/shebang_frontmatter.rast new file mode 100644 index 0000000000000..8b1344a1b830d --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/shebang_frontmatter.rast @@ -0,0 +1,30 @@ +SHEBANG "#!/usr/bin/env cargo" +WHITESPACE "\n\n" +MINUS "-" +MINUS "-" +MINUS "-" +WHITESPACE "\n" +L_BRACK "[" +IDENT "dependencies" +R_BRACK "]" +WHITESPACE "\n" +IDENT "clap" +WHITESPACE " " +EQ "=" +WHITESPACE " " +STRING "\"4\"" +WHITESPACE "\n" +MINUS "-" +MINUS "-" +MINUS "-" +WHITESPACE "\n\n" +FN_KW "fn" +WHITESPACE " " +IDENT "main" +L_PAREN "(" +R_PAREN ")" +WHITESPACE " " +L_CURLY "{" +WHITESPACE "\n" +R_CURLY "}" +WHITESPACE "\n" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/shebang_frontmatter.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/shebang_frontmatter.rs new file mode 100644 index 0000000000000..090b7713feb35 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/shebang_frontmatter.rs @@ -0,0 +1,9 @@ +#!/usr/bin/env cargo + +--- +[dependencies] +clap = "4" +--- + +fn main() { +} From 77d9b8ec15e6a5c7cd80af2c4954cd8e2f3b890d Mon Sep 17 00:00:00 2001 From: Ed Page Date: Thu, 16 Oct 2025 11:01:12 -0500 Subject: [PATCH 29/76] feat(parser): Don't error on frontmatter --- src/tools/rust-analyzer/Cargo.lock | 1 + .../rust-analyzer/crates/parser/Cargo.toml | 1 + .../crates/parser/src/frontmatter.rs | 348 ++++++++++++++++++ .../crates/parser/src/lexed_str.rs | 12 +- .../rust-analyzer/crates/parser/src/lib.rs | 1 + .../test_data/lexer/ok/frontmatter.rast | 19 +- .../lexer/ok/shebang_frontmatter.rast | 21 +- .../lexer/ok/single_line_comments.rast | 3 +- 
.../parser/err/0002_duplicate_shebang.rast | 3 +- src/tools/rust-analyzer/xtask/src/tidy.rs | 2 +- 10 files changed, 367 insertions(+), 44 deletions(-) create mode 100644 src/tools/rust-analyzer/crates/parser/src/frontmatter.rs diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index 55e5bdc138d9c..027d38df2cdf0 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -1579,6 +1579,7 @@ dependencies = [ "rustc-literal-escaper 0.0.4", "stdx", "tracing", + "winnow", ] [[package]] diff --git a/src/tools/rust-analyzer/crates/parser/Cargo.toml b/src/tools/rust-analyzer/crates/parser/Cargo.toml index c7da654de6d98..8384d5bec21a9 100644 --- a/src/tools/rust-analyzer/crates/parser/Cargo.toml +++ b/src/tools/rust-analyzer/crates/parser/Cargo.toml @@ -19,6 +19,7 @@ rustc-literal-escaper.workspace = true tracing = { workspace = true, optional = true } edition.workspace = true +winnow = { version = "0.7.13", default-features = false } [dev-dependencies] expect-test = "1.5.1" diff --git a/src/tools/rust-analyzer/crates/parser/src/frontmatter.rs b/src/tools/rust-analyzer/crates/parser/src/frontmatter.rs new file mode 100644 index 0000000000000..2747db4327c56 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/src/frontmatter.rs @@ -0,0 +1,348 @@ +// Copied from https://github.com/rust-lang/cargo/blob/367fd9f213750cd40317803dd0a5a3ce3f0c676d/src/cargo/util/frontmatter.rs +#![expect(dead_code)] // avoid editing +#![expect(unreachable_pub)] // avoid editing +#![expect(clippy::useless_format)] // avoid editing + +type Span = std::ops::Range; + +#[derive(Debug)] +pub struct ScriptSource<'s> { + /// The full file + raw: &'s str, + /// The `#!/usr/bin/env cargo` line, if present + shebang: Option, + /// The code fence opener (`---`) + open: Option, + /// Trailing text after `ScriptSource::open` that identifies the meaning of + /// `ScriptSource::frontmatter` + info: Option, + /// The lines between `ScriptSource::open` and `ScriptSource::close` + frontmatter: Option, + /// The code fence closer (`---`) + close: Option, + /// All content after the frontmatter and shebang + content: Span, +} + +impl<'s> ScriptSource<'s> { + pub fn parse(raw: &'s str) -> Result { + use winnow::stream::FindSlice as _; + use winnow::stream::Location as _; + use winnow::stream::Offset as _; + use winnow::stream::Stream as _; + + let content_end = raw.len(); + let mut source = Self { + raw, + shebang: None, + open: None, + info: None, + frontmatter: None, + close: None, + content: 0..content_end, + }; + + let mut input = winnow::stream::LocatingSlice::new(raw); + + if let Some(shebang_end) = strip_shebang(input.as_ref()) { + let shebang_start = input.current_token_start(); + let _ = input.next_slice(shebang_end); + let shebang_end = input.current_token_start(); + source.shebang = Some(shebang_start..shebang_end); + source.content = shebang_end..content_end; + } + + // Whitespace may precede a frontmatter but must end with a newline + if let Some(nl_end) = strip_ws_lines(input.as_ref()) { + let _ = input.next_slice(nl_end); + } + + // Opens with a line that starts with 3 or more `-` followed by an optional identifier + const FENCE_CHAR: char = '-'; + let fence_length = input + .as_ref() + .char_indices() + .find_map(|(i, c)| (c != FENCE_CHAR).then_some(i)) + .unwrap_or_else(|| input.eof_offset()); + let open_start = input.current_token_start(); + let fence_pattern = input.next_slice(fence_length); + let open_end = input.current_token_start(); + match 
fence_length { + 0 => { + return Ok(source); + } + 1 | 2 => { + // either not a frontmatter or invalid frontmatter opening + return Err(FrontmatterError::new( + format!( + "found {fence_length} `{FENCE_CHAR}` in rust frontmatter, expected at least 3" + ), + raw.len()..raw.len(), + ).push_visible_span(open_start..open_end)); + } + _ => {} + } + source.open = Some(open_start..open_end); + let Some(info_nl) = input.find_slice("\n") else { + return Err(FrontmatterError::new( + format!("unclosed frontmatter; expected `{fence_pattern}`"), + raw.len()..raw.len(), + ) + .push_visible_span(open_start..open_end)); + }; + let info = input.next_slice(info_nl.start); + let info = info.strip_suffix('\r').unwrap_or(info); // already excludes `\n` + let info = info.trim_matches(is_horizontal_whitespace); + if !info.is_empty() { + let info_start = info.offset_from(&raw); + let info_end = info_start + info.len(); + source.info = Some(info_start..info_end); + } + + // Ends with a line that starts with a matching number of `-` only followed by whitespace + let nl_fence_pattern = format!("\n{fence_pattern}"); + let Some(frontmatter_nl) = input.find_slice(nl_fence_pattern.as_str()) else { + for len in (2..(nl_fence_pattern.len() - 1)).rev() { + let Some(frontmatter_nl) = input.find_slice(&nl_fence_pattern[0..len]) else { + continue; + }; + let _ = input.next_slice(frontmatter_nl.start + 1); + let close_start = input.current_token_start(); + let _ = input.next_slice(len); + let close_end = input.current_token_start(); + let fewer_dashes = fence_length - len; + return Err(FrontmatterError::new( + format!( + "closing code fence has {fewer_dashes} less `-` than the opening fence" + ), + close_start..close_end, + ) + .push_visible_span(open_start..open_end)); + } + return Err(FrontmatterError::new( + format!("unclosed frontmatter; expected `{fence_pattern}`"), + raw.len()..raw.len(), + ) + .push_visible_span(open_start..open_end)); + }; + let frontmatter_start = input.current_token_start() + 1; // skip nl from infostring + let _ = input.next_slice(frontmatter_nl.start + 1); + let frontmatter_end = input.current_token_start(); + source.frontmatter = Some(frontmatter_start..frontmatter_end); + let close_start = input.current_token_start(); + let _ = input.next_slice(fence_length); + let close_end = input.current_token_start(); + source.close = Some(close_start..close_end); + + let nl = input.find_slice("\n"); + let after_closing_fence = + input.next_slice(nl.map(|span| span.end).unwrap_or_else(|| input.eof_offset())); + let content_start = input.current_token_start(); + let extra_dashes = after_closing_fence.chars().take_while(|b| *b == FENCE_CHAR).count(); + if 0 < extra_dashes { + let extra_start = close_end; + let extra_end = extra_start + extra_dashes; + return Err(FrontmatterError::new( + format!("closing code fence has {extra_dashes} more `-` than the opening fence"), + extra_start..extra_end, + ) + .push_visible_span(open_start..open_end)); + } else { + let after_closing_fence = strip_newline(after_closing_fence); + let after_closing_fence = after_closing_fence.trim_matches(is_horizontal_whitespace); + if !after_closing_fence.is_empty() { + // extra characters beyond the original fence pattern + let after_start = after_closing_fence.offset_from(&raw); + let after_end = after_start + after_closing_fence.len(); + return Err(FrontmatterError::new( + format!("unexpected characters after frontmatter close"), + after_start..after_end, + ) + .push_visible_span(open_start..open_end)); + } + } + + source.content = 
content_start..content_end; + + if let Some(nl_end) = strip_ws_lines(input.as_ref()) { + let _ = input.next_slice(nl_end); + } + let fence_length = input + .as_ref() + .char_indices() + .find_map(|(i, c)| (c != FENCE_CHAR).then_some(i)) + .unwrap_or_else(|| input.eof_offset()); + if 0 < fence_length { + let fence_start = input.current_token_start(); + let fence_end = fence_start + fence_length; + return Err(FrontmatterError::new( + format!("only one frontmatter is supported"), + fence_start..fence_end, + ) + .push_visible_span(open_start..open_end) + .push_visible_span(close_start..close_end)); + } + + Ok(source) + } + + pub fn shebang(&self) -> Option<&'s str> { + self.shebang.clone().map(|span| &self.raw[span]) + } + + pub fn shebang_span(&self) -> Option { + self.shebang.clone() + } + + pub fn open_span(&self) -> Option { + self.open.clone() + } + + pub fn info(&self) -> Option<&'s str> { + self.info.clone().map(|span| &self.raw[span]) + } + + pub fn info_span(&self) -> Option { + self.info.clone() + } + + pub fn frontmatter(&self) -> Option<&'s str> { + self.frontmatter.clone().map(|span| &self.raw[span]) + } + + pub fn frontmatter_span(&self) -> Option { + self.frontmatter.clone() + } + + pub fn close_span(&self) -> Option { + self.close.clone() + } + + pub fn content(&self) -> &'s str { + &self.raw[self.content.clone()] + } + + pub fn content_span(&self) -> Span { + self.content.clone() + } +} + +/// Returns the index after the shebang line, if present +pub fn strip_shebang(input: &str) -> Option { + // See rust-lang/rust's compiler/rustc_lexer/src/lib.rs's `strip_shebang` + // Shebang must start with `#!` literally, without any preceding whitespace. + // For simplicity we consider any line starting with `#!` a shebang, + // regardless of restrictions put on shebangs by specific platforms. + if let Some(rest) = input.strip_prefix("#!") { + // Ok, this is a shebang but if the next non-whitespace token is `[`, + // then it may be valid Rust code, so consider it Rust code. + // + // NOTE: rustc considers line and block comments to be whitespace but to avoid + // any more awareness of Rust grammar, we are excluding it. + if !rest.trim_start().starts_with('[') { + // No other choice than to consider this a shebang. + let newline_end = input.find('\n').map(|pos| pos + 1).unwrap_or(input.len()); + return Some(newline_end); + } + } + None +} + +/// Returns the index after any lines with only whitespace, if present +pub fn strip_ws_lines(input: &str) -> Option { + let ws_end = input.find(|c| !is_whitespace(c)).unwrap_or(input.len()); + if ws_end == 0 { + return None; + } + + let nl_start = input[0..ws_end].rfind('\n')?; + let nl_end = nl_start + 1; + Some(nl_end) +} + +/// True if `c` is considered a whitespace according to Rust language definition. +/// See [Rust language reference](https://doc.rust-lang.org/reference/whitespace.html) +/// for definitions of these classes. +fn is_whitespace(c: char) -> bool { + // This is Pattern_White_Space. + // + // Note that this set is stable (ie, it doesn't change with different + // Unicode versions), so it's ok to just hard-code the values. 
+ + matches!( + c, + // End-of-line characters + | '\u{000A}' // line feed (\n) + | '\u{000B}' // vertical tab + | '\u{000C}' // form feed + | '\u{000D}' // carriage return (\r) + | '\u{0085}' // next line (from latin1) + | '\u{2028}' // LINE SEPARATOR + | '\u{2029}' // PARAGRAPH SEPARATOR + + // `Default_Ignorable_Code_Point` characters + | '\u{200E}' // LEFT-TO-RIGHT MARK + | '\u{200F}' // RIGHT-TO-LEFT MARK + + // Horizontal space characters + | '\u{0009}' // tab (\t) + | '\u{0020}' // space + ) +} + +/// True if `c` is considered horizontal whitespace according to Rust language definition. +fn is_horizontal_whitespace(c: char) -> bool { + // This is Pattern_White_Space. + // + // Note that this set is stable (ie, it doesn't change with different + // Unicode versions), so it's ok to just hard-code the values. + + matches!( + c, + // Horizontal space characters + '\u{0009}' // tab (\t) + | '\u{0020}' // space + ) +} + +fn strip_newline(text: &str) -> &str { + text.strip_suffix("\r\n").or_else(|| text.strip_suffix('\n')).unwrap_or(text) +} + +#[derive(Debug)] +pub struct FrontmatterError { + message: String, + primary_span: Span, + visible_spans: Vec, +} + +impl FrontmatterError { + pub fn new(message: impl Into, span: Span) -> Self { + Self { message: message.into(), primary_span: span, visible_spans: Vec::new() } + } + + pub fn push_visible_span(mut self, span: Span) -> Self { + self.visible_spans.push(span); + self + } + + pub fn message(&self) -> &str { + self.message.as_str() + } + + pub fn primary_span(&self) -> Span { + self.primary_span.clone() + } + + pub fn visible_spans(&self) -> &[Span] { + &self.visible_spans + } +} + +impl std::fmt::Display for FrontmatterError { + fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.message.fmt(fmt) + } +} + +impl std::error::Error for FrontmatterError {} diff --git a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs index c2d26b5dacf2e..7c78ba8faf5f4 100644 --- a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs +++ b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs @@ -37,9 +37,17 @@ impl<'a> LexedStr<'a> { pub fn new(edition: Edition, text: &'a str) -> LexedStr<'a> { let _p = tracing::info_span!("LexedStr::new").entered(); let mut conv = Converter::new(edition, text); - if let Some(shebang_len) = rustc_lexer::strip_shebang(text) { + if let Ok(script) = crate::frontmatter::ScriptSource::parse(text) { + if let Some(shebang) = script.shebang_span() { + conv.push(SHEBANG, shebang.end - shebang.start, Vec::new()); + } + if script.frontmatter().is_some() { + conv.push(FRONTMATTER, script.content_span().start - conv.offset, Vec::new()); + } + } else if let Some(shebang_len) = rustc_lexer::strip_shebang(text) { + // Leave error reporting to `rustc_lexer` conv.push(SHEBANG, shebang_len, Vec::new()); - }; + } // Re-create the tokenizer from scratch every token because `GuardedStrPrefix` is one token in the lexer // but we want to split it to two in edition <2024. 
diff --git a/src/tools/rust-analyzer/crates/parser/src/lib.rs b/src/tools/rust-analyzer/crates/parser/src/lib.rs index 7963f00bb25ce..53444ef52cff1 100644 --- a/src/tools/rust-analyzer/crates/parser/src/lib.rs +++ b/src/tools/rust-analyzer/crates/parser/src/lib.rs @@ -26,6 +26,7 @@ extern crate ra_ap_rustc_lexer as rustc_lexer; extern crate rustc_lexer; mod event; +mod frontmatter; mod grammar; mod input; mod lexed_str; diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/frontmatter.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/frontmatter.rast index 94fbd3ebefe6b..2c7d3cdb1227c 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/frontmatter.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/frontmatter.rast @@ -1,22 +1,5 @@ +FRONTMATTER "\n---\n[dependencies]\nclap = \"4\"\n---\n" WHITESPACE "\n" -MINUS "-" -MINUS "-" -MINUS "-" -WHITESPACE "\n" -L_BRACK "[" -IDENT "dependencies" -R_BRACK "]" -WHITESPACE "\n" -IDENT "clap" -WHITESPACE " " -EQ "=" -WHITESPACE " " -STRING "\"4\"" -WHITESPACE "\n" -MINUS "-" -MINUS "-" -MINUS "-" -WHITESPACE "\n\n" FN_KW "fn" WHITESPACE " " IDENT "main" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/shebang_frontmatter.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/shebang_frontmatter.rast index 8b1344a1b830d..fb4787f4001fb 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/shebang_frontmatter.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/shebang_frontmatter.rast @@ -1,23 +1,6 @@ -SHEBANG "#!/usr/bin/env cargo" -WHITESPACE "\n\n" -MINUS "-" -MINUS "-" -MINUS "-" +SHEBANG "#!/usr/bin/env cargo\n" +FRONTMATTER "\n---\n[dependencies]\nclap = \"4\"\n---\n" WHITESPACE "\n" -L_BRACK "[" -IDENT "dependencies" -R_BRACK "]" -WHITESPACE "\n" -IDENT "clap" -WHITESPACE " " -EQ "=" -WHITESPACE " " -STRING "\"4\"" -WHITESPACE "\n" -MINUS "-" -MINUS "-" -MINUS "-" -WHITESPACE "\n\n" FN_KW "fn" WHITESPACE " " IDENT "main" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.rast index a7681e9f5086a..c4e531b449f7b 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/single_line_comments.rast @@ -1,5 +1,4 @@ -SHEBANG "#!/usr/bin/env bash" -WHITESPACE "\n" +SHEBANG "#!/usr/bin/env bash\n" COMMENT "// hello" WHITESPACE "\n" COMMENT "//! World" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast index 3159a15a3b1c7..7ee1ecfbb1591 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0002_duplicate_shebang.rast @@ -1,6 +1,5 @@ SOURCE_FILE - SHEBANG "#!/use/bin/env rusti" - WHITESPACE "\n" + SHEBANG "#!/use/bin/env rusti\n" ATTR POUND "#" BANG "!" 
diff --git a/src/tools/rust-analyzer/xtask/src/tidy.rs b/src/tools/rust-analyzer/xtask/src/tidy.rs index 0462835f0675a..40997eb93d351 100644 --- a/src/tools/rust-analyzer/xtask/src/tidy.rs +++ b/src/tools/rust-analyzer/xtask/src/tidy.rs @@ -259,7 +259,7 @@ impl TidyDocs { } fn is_exclude_file(d: &Path) -> bool { - let file_names = ["tests.rs", "famous_defs_fixture.rs"]; + let file_names = ["tests.rs", "famous_defs_fixture.rs", "frontmatter.rs"]; d.file_name() .unwrap_or_default() From db6734e22f2b4d7370ca6d588bb4b097abd3cfec Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Thu, 16 Oct 2025 19:34:14 +0300 Subject: [PATCH 30/76] Improve fixture support Support more features beside highlighting, and support items from minicore. --- src/tools/rust-analyzer/Cargo.lock | 4 + .../crates/ide-completion/Cargo.toml | 1 + .../crates/ide-completion/src/completions.rs | 5 + .../src/completions/ra_fixture.rs | 113 ++++ .../crates/ide-completion/src/config.rs | 5 +- .../crates/ide-completion/src/context.rs | 3 + .../ide-completion/src/context/tests.rs | 2 +- .../crates/ide-completion/src/item.rs | 3 +- .../crates/ide-completion/src/lib.rs | 3 +- .../crates/ide-completion/src/tests.rs | 3 +- .../ide-completion/src/tests/flyimport.rs | 3 +- .../rust-analyzer/crates/ide-db/Cargo.toml | 9 +- .../rust-analyzer/crates/ide-db/src/lib.rs | 26 + .../crates/ide-db/src/ra_fixture.rs | 532 ++++++++++++++++++ .../crates/ide-db/src/range_mapper.rs | 65 +++ .../crates/ide-db/src/source_change.rs | 3 +- .../crates/ide-db/src/text_edit.rs | 5 +- .../crates/ide-diagnostics/src/tests.rs | 6 +- src/tools/rust-analyzer/crates/ide/Cargo.toml | 1 + .../crates/ide/src/annotations.rs | 30 +- .../crates/ide/src/call_hierarchy.rs | 39 +- .../crates/ide/src/goto_declaration.rs | 20 +- .../crates/ide/src/goto_definition.rs | 146 +++-- .../rust-analyzer/crates/ide/src/hover.rs | 61 +- .../crates/ide/src/hover/render.rs | 22 +- .../crates/ide/src/hover/tests.rs | 5 +- .../crates/ide/src/inlay_hints.rs | 48 +- .../crates/ide/src/inlay_hints/adjustment.rs | 2 +- .../crates/ide/src/inlay_hints/bind_pat.rs | 2 +- .../ide/src/inlay_hints/binding_mode.rs | 2 +- .../crates/ide/src/inlay_hints/bounds.rs | 2 +- .../crates/ide/src/inlay_hints/chaining.rs | 4 +- .../ide/src/inlay_hints/closing_brace.rs | 2 +- .../ide/src/inlay_hints/closure_captures.rs | 2 +- .../crates/ide/src/inlay_hints/closure_ret.rs | 2 +- .../ide/src/inlay_hints/discriminant.rs | 4 +- .../ide/src/inlay_hints/extern_block.rs | 8 +- .../ide/src/inlay_hints/generic_param.rs | 2 +- .../ide/src/inlay_hints/implicit_drop.rs | 4 +- .../ide/src/inlay_hints/implicit_static.rs | 2 +- .../ide/src/inlay_hints/implied_dyn_trait.rs | 2 +- .../crates/ide/src/inlay_hints/lifetime.rs | 8 +- .../crates/ide/src/inlay_hints/param_name.rs | 2 +- .../crates/ide/src/inlay_hints/ra_fixture.rs | 32 ++ .../ide/src/inlay_hints/range_exclusive.rs | 2 +- src/tools/rust-analyzer/crates/ide/src/lib.rs | 109 ++-- .../rust-analyzer/crates/ide/src/markup.rs | 4 + .../crates/ide/src/navigation_target.rs | 39 ++ .../crates/ide/src/references.rs | 48 +- .../rust-analyzer/crates/ide/src/runnables.rs | 6 +- .../crates/ide/src/static_index.rs | 4 +- .../crates/ide/src/syntax_highlighting.rs | 25 +- .../ide/src/syntax_highlighting/html.rs | 6 +- .../ide/src/syntax_highlighting/inject.rs | 164 +++--- .../ide/src/syntax_highlighting/injector.rs | 77 --- .../test_data/highlight_injection.html | 21 +- .../test_data/highlight_injection_2.html | 61 ++ .../ide/src/syntax_highlighting/tests.rs | 39 +- 
.../rust-analyzer/crates/macros/src/lib.rs | 39 ++ .../rust-analyzer/src/cli/analysis_stats.rs | 28 +- .../crates/rust-analyzer/src/config.rs | 53 +- .../crates/rust-analyzer/src/global_state.rs | 25 +- .../rust-analyzer/src/handlers/request.rs | 68 ++- .../src/integrated_benchmarks.rs | 5 +- .../crates/rust-analyzer/src/lsp/to_proto.rs | 27 +- .../crates/rust-analyzer/src/main_loop.rs | 7 + .../crates/syntax/src/ast/token_ext.rs | 4 + .../crates/test-fixture/src/lib.rs | 26 +- .../crates/test-utils/src/fixture.rs | 32 +- 69 files changed, 1652 insertions(+), 512 deletions(-) create mode 100644 src/tools/rust-analyzer/crates/ide-completion/src/completions/ra_fixture.rs create mode 100644 src/tools/rust-analyzer/crates/ide-db/src/ra_fixture.rs create mode 100644 src/tools/rust-analyzer/crates/ide-db/src/range_mapper.rs create mode 100644 src/tools/rust-analyzer/crates/ide/src/inlay_hints/ra_fixture.rs delete mode 100644 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/injector.rs create mode 100644 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection_2.html diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index 55e5bdc138d9c..539f8cf1b9330 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -930,6 +930,7 @@ dependencies = [ "ide-diagnostics", "ide-ssr", "itertools", + "macros", "nohash-hasher", "oorandom", "profile", @@ -976,6 +977,7 @@ dependencies = [ "hir", "ide-db", "itertools", + "macros", "smallvec", "stdx", "syntax", @@ -1000,6 +1002,7 @@ dependencies = [ "indexmap", "itertools", "line-index 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "macros", "memchr", "nohash-hasher", "parser", @@ -1009,6 +1012,7 @@ dependencies = [ "rustc-hash 2.1.1", "salsa", "salsa-macros", + "smallvec", "span", "stdx", "syntax", diff --git a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml index 9bad21fc8e90e..277d5dfa495c5 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml @@ -28,6 +28,7 @@ syntax.workspace = true # completions crate should depend only on the top-level `hir` package. if you need # something from some `hir-xxx` subpackage, reexport the API via `hir`. 
hir.workspace = true +macros.workspace = true [dev-dependencies] expect-test = "1.5.1" diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs index b822f53d7b7b7..ed58e862d437f 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs @@ -16,6 +16,7 @@ pub(crate) mod lifetime; pub(crate) mod mod_; pub(crate) mod pattern; pub(crate) mod postfix; +pub(crate) mod ra_fixture; pub(crate) mod record; pub(crate) mod snippet; pub(crate) mod r#type; @@ -74,6 +75,10 @@ impl Completions { self.buf.push(item) } + fn add_many(&mut self, items: impl IntoIterator) { + self.buf.extend(items) + } + fn add_opt(&mut self, item: Option) { if let Some(item) = item { self.buf.push(item) diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/ra_fixture.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/ra_fixture.rs new file mode 100644 index 0000000000000..b44c90757f687 --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/ra_fixture.rs @@ -0,0 +1,113 @@ +//! Injected completions for `#[rust_analyzer::rust_fixture]`. + +use hir::FilePositionWrapper; +use ide_db::{ + impl_empty_upmap_from_ra_fixture, + ra_fixture::{RaFixtureAnalysis, UpmapFromRaFixture}, +}; +use syntax::ast; + +use crate::{ + CompletionItemKind, CompletionItemRefMode, CompletionRelevance, completions::Completions, + context::CompletionContext, item::CompletionItemLabel, +}; + +pub(crate) fn complete_ra_fixture( + acc: &mut Completions, + ctx: &CompletionContext<'_>, + original: &ast::String, + expanded: &ast::String, +) -> Option<()> { + let analysis = RaFixtureAnalysis::analyze_ra_fixture( + &ctx.sema, + original.clone(), + expanded, + ctx.config.minicore, + &mut |_| {}, + )?; + let (virtual_file_id, virtual_offset) = analysis.map_offset_down(ctx.position.offset)?; + let completions = hir::attach_db_allow_change(&analysis.db, || { + crate::completions( + &analysis.db, + ctx.config, + FilePositionWrapper { file_id: virtual_file_id, offset: virtual_offset }, + ctx.trigger_character, + ) + })?; + let completions = + completions.upmap_from_ra_fixture(&analysis, virtual_file_id, ctx.position.file_id).ok()?; + acc.add_many(completions); + Some(()) +} + +impl_empty_upmap_from_ra_fixture!( + CompletionItemLabel, + CompletionItemKind, + CompletionRelevance, + CompletionItemRefMode, +); + +#[cfg(test)] +mod tests { + use expect_test::expect; + + use crate::tests::check; + + #[test] + fn it_works() { + check( + r##" +fn fixture(#[rust_analyzer::rust_fixture] ra_fixture: &str) {} + +fn foo() { + fixture(r#" +fn complete_me() {} + +fn baz() { + let foo_bar_baz = 123; + f$0 +} + "#); +} + "##, + expect![[r#" + fn baz() fn() + fn complete_me() fn() + lc foo_bar_baz i32 + bt u32 u32 + kw async + kw const + kw crate:: + kw enum + kw extern + kw false + kw fn + kw for + kw if + kw if let + kw impl + kw impl for + kw let + kw letm + kw loop + kw match + kw mod + kw return + kw self:: + kw static + kw struct + kw trait + kw true + kw type + kw union + kw unsafe + kw use + kw while + kw while let + sn macro_rules + sn pd + sn ppd + "#]], + ); + } +} diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs index b7367cb62f099..5623257a2792a 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs +++ 
b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs @@ -6,13 +6,13 @@ use hir::FindPathConfig; use ide_db::{ - SnippetCap, + MiniCore, SnippetCap, imports::{import_assets::ImportPathConfig, insert_use::InsertUseConfig}, }; use crate::{CompletionFieldsToResolve, snippet::Snippet}; -#[derive(Clone, Debug, PartialEq, Eq)] +#[derive(Clone, Debug)] pub struct CompletionConfig<'a> { pub enable_postfix_completions: bool, pub enable_imports_on_the_fly: bool, @@ -35,6 +35,7 @@ pub struct CompletionConfig<'a> { pub fields_to_resolve: CompletionFieldsToResolve, pub exclude_flyimport: Vec<(String, AutoImportExclusionType)>, pub exclude_traits: &'a [String], + pub minicore: MiniCore<'a>, } #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs index 4032329ac658e..fc2cc3b796ec9 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs @@ -440,6 +440,7 @@ pub(crate) struct CompletionContext<'a> { pub(crate) config: &'a CompletionConfig<'a>, pub(crate) position: FilePosition, + pub(crate) trigger_character: Option, /// The token before the cursor, in the original file. pub(crate) original_token: SyntaxToken, /// The token before the cursor, in the macro-expanded file. @@ -703,6 +704,7 @@ impl<'db> CompletionContext<'db> { db: &'db RootDatabase, position @ FilePosition { file_id, offset }: FilePosition, config: &'db CompletionConfig<'db>, + trigger_character: Option, ) -> Option<(CompletionContext<'db>, CompletionAnalysis<'db>)> { let _p = tracing::info_span!("CompletionContext::new").entered(); let sema = Semantics::new(db); @@ -871,6 +873,7 @@ impl<'db> CompletionContext<'db> { db, config, position, + trigger_character, original_token, token, krate, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs index e798f3b23af4c..51d28bd4ff98c 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/tests.rs @@ -10,7 +10,7 @@ fn check_expected_type_and_name(#[rust_analyzer::rust_fixture] ra_fixture: &str, let (db, pos) = position(ra_fixture); let config = TEST_CONFIG; let (completion_context, _analysis) = - hir::attach_db(&db, || CompletionContext::new(&db, pos, &config).unwrap()); + hir::attach_db(&db, || CompletionContext::new(&db, pos, &config, None).unwrap()); let ty = completion_context .expected_type diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs index 5fb9dc93c93da..303c71230d606 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs @@ -9,6 +9,7 @@ use ide_db::{ imports::import_assets::LocatedImport, }; use itertools::Itertools; +use macros::UpmapFromRaFixture; use smallvec::SmallVec; use stdx::{format_to, impl_from, never}; use syntax::{Edition, SmolStr, TextRange, TextSize, format_smolstr}; @@ -23,7 +24,7 @@ use crate::{ /// /// It is basically a POD with various properties. To construct a [`CompletionItem`], /// use [`Builder::new`] method and the [`Builder`] struct. -#[derive(Clone)] +#[derive(Clone, UpmapFromRaFixture)] #[non_exhaustive] pub struct CompletionItem { /// Label in the completion pop up which identifies completion. 
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs index a70a1138d2f42..8a0aaf3f0cc26 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs @@ -187,7 +187,7 @@ pub fn completions( position: FilePosition, trigger_character: Option, ) -> Option> { - let (ctx, analysis) = &CompletionContext::new(db, position, config)?; + let (ctx, analysis) = &CompletionContext::new(db, position, config, trigger_character)?; let mut completions = Completions::default(); // prevent `(` from triggering unwanted completion noise @@ -241,6 +241,7 @@ pub fn completions( completions::extern_abi::complete_extern_abi(acc, ctx, expanded); completions::format_string::format_string(acc, ctx, original, expanded); completions::env_vars::complete_cargo_env_vars(acc, ctx, original, expanded); + completions::ra_fixture::complete_ra_fixture(acc, ctx, original, expanded); } CompletionAnalysis::UnexpandedAttrTT { colon_prefix, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs index ec9cd9fdf3782..b32a895457268 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs @@ -29,7 +29,7 @@ use expect_test::Expect; use hir::db::HirDatabase; use hir::{PrefixKind, setup_tracing}; use ide_db::{ - FilePosition, RootDatabase, SnippetCap, + FilePosition, MiniCore, RootDatabase, SnippetCap, imports::insert_use::{ImportGranularity, InsertUseConfig}, }; use itertools::Itertools; @@ -90,6 +90,7 @@ pub(crate) const TEST_CONFIG: CompletionConfig<'_> = CompletionConfig { exclude_traits: &[], enable_auto_await: true, enable_auto_iter: true, + minicore: MiniCore::default(), }; pub(crate) fn completion_list(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> String { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs index 2d3ebad9340c7..0cd42089b4875 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs @@ -16,7 +16,8 @@ fn check_with_config( expect: Expect, ) { let (db, position) = crate::tests::position(ra_fixture); - let (ctx, analysis) = crate::context::CompletionContext::new(&db, position, &config).unwrap(); + let (ctx, analysis) = + crate::context::CompletionContext::new(&db, position, &config, None).unwrap(); let mut acc = crate::completions::Completions::default(); hir::attach_db(ctx.db, || { diff --git a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml index e065adb0f0baa..b7148160182c5 100644 --- a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml @@ -30,6 +30,7 @@ query-group.workspace = true triomphe.workspace = true nohash-hasher.workspace = true bitflags.workspace = true +smallvec.workspace = true # local deps base-db.workspace = true @@ -42,15 +43,15 @@ vfs.workspace = true # ide should depend only on the top-level `hir` package. if you need # something from some `hir-xxx` subpackage, reexport the API via `hir`. 
 hir.workspace = true
+macros.workspace = true
+
+test-utils.workspace = true
+test-fixture.workspace = true
 line-index.workspace = true
 
 [dev-dependencies]
 expect-test = "1.5.1"
 
-# local deps
-test-utils.workspace = true
-test-fixture.workspace = true
-
 [lints]
 workspace = true
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
index 44bccd86d8709..7efa97be55732 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
@@ -2,6 +2,8 @@
 //!
 //! It is mainly a `HirDatabase` for semantic analysis, plus a `SymbolsDatabase`, for fuzzy search.
 
+extern crate self as ide_db;
+
 mod apply_change;
 
 pub mod active_parameter;
@@ -14,6 +16,8 @@ pub mod items_locator;
 pub mod label;
 pub mod path_transform;
 pub mod prime_caches;
+pub mod ra_fixture;
+pub mod range_mapper;
 pub mod rename;
 pub mod rust_doc;
 pub mod search;
@@ -364,3 +368,25 @@ pub enum Severity {
     WeakWarning,
     Allow,
 }
+
+#[derive(Debug, Clone, Copy)]
+pub struct MiniCore<'a>(&'a str);
+
+impl<'a> MiniCore<'a> {
+    #[inline]
+    pub fn new(minicore: &'a str) -> Self {
+        Self(minicore)
+    }
+
+    #[inline]
+    pub const fn default() -> Self {
+        Self(test_utils::MiniCore::RAW_SOURCE)
+    }
+}
+
+impl<'a> Default for MiniCore<'a> {
+    #[inline]
+    fn default() -> Self {
+        Self::default()
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/ra_fixture.rs b/src/tools/rust-analyzer/crates/ide-db/src/ra_fixture.rs
new file mode 100644
index 0000000000000..1f056a835bc62
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-db/src/ra_fixture.rs
@@ -0,0 +1,532 @@
+//! Working with the fixtures in r-a tests, and providing IDE services for them.
+
+use std::hash::{BuildHasher, Hash};
+
+use hir::{CfgExpr, FilePositionWrapper, FileRangeWrapper, Semantics};
+use smallvec::SmallVec;
+use span::{TextRange, TextSize};
+use syntax::{
+    AstToken, SmolStr,
+    ast::{self, IsString},
+};
+
+use crate::{
+    MiniCore, RootDatabase, SymbolKind, active_parameter::ActiveParameter,
+    documentation::Documentation, range_mapper::RangeMapper, search::ReferenceCategory,
+};
+
+pub use span::FileId;
+
+impl RootDatabase {
+    fn from_ra_fixture(
+        text: &str,
+        minicore: MiniCore<'_>,
+    ) -> Result<(RootDatabase, Vec<(FileId, usize)>, Vec<FileId>), ()> {
+        // We don't want a mistake in the fixture to crash r-a, so we wrap this in `catch_unwind()`.
+        std::panic::catch_unwind(|| {
+            let mut db = RootDatabase::default();
+            let fixture = test_fixture::ChangeFixture::parse_with_proc_macros(
+                &db,
+                text,
+                minicore.0,
+                Vec::new(),
+            );
+            db.apply_change(fixture.change);
+            let files = fixture
+                .files
+                .into_iter()
+                .zip(fixture.file_lines)
+                .map(|(file_id, range)| (file_id.file_id(&db), range))
+                .collect();
+            (db, files, fixture.sysroot_files)
+        })
+        .map_err(|error| {
+            tracing::error!(
+                "cannot create the crate graph: {}\nCrate graph:\n{}\n",
+                if let Some(&s) = error.downcast_ref::<&'static str>() {
+                    s
+                } else if let Some(s) = error.downcast_ref::<String>() {
+                    s.as_str()
+                } else {
+                    "Box<dyn Any>"
+                },
+                text,
+            );
+        })
+    }
+}
+
+pub struct RaFixtureAnalysis {
+    pub db: RootDatabase,
+    tmp_file_ids: Vec<(FileId, usize)>,
+    line_offsets: Vec<TextSize>,
+    virtual_file_id_to_line: Vec<usize>,
+    mapper: RangeMapper,
+    literal: ast::String,
+    // `minicore` etc..
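+    // `FileId`s of files injected from the fixture's sysroot (such as `minicore`); they have no
+    // lines in the string literal, so ranges inside them are never mapped back up.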
+    sysroot_files: Vec<FileId>,
+    combined_len: TextSize,
+}
+
+impl RaFixtureAnalysis {
+    pub fn analyze_ra_fixture(
+        sema: &Semantics<'_, RootDatabase>,
+        literal: ast::String,
+        expanded: &ast::String,
+        minicore: MiniCore<'_>,
+        on_cursor: &mut dyn FnMut(TextRange),
+    ) -> Option<Self> {
+        if !literal.is_raw() {
+            return None;
+        }
+
+        let active_parameter = ActiveParameter::at_token(sema, expanded.syntax().clone())?;
+        let has_rust_fixture_attr = active_parameter.attrs().is_some_and(|attrs| {
+            attrs.filter_map(|attr| attr.as_simple_path()).any(|path| {
+                path.segments()
+                    .zip(["rust_analyzer", "rust_fixture"])
+                    .all(|(seg, name)| seg.name_ref().map_or(false, |nr| nr.text() == name))
+            })
+        });
+        if !has_rust_fixture_attr {
+            return None;
+        }
+        let value = literal.value().ok()?;
+
+        let mut mapper = RangeMapper::default();
+
+        // This is used by the `RangeMapper` to resolve a precise location in the string literal,
+        // which will then be used to resolve a precise location in the enclosing file.
+        let mut offset_with_indent = TextSize::new(0);
+        // This is used to resolve a location relative to the virtual file into a location
+        // relative to the indentation-trimmed file, which will then (via the `RangeMapper`) be used
+        // to resolve it to a location in the actual file.
+        // Besides indentation, we also skip `$0` cursors for this, since they are not included
+        // in the virtual files.
+        let mut offset_without_indent = TextSize::new(0);
+
+        let mut text = &*value;
+        if let Some(t) = text.strip_prefix('\n') {
+            offset_with_indent += TextSize::of("\n");
+            text = t;
+        }
+        // This stores the offsets of each line, **after we remove indentation**.
+        let mut line_offsets = Vec::new();
+        for mut line in text.split_inclusive('\n') {
+            line_offsets.push(offset_without_indent);
+
+            if line.starts_with("@@") {
+                // Introducing `//` into a fixture inside a fixture causes all sorts of problems,
+                // so for testing purposes we escape it as `@@` and replace it here.
+                mapper.add("//", TextRange::at(offset_with_indent, TextSize::of("@@")));
+                line = &line["@@".len()..];
+                offset_with_indent += TextSize::of("@@");
+                offset_without_indent += TextSize::of("@@");
+            }
+
+            // Remove indentation to simplify the mapping with the fixture (which de-indents).
+            // Removing indentation shouldn't affect highlighting.
+            let mut unindented_line = line.trim_start();
+            if unindented_line.is_empty() {
+                // The whole line was whitespace, but we need the newline.
+ unindented_line = "\n"; + } + offset_with_indent += TextSize::of(line) - TextSize::of(unindented_line); + + let marker = "$0"; + match unindented_line.find(marker) { + Some(marker_pos) => { + let (before_marker, after_marker) = unindented_line.split_at(marker_pos); + let after_marker = &after_marker[marker.len()..]; + + mapper.add( + before_marker, + TextRange::at(offset_with_indent, TextSize::of(before_marker)), + ); + offset_with_indent += TextSize::of(before_marker); + offset_without_indent += TextSize::of(before_marker); + + if let Some(marker_range) = literal + .map_range_up(TextRange::at(offset_with_indent, TextSize::of(marker))) + { + on_cursor(marker_range); + } + offset_with_indent += TextSize::of(marker); + + mapper.add( + after_marker, + TextRange::at(offset_with_indent, TextSize::of(after_marker)), + ); + offset_with_indent += TextSize::of(after_marker); + offset_without_indent += TextSize::of(after_marker); + } + None => { + mapper.add( + unindented_line, + TextRange::at(offset_with_indent, TextSize::of(unindented_line)), + ); + offset_with_indent += TextSize::of(unindented_line); + offset_without_indent += TextSize::of(unindented_line); + } + } + } + + let combined = mapper.take_text(); + let combined_len = TextSize::of(&combined); + let (analysis, tmp_file_ids, sysroot_files) = + RootDatabase::from_ra_fixture(&combined, minicore).ok()?; + + // We use a `Vec` because we know the `FileId`s will always be close. + let mut virtual_file_id_to_line = Vec::new(); + for &(file_id, line) in &tmp_file_ids { + virtual_file_id_to_line.resize(file_id.index() as usize + 1, usize::MAX); + virtual_file_id_to_line[file_id.index() as usize] = line; + } + + Some(RaFixtureAnalysis { + db: analysis, + tmp_file_ids, + line_offsets, + virtual_file_id_to_line, + mapper, + literal, + sysroot_files, + combined_len, + }) + } + + pub fn files(&self) -> impl Iterator { + self.tmp_file_ids.iter().map(|(file, _)| *file) + } + + /// This returns `None` for minicore or other sysroot files. + fn virtual_file_id_to_line(&self, file_id: FileId) -> Option { + if self.is_sysroot_file(file_id) { + None + } else { + Some(self.virtual_file_id_to_line[file_id.index() as usize]) + } + } + + pub fn map_offset_down(&self, offset: TextSize) -> Option<(FileId, TextSize)> { + let inside_literal_range = self.literal.map_offset_down(offset)?; + let combined_offset = self.mapper.map_offset_down(inside_literal_range)?; + // There is usually a small number of files, so a linear search is smaller and faster. 
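+        // Each `tmp_file_ids` entry records the line at which that virtual file starts in the
+        // combined (indentation-trimmed) text, so a file spans from its own first line offset up to
+        // the next file's first line offset, or to the end of the combined text for the last file.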
+ let (_, &(file_id, file_line)) = + self.tmp_file_ids.iter().enumerate().find(|&(idx, &(_, file_line))| { + let file_start = self.line_offsets[file_line]; + let file_end = self + .tmp_file_ids + .get(idx + 1) + .map(|&(_, next_file_line)| self.line_offsets[next_file_line]) + .unwrap_or_else(|| self.combined_len); + TextRange::new(file_start, file_end).contains(combined_offset) + })?; + let file_line_offset = self.line_offsets[file_line]; + let file_offset = combined_offset - file_line_offset; + Some((file_id, file_offset)) + } + + pub fn map_range_down(&self, range: TextRange) -> Option<(FileId, TextRange)> { + let (start_file_id, start_offset) = self.map_offset_down(range.start())?; + let (end_file_id, end_offset) = self.map_offset_down(range.end())?; + if start_file_id != end_file_id { + None + } else { + Some((start_file_id, TextRange::new(start_offset, end_offset))) + } + } + + pub fn map_range_up( + &self, + virtual_file: FileId, + range: TextRange, + ) -> impl Iterator { + // This could be `None` if the file is empty. + self.virtual_file_id_to_line(virtual_file) + .and_then(|line| self.line_offsets.get(line)) + .into_iter() + .flat_map(move |&tmp_file_offset| { + // Resolve the offset relative to the virtual file to an offset relative to the combined indentation-trimmed file + let range = range + tmp_file_offset; + // Then resolve that to an offset relative to the real file. + self.mapper.map_range_up(range) + }) + // And finally resolve the offset relative to the literal to relative to the file. + .filter_map(|range| self.literal.map_range_up(range)) + } + + pub fn map_offset_up(&self, virtual_file: FileId, offset: TextSize) -> Option { + self.map_range_up(virtual_file, TextRange::empty(offset)).next().map(|range| range.start()) + } + + pub fn is_sysroot_file(&self, file_id: FileId) -> bool { + self.sysroot_files.contains(&file_id) + } +} + +pub trait UpmapFromRaFixture: Sized { + fn upmap_from_ra_fixture( + self, + analysis: &RaFixtureAnalysis, + virtual_file_id: FileId, + real_file_id: FileId, + ) -> Result; +} + +trait IsEmpty { + fn is_empty(&self) -> bool; +} + +impl IsEmpty for Vec { + fn is_empty(&self) -> bool { + self.is_empty() + } +} + +impl IsEmpty for SmallVec<[T; N]> { + fn is_empty(&self) -> bool { + self.is_empty() + } +} + +#[allow(clippy::disallowed_types)] +impl IsEmpty for std::collections::HashMap { + fn is_empty(&self) -> bool { + self.is_empty() + } +} + +fn upmap_collection( + collection: Collection, + analysis: &RaFixtureAnalysis, + virtual_file_id: FileId, + real_file_id: FileId, +) -> Result +where + T: UpmapFromRaFixture, + Collection: IntoIterator + FromIterator + IsEmpty, +{ + if collection.is_empty() { + // The collection was already empty, don't mark it as failing just because of that. + return Ok(collection); + } + let result = collection + .into_iter() + .filter_map(|item| item.upmap_from_ra_fixture(analysis, virtual_file_id, real_file_id).ok()) + .collect::(); + if result.is_empty() { + // The collection was emptied by the upmapping - all items errored, therefore mark it as erroring as well. 
+ Err(()) + } else { + Ok(result) + } +} + +impl UpmapFromRaFixture for Option { + fn upmap_from_ra_fixture( + self, + analysis: &RaFixtureAnalysis, + virtual_file_id: FileId, + real_file_id: FileId, + ) -> Result { + Ok(match self { + Some(it) => Some(it.upmap_from_ra_fixture(analysis, virtual_file_id, real_file_id)?), + None => None, + }) + } +} + +impl UpmapFromRaFixture for Vec { + fn upmap_from_ra_fixture( + self, + analysis: &RaFixtureAnalysis, + virtual_file_id: FileId, + real_file_id: FileId, + ) -> Result { + upmap_collection(self, analysis, virtual_file_id, real_file_id) + } +} + +impl UpmapFromRaFixture for SmallVec<[T; N]> { + fn upmap_from_ra_fixture( + self, + analysis: &RaFixtureAnalysis, + virtual_file_id: FileId, + real_file_id: FileId, + ) -> Result { + upmap_collection(self, analysis, virtual_file_id, real_file_id) + } +} + +#[allow(clippy::disallowed_types)] +impl + UpmapFromRaFixture for std::collections::HashMap +{ + fn upmap_from_ra_fixture( + self, + analysis: &RaFixtureAnalysis, + virtual_file_id: FileId, + real_file_id: FileId, + ) -> Result { + upmap_collection(self, analysis, virtual_file_id, real_file_id) + } +} + +// A map of `FileId`s is treated as associating the ranges in the values with the keys. +#[allow(clippy::disallowed_types)] +impl UpmapFromRaFixture + for std::collections::HashMap +{ + fn upmap_from_ra_fixture( + self, + analysis: &RaFixtureAnalysis, + _virtual_file_id: FileId, + real_file_id: FileId, + ) -> Result { + if self.is_empty() { + return Ok(self); + } + let result = self + .into_iter() + .filter_map(|(virtual_file_id, value)| { + Some(( + real_file_id, + value.upmap_from_ra_fixture(analysis, virtual_file_id, real_file_id).ok()?, + )) + }) + .collect::>(); + if result.is_empty() { Err(()) } else { Ok(result) } + } +} + +macro_rules! impl_tuple { + () => {}; // Base case. 
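+    // Recursive case: implement the trait for `( $first, $( $rest, )* )`, upmapping every element
+    // (and failing if any element fails to upmap), then recurse on the tail so that shorter tuples
+    // get an impl as well.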
+ ( $first:ident, $( $rest:ident, )* ) => { + impl< + $first: UpmapFromRaFixture, + $( $rest: UpmapFromRaFixture, )* + > UpmapFromRaFixture for ( $first, $( $rest, )* ) { + fn upmap_from_ra_fixture( + self, + analysis: &RaFixtureAnalysis, + virtual_file_id: FileId, + real_file_id: FileId, + ) -> Result { + #[allow(non_snake_case)] + let ( $first, $($rest,)* ) = self; + Ok(( + $first.upmap_from_ra_fixture(analysis, virtual_file_id, real_file_id)?, + $( $rest.upmap_from_ra_fixture(analysis, virtual_file_id, real_file_id)?, )* + )) + } + } + + impl_tuple!( $($rest,)* ); + }; +} +impl_tuple!(A, B, C, D, E,); + +impl UpmapFromRaFixture for TextSize { + fn upmap_from_ra_fixture( + self, + analysis: &RaFixtureAnalysis, + virtual_file_id: FileId, + _real_file_id: FileId, + ) -> Result { + analysis.map_offset_up(virtual_file_id, self).ok_or(()) + } +} + +impl UpmapFromRaFixture for TextRange { + fn upmap_from_ra_fixture( + self, + analysis: &RaFixtureAnalysis, + virtual_file_id: FileId, + _real_file_id: FileId, + ) -> Result { + analysis.map_range_up(virtual_file_id, self).next().ok_or(()) + } +} + +// Deliberately do not implement that, as it's easy to get things misbehave and be treated with the wrong FileId: +// +// impl UpmapFromRaFixture for FileId { +// fn upmap_from_ra_fixture( +// self, +// _analysis: &RaFixtureAnalysis, +// _virtual_file_id: FileId, +// real_file_id: FileId, +// ) -> Result { +// Ok(real_file_id) +// } +// } + +impl UpmapFromRaFixture for FilePositionWrapper { + fn upmap_from_ra_fixture( + self, + analysis: &RaFixtureAnalysis, + _virtual_file_id: FileId, + real_file_id: FileId, + ) -> Result { + Ok(FilePositionWrapper { + file_id: real_file_id, + offset: self.offset.upmap_from_ra_fixture(analysis, self.file_id, real_file_id)?, + }) + } +} + +impl UpmapFromRaFixture for FileRangeWrapper { + fn upmap_from_ra_fixture( + self, + analysis: &RaFixtureAnalysis, + _virtual_file_id: FileId, + real_file_id: FileId, + ) -> Result { + Ok(FileRangeWrapper { + file_id: real_file_id, + range: self.range.upmap_from_ra_fixture(analysis, self.file_id, real_file_id)?, + }) + } +} + +#[macro_export] +macro_rules! impl_empty_upmap_from_ra_fixture { + ( $( $ty:ty ),* $(,)? ) => { + $( + impl $crate::ra_fixture::UpmapFromRaFixture for $ty { + fn upmap_from_ra_fixture( + self, + _analysis: &$crate::ra_fixture::RaFixtureAnalysis, + _virtual_file_id: $crate::ra_fixture::FileId, + _real_file_id: $crate::ra_fixture::FileId, + ) -> Result { + Ok(self) + } + } + )* + }; +} + +impl_empty_upmap_from_ra_fixture!( + bool, + i8, + i16, + i32, + i64, + i128, + u8, + u16, + u32, + u64, + u128, + f32, + f64, + &str, + String, + SmolStr, + Documentation, + SymbolKind, + CfgExpr, + ReferenceCategory, +); diff --git a/src/tools/rust-analyzer/crates/ide-db/src/range_mapper.rs b/src/tools/rust-analyzer/crates/ide-db/src/range_mapper.rs new file mode 100644 index 0000000000000..ef84888b83b47 --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide-db/src/range_mapper.rs @@ -0,0 +1,65 @@ +//! Maps between ranges in documents. 
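+//!
+//! A minimal usage sketch (the offsets are made up for illustration and this is not one of the
+//! crate's tests, so it is not compiled as a doctest):
+//!
+//! ```ignore
+//! use ide_db::range_mapper::RangeMapper;
+//! use syntax::{TextRange, TextSize};
+//!
+//! let mut mapper = RangeMapper::default();
+//! // "fn " sits at offsets 10..13 of the source document, "main" right after it at 13..17.
+//! mapper.add("fn ", TextRange::at(TextSize::new(10), TextSize::of("fn ")));
+//! mapper.add_unmapped("/* injected, has no source location */ ");
+//! mapper.add("main", TextRange::at(TextSize::new(13), TextSize::of("main")));
+//!
+//! let combined = mapper.take_text();
+//! assert_eq!(combined, "fn /* injected, has no source location */ main");
+//!
+//! // Offset 11 of the source (the "n" of "fn") maps down to offset 1 of the combined text.
+//! assert_eq!(mapper.map_offset_down(TextSize::new(11)), Some(TextSize::new(1)));
+//!
+//! // The range of "main" in the combined text maps back up to 13..17 in the source;
+//! // the unmapped injected text in between has no source range and is skipped.
+//! let main_range =
+//!     TextRange::at(TextSize::of("fn /* injected, has no source location */ "), TextSize::of("main"));
+//! assert_eq!(
+//!     mapper.map_range_up(main_range).next(),
+//!     Some(TextRange::new(TextSize::new(13), TextSize::new(17)))
+//! );
+//! ```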
+
+use std::cmp::Ordering;
+
+use stdx::equal_range_by;
+use syntax::{TextRange, TextSize};
+
+#[derive(Default)]
+pub struct RangeMapper {
+    buf: String,
+    ranges: Vec<(TextRange, Option<TextRange>)>,
+}
+
+impl RangeMapper {
+    pub fn add(&mut self, text: &str, source_range: TextRange) {
+        let len = TextSize::of(text);
+        assert_eq!(len, source_range.len());
+        self.add_impl(text, Some(source_range.start()));
+    }
+
+    pub fn add_unmapped(&mut self, text: &str) {
+        self.add_impl(text, None);
+    }
+
+    fn add_impl(&mut self, text: &str, source: Option<TextSize>) {
+        let len = TextSize::of(text);
+        let target_range = TextRange::at(TextSize::of(&self.buf), len);
+        self.ranges.push((target_range, source.map(|it| TextRange::at(it, len))));
+        self.buf.push_str(text);
+    }
+
+    pub fn take_text(&mut self) -> String {
+        std::mem::take(&mut self.buf)
+    }
+
+    pub fn map_range_up(&self, range: TextRange) -> impl Iterator<Item = TextRange> + '_ {
+        equal_range_by(&self.ranges, |&(r, _)| {
+            if range.is_empty() && r.contains(range.start()) {
+                Ordering::Equal
+            } else {
+                TextRange::ordering(r, range)
+            }
+        })
+        .filter_map(move |i| {
+            let (target_range, source_range) = self.ranges[i];
+            let intersection = target_range.intersect(range).unwrap();
+            let source_range = source_range?;
+            Some(intersection - target_range.start() + source_range.start())
+        })
+    }
+
+    pub fn map_offset_down(&self, offset: TextSize) -> Option<TextSize> {
+        // Using a binary search here is a bit complicated because of the `None` entries.
+        // But the number of lines in fixtures is usually low.
+        let (target_range, source_range) =
+            self.ranges.iter().find_map(|&(target_range, source_range)| {
+                let source_range = source_range?;
+                if !source_range.contains(offset) {
+                    return None;
+                }
+                Some((target_range, source_range))
+            })?;
+        Some(offset - source_range.start() + target_range.start())
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
index 16c0d8d97a7db..57072bb5ba36c 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
@@ -10,6 +10,7 @@ use crate::text_edit::{TextEdit, TextEditBuilder};
 use crate::{SnippetCap, assists::Command, syntax_helpers::tree_diff::diff};
 use base_db::AnchoredPathBuf;
 use itertools::Itertools;
+use macros::UpmapFromRaFixture;
 use nohash_hasher::IntMap;
 use rustc_hash::FxHashMap;
 use span::FileId;
@@ -20,7 +21,7 @@ use syntax::{
 };
 
 /// An annotation ID associated with an indel, to describe changes.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, UpmapFromRaFixture)]
 pub struct ChangeAnnotationId(u32);
 
 impl fmt::Display for ChangeAnnotationId {
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/text_edit.rs b/src/tools/rust-analyzer/crates/ide-db/src/text_edit.rs
index 6e9bd7bdcc21a..d2a73710d58bb 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/text_edit.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/text_edit.rs
@@ -5,6 +5,7 @@
 //! rust-analyzer.
use itertools::Itertools; +use macros::UpmapFromRaFixture; pub use span::{TextRange, TextSize}; use std::cmp::max; @@ -13,14 +14,14 @@ use crate::source_change::ChangeAnnotationId; /// `InsertDelete` -- a single "atomic" change to text /// /// Must not overlap with other `InDel`s -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, UpmapFromRaFixture)] pub struct Indel { pub insert: String, /// Refers to offsets in the original text pub delete: TextRange, } -#[derive(Default, Debug, Clone)] +#[derive(Default, Debug, Clone, UpmapFromRaFixture)] pub struct TextEdit { /// Invariant: disjoint and sorted by `delete`. indels: Vec, diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs index 37af05e0d1bbf..3dc155efe96b9 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs @@ -311,7 +311,7 @@ fn minicore_smoke_test() { } fn check(minicore: MiniCore) { - let source = minicore.source_code(); + let source = minicore.source_code(MiniCore::RAW_SOURCE); let mut config = DiagnosticsConfig::test_sample(); // This should be ignored since we conditionally remove code which creates single item use with braces config.disabled.insert("unused_braces".to_owned()); @@ -321,7 +321,7 @@ fn minicore_smoke_test() { } // Checks that there is no diagnostic in minicore for each flag. - for flag in MiniCore::available_flags() { + for flag in MiniCore::available_flags(MiniCore::RAW_SOURCE) { if flag == "clone" { // Clone without copy has `moved-out-of-ref`, so ignoring. // FIXME: Maybe we should merge copy and clone in a single flag? @@ -332,5 +332,5 @@ fn minicore_smoke_test() { } // And one time for all flags, to check codes which are behind multiple flags + prevent name collisions eprintln!("Checking all minicore flags"); - check(MiniCore::from_flags(MiniCore::available_flags())) + check(MiniCore::from_flags(MiniCore::available_flags(MiniCore::RAW_SOURCE))) } diff --git a/src/tools/rust-analyzer/crates/ide/Cargo.toml b/src/tools/rust-analyzer/crates/ide/Cargo.toml index 06d2776ebe87a..08ffd391c02de 100644 --- a/src/tools/rust-analyzer/crates/ide/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide/Cargo.toml @@ -42,6 +42,7 @@ span.workspace = true # ide should depend only on the top-level `hir` package. if you need # something from some `hir-xxx` subpackage, reexport the API via `hir`. 
hir.workspace = true +macros.workspace = true [target.'cfg(not(any(target_arch = "wasm32", target_os = "emscripten")))'.dependencies] toolchain.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide/src/annotations.rs b/src/tools/rust-analyzer/crates/ide/src/annotations.rs index dec1889926dad..36c44044bb5da 100644 --- a/src/tools/rust-analyzer/crates/ide/src/annotations.rs +++ b/src/tools/rust-analyzer/crates/ide/src/annotations.rs @@ -1,6 +1,6 @@ use hir::{HasSource, InFile, InRealFile, Semantics}; use ide_db::{ - FileId, FilePosition, FileRange, FxIndexSet, RootDatabase, defs::Definition, + FileId, FilePosition, FileRange, FxIndexSet, MiniCore, RootDatabase, defs::Definition, helpers::visit_file_defs, }; use itertools::Itertools; @@ -11,7 +11,7 @@ use crate::{ annotations::fn_references::find_all_methods, goto_implementation::goto_implementation, navigation_target, - references::find_all_refs, + references::{FindAllRefsConfig, find_all_refs}, runnables::{Runnable, runnables}, }; @@ -36,7 +36,7 @@ pub enum AnnotationKind { HasReferences { pos: FilePosition, data: Option> }, } -pub struct AnnotationConfig { +pub struct AnnotationConfig<'a> { pub binary_target: bool, pub annotate_runnables: bool, pub annotate_impls: bool, @@ -44,6 +44,7 @@ pub struct AnnotationConfig { pub annotate_method_references: bool, pub annotate_enum_variant_references: bool, pub location: AnnotationLocation, + pub minicore: MiniCore<'a>, } pub enum AnnotationLocation { @@ -53,7 +54,7 @@ pub enum AnnotationLocation { pub(crate) fn annotations( db: &RootDatabase, - config: &AnnotationConfig, + config: &AnnotationConfig<'_>, file_id: FileId, ) -> Vec { let mut annotations = FxIndexSet::default(); @@ -196,13 +197,22 @@ pub(crate) fn annotations( .collect() } -pub(crate) fn resolve_annotation(db: &RootDatabase, mut annotation: Annotation) -> Annotation { +pub(crate) fn resolve_annotation( + db: &RootDatabase, + config: &AnnotationConfig<'_>, + mut annotation: Annotation, +) -> Annotation { match annotation.kind { AnnotationKind::HasImpls { pos, ref mut data } => { *data = goto_implementation(db, pos).map(|range| range.info); } AnnotationKind::HasReferences { pos, ref mut data } => { - *data = find_all_refs(&Semantics::new(db), pos, None).map(|result| { + *data = find_all_refs( + &Semantics::new(db), + pos, + &FindAllRefsConfig { search_scope: None, minicore: config.minicore }, + ) + .map(|result| { result .into_iter() .flat_map(|res| res.references) @@ -228,12 +238,13 @@ fn should_skip_runnable(kind: &RunnableKind, binary_target: bool) -> bool { #[cfg(test)] mod tests { use expect_test::{Expect, expect}; + use ide_db::MiniCore; use crate::{Annotation, AnnotationConfig, fixture}; use super::AnnotationLocation; - const DEFAULT_CONFIG: AnnotationConfig = AnnotationConfig { + const DEFAULT_CONFIG: AnnotationConfig<'_> = AnnotationConfig { binary_target: true, annotate_runnables: true, annotate_impls: true, @@ -241,12 +252,13 @@ mod tests { annotate_method_references: true, annotate_enum_variant_references: true, location: AnnotationLocation::AboveName, + minicore: MiniCore::default(), }; fn check_with_config( #[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect, - config: &AnnotationConfig, + config: &AnnotationConfig<'_>, ) { let (analysis, file_id) = fixture::file(ra_fixture); @@ -254,7 +266,7 @@ mod tests { .annotations(config, file_id) .unwrap() .into_iter() - .map(|annotation| analysis.resolve_annotation(annotation).unwrap()) + .map(|annotation| analysis.resolve_annotation(&DEFAULT_CONFIG, 
annotation).unwrap()) .collect(); expect.assert_debug_eq(&annotations); diff --git a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs index f42cead3501d1..aded911a8db11 100644 --- a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs +++ b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs @@ -4,14 +4,16 @@ use std::iter; use hir::Semantics; use ide_db::{ - FileRange, FxIndexMap, RootDatabase, + FileRange, FxIndexMap, MiniCore, RootDatabase, defs::{Definition, NameClass, NameRefClass}, helpers::pick_best_token, search::FileReference, }; use syntax::{AstNode, SyntaxKind::IDENT, ast}; -use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav, goto_definition}; +use crate::{ + FilePosition, GotoDefinitionConfig, NavigationTarget, RangeInfo, TryToNav, goto_definition, +}; #[derive(Debug, Clone)] pub struct CallItem { @@ -19,22 +21,28 @@ pub struct CallItem { pub ranges: Vec, } -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub struct CallHierarchyConfig { +#[derive(Debug, Clone, Copy)] +pub struct CallHierarchyConfig<'a> { /// Whether to exclude tests from the call hierarchy pub exclude_tests: bool, + pub minicore: MiniCore<'a>, } pub(crate) fn call_hierarchy( db: &RootDatabase, position: FilePosition, + config: &CallHierarchyConfig<'_>, ) -> Option>> { - goto_definition::goto_definition(db, position) + goto_definition::goto_definition( + db, + position, + &GotoDefinitionConfig { minicore: config.minicore }, + ) } pub(crate) fn incoming_calls( db: &RootDatabase, - CallHierarchyConfig { exclude_tests }: CallHierarchyConfig, + config: &CallHierarchyConfig<'_>, FilePosition { file_id, offset }: FilePosition, ) -> Option> { let sema = &Semantics::new(db); @@ -71,7 +79,7 @@ pub(crate) fn incoming_calls( }); if let Some((def, nav)) = def_nav { - if exclude_tests && def.is_test(db) { + if config.exclude_tests && def.is_test(db) { continue; } @@ -89,7 +97,7 @@ pub(crate) fn incoming_calls( pub(crate) fn outgoing_calls( db: &RootDatabase, - CallHierarchyConfig { exclude_tests }: CallHierarchyConfig, + config: &CallHierarchyConfig<'_>, FilePosition { file_id, offset }: FilePosition, ) -> Option> { let sema = Semantics::new(db); @@ -119,7 +127,7 @@ pub(crate) fn outgoing_calls( let callable = sema.type_of_expr(&expr)?.original.as_callable(db)?; match callable.kind() { hir::CallableKind::Function(it) => { - if exclude_tests && it.is_test(db) { + if config.exclude_tests && it.is_test(db) { return None; } it.try_to_nav(&sema) @@ -132,7 +140,7 @@ pub(crate) fn outgoing_calls( } ast::CallableExpr::MethodCall(expr) => { let function = sema.resolve_method_call(&expr)?; - if exclude_tests && function.is_test(db) { + if config.exclude_tests && function.is_test(db) { return None; } function @@ -166,7 +174,7 @@ impl CallLocations { #[cfg(test)] mod tests { use expect_test::{Expect, expect}; - use ide_db::FilePosition; + use ide_db::{FilePosition, MiniCore}; use itertools::Itertools; use crate::fixture; @@ -189,21 +197,20 @@ mod tests { ) } + let config = crate::CallHierarchyConfig { exclude_tests, minicore: MiniCore::default() }; let (analysis, pos) = fixture::position(ra_fixture); - let mut navs = analysis.call_hierarchy(pos).unwrap().unwrap().info; + let mut navs = analysis.call_hierarchy(pos, &config).unwrap().unwrap().info; assert_eq!(navs.len(), 1); let nav = navs.pop().unwrap(); expected_nav.assert_eq(&nav.debug_render()); - let config = crate::CallHierarchyConfig { exclude_tests }; - let item_pos = FilePosition { file_id: 
nav.file_id, offset: nav.focus_or_full_range().start() }; - let incoming_calls = analysis.incoming_calls(config, item_pos).unwrap().unwrap(); + let incoming_calls = analysis.incoming_calls(&config, item_pos).unwrap().unwrap(); expected_incoming.assert_eq(&incoming_calls.into_iter().map(debug_render).join("\n")); - let outgoing_calls = analysis.outgoing_calls(config, item_pos).unwrap().unwrap(); + let outgoing_calls = analysis.outgoing_calls(&config, item_pos).unwrap().unwrap(); expected_outgoing.assert_eq(&outgoing_calls.into_iter().map(debug_render).join("\n")); } diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs index 686dbe2412933..375ce94bf644f 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs @@ -6,8 +6,8 @@ use ide_db::{ use syntax::{AstNode, SyntaxKind::*, T, ast, match_ast}; use crate::{ - FilePosition, NavigationTarget, RangeInfo, goto_definition::goto_definition, - navigation_target::TryToNav, + FilePosition, GotoDefinitionConfig, NavigationTarget, RangeInfo, + goto_definition::goto_definition, navigation_target::TryToNav, }; // Feature: Go to Declaration @@ -21,6 +21,7 @@ use crate::{ pub(crate) fn goto_declaration( db: &RootDatabase, position @ FilePosition { file_id, offset }: FilePosition, + config: &GotoDefinitionConfig<'_>, ) -> Option>> { let sema = Semantics::new(db); let file = sema.parse_guess_edition(file_id).syntax().clone(); @@ -69,20 +70,27 @@ pub(crate) fn goto_declaration( .flatten() .collect(); - if info.is_empty() { goto_definition(db, position) } else { Some(RangeInfo::new(range, info)) } + if info.is_empty() { + goto_definition(db, position, config) + } else { + Some(RangeInfo::new(range, info)) + } } #[cfg(test)] mod tests { - use ide_db::FileRange; + use ide_db::{FileRange, MiniCore}; use itertools::Itertools; - use crate::fixture; + use crate::{GotoDefinitionConfig, fixture}; + + const TEST_CONFIG: GotoDefinitionConfig<'_> = + GotoDefinitionConfig { minicore: MiniCore::default() }; fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str) { let (analysis, position, expected) = fixture::annotations(ra_fixture); let navs = analysis - .goto_declaration(position) + .goto_declaration(position, &TEST_CONFIG) .unwrap() .expect("no declaration or definition found") .info; diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs index 2dcb13d9e7aa1..e335989ab2b07 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs @@ -1,5 +1,6 @@ use std::{iter, mem::discriminant}; +use crate::Analysis; use crate::{ FilePosition, NavigationTarget, RangeInfo, TryToNav, UpmappingResult, doc_links::token_as_doc_comment, @@ -8,6 +9,7 @@ use crate::{ use hir::{ AsAssocItem, AssocItem, CallableKind, FileRange, HasCrate, InFile, ModuleDef, Semantics, sym, }; +use ide_db::{MiniCore, ra_fixture::UpmapFromRaFixture}; use ide_db::{ RootDatabase, SymbolKind, base_db::{AnchoredPath, SourceDatabase}, @@ -25,6 +27,11 @@ use syntax::{ match_ast, }; +#[derive(Debug)] +pub struct GotoDefinitionConfig<'a> { + pub minicore: MiniCore<'a>, +} + // Feature: Go to Definition // // Navigates to the definition of an identifier. 
@@ -39,6 +46,7 @@ use syntax::{ pub(crate) fn goto_definition( db: &RootDatabase, FilePosition { file_id, offset }: FilePosition, + config: &GotoDefinitionConfig<'_>, ) -> Option>> { let sema = &Semantics::new(db); let file = sema.parse_guess_edition(file_id).syntax().clone(); @@ -83,52 +91,64 @@ pub(crate) fn goto_definition( return Some(RangeInfo::new(original_token.text_range(), navs)); } - let navs = sema - .descend_into_macros_no_opaque(original_token.clone(), false) - .into_iter() - .filter_map(|token| { - if let Some(navs) = find_definition_for_known_blanket_dual_impls(sema, &token.value) { - return Some(navs); - } + let tokens = sema.descend_into_macros_no_opaque(original_token.clone(), false); + let mut navs = Vec::new(); + for token in tokens { + if let Some(n) = find_definition_for_known_blanket_dual_impls(sema, &token.value) { + navs.extend(n); + continue; + } - let parent = token.value.parent()?; + if let Some(token) = ast::String::cast(token.value.clone()) + && let Some(original_token) = ast::String::cast(original_token.clone()) + && let Some((analysis, fixture_analysis)) = + Analysis::from_ra_fixture(sema, original_token, &token, config.minicore) + && let Some((virtual_file_id, file_offset)) = fixture_analysis.map_offset_down(offset) + { + return hir::attach_db_allow_change(&analysis.db, || { + goto_definition( + &analysis.db, + FilePosition { file_id: virtual_file_id, offset: file_offset }, + config, + ) + }) + .and_then(|navs| { + navs.upmap_from_ra_fixture(&fixture_analysis, virtual_file_id, file_id).ok() + }); + } - let token_file_id = token.file_id; - if let Some(token) = ast::String::cast(token.value.clone()) - && let Some(x) = - try_lookup_include_path(sema, InFile::new(token_file_id, token), file_id) - { - return Some(vec![x]); - } + let parent = token.value.parent()?; - if ast::TokenTree::can_cast(parent.kind()) - && let Some(x) = try_lookup_macro_def_in_macro_use(sema, token.value) - { - return Some(vec![x]); - } + let token_file_id = token.file_id; + if let Some(token) = ast::String::cast(token.value.clone()) + && let Some(x) = + try_lookup_include_path(sema, InFile::new(token_file_id, token), file_id) + { + navs.push(x); + continue; + } - Some( - IdentClass::classify_node(sema, &parent)? 
- .definitions() + if ast::TokenTree::can_cast(parent.kind()) + && let Some(x) = try_lookup_macro_def_in_macro_use(sema, token.value) + { + navs.push(x); + continue; + } + + let Some(ident_class) = IdentClass::classify_node(sema, &parent) else { continue }; + navs.extend(ident_class.definitions().into_iter().flat_map(|(def, _)| { + if let Definition::ExternCrateDecl(crate_def) = def { + return crate_def + .resolved_crate(db) + .map(|it| it.root_module().to_nav(sema.db)) .into_iter() - .flat_map(|(def, _)| { - if let Definition::ExternCrateDecl(crate_def) = def { - return crate_def - .resolved_crate(db) - .map(|it| it.root_module().to_nav(sema.db)) - .into_iter() - .flatten() - .collect(); - } - try_filter_trait_item_definition(sema, &def) - .unwrap_or_else(|| def_to_nav(sema, def)) - }) - .collect(), - ) - }) - .flatten() - .unique() - .collect::>(); + .flatten() + .collect(); + } + try_filter_trait_item_definition(sema, &def).unwrap_or_else(|| def_to_nav(sema, def)) + })); + } + let navs = navs.into_iter().unique().collect(); Some(RangeInfo::new(original_token.text_range(), navs)) } @@ -584,15 +604,22 @@ fn expr_to_nav( #[cfg(test)] mod tests { - use crate::fixture; - use ide_db::FileRange; + use crate::{GotoDefinitionConfig, fixture}; + use ide_db::{FileRange, MiniCore}; use itertools::Itertools; use syntax::SmolStr; + const TEST_CONFIG: GotoDefinitionConfig<'_> = + GotoDefinitionConfig { minicore: MiniCore::default() }; + #[track_caller] fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str) { let (analysis, position, expected) = fixture::annotations(ra_fixture); - let navs = analysis.goto_definition(position).unwrap().expect("no definition found").info; + let navs = analysis + .goto_definition(position, &TEST_CONFIG) + .unwrap() + .expect("no definition found") + .info; let cmp = |&FileRange { file_id, range }: &_| (file_id, range.start()); let navs = navs @@ -611,14 +638,22 @@ mod tests { fn check_unresolved(#[rust_analyzer::rust_fixture] ra_fixture: &str) { let (analysis, position) = fixture::position(ra_fixture); - let navs = analysis.goto_definition(position).unwrap().expect("no definition found").info; + let navs = analysis + .goto_definition(position, &TEST_CONFIG) + .unwrap() + .expect("no definition found") + .info; assert!(navs.is_empty(), "didn't expect this to resolve anywhere: {navs:?}") } fn check_name(expected_name: &str, #[rust_analyzer::rust_fixture] ra_fixture: &str) { let (analysis, position, _) = fixture::annotations(ra_fixture); - let navs = analysis.goto_definition(position).unwrap().expect("no definition found").info; + let navs = analysis + .goto_definition(position, &TEST_CONFIG) + .unwrap() + .expect("no definition found") + .info; assert!(navs.len() < 2, "expected single navigation target but encountered {}", navs.len()); let Some(target) = navs.into_iter().next() else { panic!("expected single navigation target but encountered none"); @@ -3961,4 +3996,23 @@ mod prim_str {} "#, ); } + + #[test] + fn ra_fixture() { + check( + r##" +fn fixture(#[rust_analyzer::rust_fixture] ra_fixture: &str) {} + +fn foo() { + fixture(r#" +fn foo() {} +// ^^^ +fn bar() { + f$0oo(); +} + "#) +} + "##, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs index c4fb6d1a5b4b4..e1d18b0c41162 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs @@ -11,29 +11,32 @@ use hir::{ db::DefDatabase, }; use ide_db::{ - FileRange, FxIndexSet, Ranker, 
RootDatabase, + FileRange, FxIndexSet, MiniCore, Ranker, RootDatabase, defs::{Definition, IdentClass, NameRefClass, OperatorClass}, famous_defs::FamousDefs, helpers::pick_best_token, + ra_fixture::UpmapFromRaFixture, }; use itertools::{Itertools, multizip}; -use span::Edition; +use macros::UpmapFromRaFixture; +use span::{Edition, TextRange}; use syntax::{ - AstNode, + AstNode, AstToken, SyntaxKind::{self, *}, SyntaxNode, T, ast, }; use crate::{ - FileId, FilePosition, NavigationTarget, RangeInfo, Runnable, TryToNav, + Analysis, FileId, FilePosition, NavigationTarget, RangeInfo, Runnable, TryToNav, doc_links::token_as_doc_comment, markdown_remove::remove_markdown, markup::Markup, navigation_target::UpmappingResult, runnables::{runnable_fn, runnable_mod}, }; -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct HoverConfig { + +#[derive(Clone, Debug)] +pub struct HoverConfig<'a> { pub links_in_hover: bool, pub memory_layout: Option, pub documentation: bool, @@ -44,6 +47,7 @@ pub struct HoverConfig { pub max_enum_variants_count: Option, pub max_subst_ty_len: SubstTyLen, pub show_drop_glue: bool, + pub minicore: MiniCore<'a>, } #[derive(Clone, Debug, PartialEq, Eq)] @@ -75,7 +79,7 @@ pub enum HoverDocFormat { PlainText, } -#[derive(Debug, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq, UpmapFromRaFixture)] pub enum HoverAction { Runnable(Runnable), Implementation(FilePosition), @@ -108,14 +112,14 @@ impl HoverAction { } } -#[derive(Debug, Clone, Eq, PartialEq, Hash)] +#[derive(Debug, Clone, Eq, PartialEq, Hash, UpmapFromRaFixture)] pub struct HoverGotoTypeData { pub mod_path: String, pub nav: NavigationTarget, } /// Contains the results when hovering over an item -#[derive(Clone, Debug, Default, Hash, PartialEq, Eq)] +#[derive(Clone, Debug, Default, Hash, PartialEq, Eq, UpmapFromRaFixture)] pub struct HoverResult { pub markup: Markup, pub actions: Vec, @@ -130,7 +134,7 @@ pub struct HoverResult { pub(crate) fn hover( db: &RootDatabase, frange @ FileRange { file_id, range }: FileRange, - config: &HoverConfig, + config: &HoverConfig<'_>, ) -> Option> { let sema = &hir::Semantics::new(db); let file = sema.parse_guess_edition(file_id).syntax().clone(); @@ -161,7 +165,7 @@ fn hover_offset( sema: &Semantics<'_, RootDatabase>, FilePosition { file_id, offset }: FilePosition, file: SyntaxNode, - config: &HoverConfig, + config: &HoverConfig<'_>, edition: Edition, display_target: DisplayTarget, ) -> Option> { @@ -219,6 +223,21 @@ fn hover_offset( return Some(RangeInfo::new(range, res)); } + if let Some(literal) = ast::String::cast(original_token.clone()) + && let Some((analysis, fixture_analysis)) = + Analysis::from_ra_fixture(sema, literal.clone(), &literal, config.minicore) + { + let (virtual_file_id, virtual_offset) = fixture_analysis.map_offset_down(offset)?; + return analysis + .hover( + config, + FileRange { file_id: virtual_file_id, range: TextRange::empty(virtual_offset) }, + ) + .ok()?? + .upmap_from_ra_fixture(&fixture_analysis, virtual_file_id, file_id) + .ok(); + } + // prefer descending the same token kind in attribute expansions, in normal macros text // equivalency is more important let mut descended = sema.descend_into_macros(original_token.clone()); @@ -383,9 +402,9 @@ fn hover_offset( fn hover_ranged( sema: &Semantics<'_, RootDatabase>, - FileRange { range, .. 
}: FileRange, + FileRange { file_id, range }: FileRange, file: SyntaxNode, - config: &HoverConfig, + config: &HoverConfig<'_>, edition: Edition, display_target: DisplayTarget, ) -> Option> { @@ -404,6 +423,20 @@ fn hover_ranged( { render::deref_expr(sema, config, prefix_expr, edition, display_target) } + Either::Left(ast::Expr::Literal(literal)) => { + if let Some(literal) = ast::String::cast(literal.token()) + && let Some((analysis, fixture_analysis)) = + Analysis::from_ra_fixture(sema, literal.clone(), &literal, config.minicore) + { + let (virtual_file_id, virtual_range) = fixture_analysis.map_range_down(range)?; + return analysis + .hover(config, FileRange { file_id: virtual_file_id, range: virtual_range }) + .ok()?? + .upmap_from_ra_fixture(&fixture_analysis, virtual_file_id, file_id) + .ok(); + } + None + } _ => None, }; let res = @@ -426,7 +459,7 @@ pub(crate) fn hover_for_definition( scope_node: &SyntaxNode, macro_arm: Option, render_extras: bool, - config: &HoverConfig, + config: &HoverConfig<'_>, edition: Edition, display_target: DisplayTarget, ) -> HoverResult { diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs index f29ccc985c18d..a1eff3aaee789 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs @@ -35,7 +35,7 @@ use crate::{ pub(super) fn type_info_of( sema: &Semantics<'_, RootDatabase>, - _config: &HoverConfig, + _config: &HoverConfig<'_>, expr_or_pat: &Either, edition: Edition, display_target: DisplayTarget, @@ -49,7 +49,7 @@ pub(super) fn type_info_of( pub(super) fn closure_expr( sema: &Semantics<'_, RootDatabase>, - config: &HoverConfig, + config: &HoverConfig<'_>, c: ast::ClosureExpr, edition: Edition, display_target: DisplayTarget, @@ -60,7 +60,7 @@ pub(super) fn closure_expr( pub(super) fn try_expr( sema: &Semantics<'_, RootDatabase>, - _config: &HoverConfig, + _config: &HoverConfig<'_>, try_expr: &ast::TryExpr, edition: Edition, display_target: DisplayTarget, @@ -155,7 +155,7 @@ pub(super) fn try_expr( pub(super) fn deref_expr( sema: &Semantics<'_, RootDatabase>, - _config: &HoverConfig, + _config: &HoverConfig<'_>, deref_expr: &ast::PrefixExpr, edition: Edition, display_target: DisplayTarget, @@ -219,7 +219,7 @@ pub(super) fn deref_expr( pub(super) fn underscore( sema: &Semantics<'_, RootDatabase>, - config: &HoverConfig, + config: &HoverConfig<'_>, token: &SyntaxToken, edition: Edition, display_target: DisplayTarget, @@ -263,7 +263,7 @@ pub(super) fn underscore( pub(super) fn keyword( sema: &Semantics<'_, RootDatabase>, - config: &HoverConfig, + config: &HoverConfig<'_>, token: &SyntaxToken, edition: Edition, display_target: DisplayTarget, @@ -290,7 +290,7 @@ pub(super) fn keyword( /// i.e. 
`let S {a, ..} = S {a: 1, b: 2}` pub(super) fn struct_rest_pat( sema: &Semantics<'_, RootDatabase>, - _config: &HoverConfig, + _config: &HoverConfig<'_>, pattern: &ast::RecordPat, edition: Edition, display_target: DisplayTarget, @@ -371,7 +371,7 @@ pub(super) fn process_markup( def: Definition, markup: &Markup, markup_range_map: Option, - config: &HoverConfig, + config: &HoverConfig<'_>, ) -> Markup { let markup = markup.as_str(); let markup = if config.links_in_hover { @@ -481,7 +481,7 @@ pub(super) fn definition( macro_arm: Option, render_extras: bool, subst_types: Option<&Vec<(Symbol, Type<'_>)>>, - config: &HoverConfig, + config: &HoverConfig<'_>, edition: Edition, display_target: DisplayTarget, ) -> (Markup, Option) { @@ -979,7 +979,7 @@ fn render_notable_trait( fn type_info( sema: &Semantics<'_, RootDatabase>, - config: &HoverConfig, + config: &HoverConfig<'_>, ty: TypeInfo<'_>, edition: Edition, display_target: DisplayTarget, @@ -1038,7 +1038,7 @@ fn type_info( fn closure_ty( sema: &Semantics<'_, RootDatabase>, - config: &HoverConfig, + config: &HoverConfig<'_>, TypeInfo { original, adjusted }: &TypeInfo<'_>, edition: Edition, display_target: DisplayTarget, diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs index df1800616803e..91fb4d0a67153 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs @@ -1,5 +1,5 @@ use expect_test::{Expect, expect}; -use ide_db::{FileRange, base_db::SourceDatabase}; +use ide_db::{FileRange, MiniCore, base_db::SourceDatabase}; use syntax::TextRange; use crate::{ @@ -8,7 +8,7 @@ use crate::{ use hir::setup_tracing; -const HOVER_BASE_CONFIG: HoverConfig = HoverConfig { +const HOVER_BASE_CONFIG: HoverConfig<'_> = HoverConfig { links_in_hover: false, memory_layout: Some(MemoryLayoutHoverConfig { size: Some(MemoryLayoutHoverRenderKind::Both), @@ -25,6 +25,7 @@ const HOVER_BASE_CONFIG: HoverConfig = HoverConfig { max_enum_variants_count: Some(5), max_subst_ty_len: super::SubstTyLen::Unlimited, show_drop_glue: true, + minicore: MiniCore::default(), }; fn check_hover_no_result(#[rust_analyzer::rust_fixture] ra_fixture: &str) { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs index f7b09b43813d6..21550d5e66658 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs @@ -8,9 +8,12 @@ use hir::{ ClosureStyle, DisplayTarget, EditionedFileId, HasVisibility, HirDisplay, HirDisplayError, HirWrite, InRealFile, ModuleDef, ModuleDefId, Semantics, sym, }; -use ide_db::{FileRange, RootDatabase, famous_defs::FamousDefs, text_edit::TextEditBuilder}; +use ide_db::{ + FileRange, MiniCore, RootDatabase, famous_defs::FamousDefs, text_edit::TextEditBuilder, +}; use ide_db::{FxHashSet, text_edit::TextEdit}; use itertools::Itertools; +use macros::UpmapFromRaFixture; use smallvec::{SmallVec, smallvec}; use stdx::never; use syntax::{ @@ -37,6 +40,7 @@ mod implicit_static; mod implied_dyn_trait; mod lifetime; mod param_name; +mod ra_fixture; mod range_exclusive; // Feature: Inlay Hints @@ -80,7 +84,7 @@ pub(crate) fn inlay_hints( db: &RootDatabase, file_id: FileId, range_limit: Option, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, ) -> Vec { let _p = tracing::info_span!("inlay_hints").entered(); let sema = Semantics::new(db); @@ -132,7 +136,7 @@ pub(crate) fn inlay_hints_resolve( 
file_id: FileId, resolve_range: TextRange, hash: u64, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, hasher: impl Fn(&InlayHint) -> u64, ) -> Option { let _p = tracing::info_span!("inlay_hints_resolve").entered(); @@ -208,7 +212,7 @@ fn hints( hints: &mut Vec, ctx: &mut InlayHintCtx, famous_defs @ FamousDefs(sema, _krate): &FamousDefs<'_, '_>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, file_id: EditionedFileId, display_target: DisplayTarget, node: SyntaxNode, @@ -239,6 +243,7 @@ fn hints( closure_ret::hints(hints, famous_defs, config, display_target, it) }, ast::Expr::RangeExpr(it) => range_exclusive::hints(hints, famous_defs, config, it), + ast::Expr::Literal(it) => ra_fixture::hints(hints, famous_defs.0, file_id, config, it), _ => Some(()), } }, @@ -294,8 +299,8 @@ fn hints( }; } -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct InlayHintsConfig { +#[derive(Clone, Debug)] +pub struct InlayHintsConfig<'a> { pub render_colons: bool, pub type_hints: bool, pub sized_bound: bool, @@ -321,9 +326,10 @@ pub struct InlayHintsConfig { pub max_length: Option, pub closing_brace_hints_min_lines: Option, pub fields_to_resolve: InlayFieldsToResolve, + pub minicore: MiniCore<'a>, } -impl InlayHintsConfig { +impl InlayHintsConfig<'_> { fn lazy_text_edit(&self, finish: impl FnOnce() -> TextEdit) -> LazyProperty { if self.fields_to_resolve.resolve_text_edits { LazyProperty::Lazy @@ -466,7 +472,7 @@ pub enum InlayHintPosition { After, } -#[derive(Debug)] +#[derive(Debug, UpmapFromRaFixture)] pub struct InlayHint { /// The text range this inlay hint applies to. pub range: TextRange, @@ -485,9 +491,10 @@ pub struct InlayHint { } /// A type signaling that a value is either computed, or is available for computation. -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Default, UpmapFromRaFixture)] pub enum LazyProperty { Computed(T), + #[default] Lazy, } @@ -537,7 +544,7 @@ pub enum InlayTooltip { Markdown(String), } -#[derive(Default, Hash)] +#[derive(Default, Hash, UpmapFromRaFixture)] pub struct InlayHintLabel { pub parts: SmallVec<[InlayHintLabelPart; 1]>, } @@ -623,6 +630,7 @@ impl fmt::Debug for InlayHintLabel { } } +#[derive(UpmapFromRaFixture)] pub struct InlayHintLabelPart { pub text: String, /// Source location represented by this label part. 
The client will use this to fetch the part's @@ -724,7 +732,7 @@ impl InlayHintLabelBuilder<'_> { fn label_of_ty( famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, ty: &hir::Type<'_>, display_target: DisplayTarget, ) -> Option { @@ -734,7 +742,7 @@ fn label_of_ty( mut max_length: Option, ty: &hir::Type<'_>, label_builder: &mut InlayHintLabelBuilder<'_>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, display_target: DisplayTarget, ) -> Result<(), HirDisplayError> { hir::attach_db(sema.db, || { @@ -829,7 +837,7 @@ fn hint_iterator<'db>( fn ty_to_text_edit( sema: &Semantics<'_, RootDatabase>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, node_for_hint: &SyntaxNode, ty: &hir::Type<'_>, offset_to_insert_ty: TextSize, @@ -860,6 +868,7 @@ mod tests { use expect_test::Expect; use hir::ClosureStyle; + use ide_db::MiniCore; use itertools::Itertools; use test_utils::extract_annotations; @@ -869,7 +878,7 @@ mod tests { use super::{ClosureReturnTypeHints, GenericParameterHints, InlayFieldsToResolve}; - pub(super) const DISABLED_CONFIG: InlayHintsConfig = InlayHintsConfig { + pub(super) const DISABLED_CONFIG: InlayHintsConfig<'_> = InlayHintsConfig { discriminant_hints: DiscriminantHints::Never, render_colons: false, type_hints: false, @@ -899,8 +908,9 @@ mod tests { fields_to_resolve: InlayFieldsToResolve::empty(), implicit_drop_hints: false, range_exclusive_hints: false, + minicore: MiniCore::default(), }; - pub(super) const TEST_CONFIG: InlayHintsConfig = InlayHintsConfig { + pub(super) const TEST_CONFIG: InlayHintsConfig<'_> = InlayHintsConfig { type_hints: true, parameter_hints: true, chaining_hints: true, @@ -917,7 +927,7 @@ mod tests { #[track_caller] pub(super) fn check_with_config( - config: InlayHintsConfig, + config: InlayHintsConfig<'_>, #[rust_analyzer::rust_fixture] ra_fixture: &str, ) { let (analysis, file_id) = fixture::file(ra_fixture); @@ -936,7 +946,7 @@ mod tests { #[track_caller] pub(super) fn check_expect( - config: InlayHintsConfig, + config: InlayHintsConfig<'_>, #[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect, ) { @@ -951,7 +961,7 @@ mod tests { /// expect test. 
#[track_caller] pub(super) fn check_edit( - config: InlayHintsConfig, + config: InlayHintsConfig<'_>, #[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect, ) { @@ -974,7 +984,7 @@ mod tests { #[track_caller] pub(super) fn check_no_edit( - config: InlayHintsConfig, + config: InlayHintsConfig<'_>, #[rust_analyzer::rust_fixture] ra_fixture: &str, ) { let (analysis, file_id) = fixture::file(ra_fixture); diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs index 7231a3194d095..ebb0d57525017 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs @@ -23,7 +23,7 @@ use crate::{ pub(super) fn hints( acc: &mut Vec, FamousDefs(sema, _): &FamousDefs<'_, '_>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, display_target: DisplayTarget, expr: &ast::Expr, ) -> Option<()> { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs index 121b16b97e871..de207c7821da0 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs @@ -20,7 +20,7 @@ use crate::{ pub(super) fn hints( acc: &mut Vec, famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, display_target: DisplayTarget, pat: &ast::IdentPat, ) -> Option<()> { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/binding_mode.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/binding_mode.rs index 169ab92342ba0..e8d305afb3b96 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/binding_mode.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/binding_mode.rs @@ -15,7 +15,7 @@ use crate::{InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, Inla pub(super) fn hints( acc: &mut Vec, FamousDefs(sema, _): &FamousDefs<'_, '_>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, pat: &ast::Pat, ) -> Option<()> { if !config.binding_mode_hints { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bounds.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bounds.rs index 4abd67b91f5ec..c9fbdf3ae7546 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bounds.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bounds.rs @@ -13,7 +13,7 @@ use crate::{ pub(super) fn hints( acc: &mut Vec, famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, params: ast::GenericParamList, ) -> Option<()> { if !config.sized_bound { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs index a8bb652fda226..cf3149c9461b8 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs @@ -13,7 +13,7 @@ use super::label_of_ty; pub(super) fn hints( acc: &mut Vec, famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, display_target: DisplayTarget, expr: &ast::Expr, ) -> Option<()> { @@ -93,7 +93,7 @@ mod tests { #[track_caller] pub(super) fn check_expect_clear_loc( - config: InlayHintsConfig, + config: InlayHintsConfig<'_>, #[rust_analyzer::rust_fixture] ra_fixture: &str, 
expect: Expect, ) { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs index 9d246eda57e04..ab3ce5b05b01c 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs @@ -19,7 +19,7 @@ use crate::{ pub(super) fn hints( acc: &mut Vec, sema: &Semantics<'_, RootDatabase>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, display_target: DisplayTarget, InRealFile { file_id, value: node }: InRealFile, ) -> Option<()> { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs index 3186a566d2bce..f8d4ddc6eb57a 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs @@ -13,7 +13,7 @@ use crate::{ pub(super) fn hints( acc: &mut Vec, FamousDefs(sema, _): &FamousDefs<'_, '_>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, closure: ast::ClosureExpr, ) -> Option<()> { if !config.closure_capture_hints { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs index fef1cb83c1195..7765dc4f087c2 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs @@ -13,7 +13,7 @@ use crate::{ pub(super) fn hints( acc: &mut Vec, famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, display_target: DisplayTarget, closure: ast::ClosureExpr, ) -> Option<()> { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs index a2a702835a792..5b9267126f8ab 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs @@ -17,7 +17,7 @@ use crate::{ pub(super) fn enum_hints( acc: &mut Vec, FamousDefs(sema, _): &FamousDefs<'_, '_>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, enum_: ast::Enum, ) -> Option<()> { if let DiscriminantHints::Never = config.discriminant_hints { @@ -41,7 +41,7 @@ pub(super) fn enum_hints( fn variant_hints( acc: &mut Vec, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, sema: &Semantics<'_, RootDatabase>, enum_: &ast::Enum, variant: &ast::Variant, diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/extern_block.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/extern_block.rs index 491018a4dda84..8dd6c4db4c045 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/extern_block.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/extern_block.rs @@ -7,7 +7,7 @@ use crate::{InlayHint, InlayHintsConfig}; pub(super) fn extern_block_hints( acc: &mut Vec, FamousDefs(sema, _): &FamousDefs<'_, '_>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, extern_block: ast::ExternBlock, ) -> Option<()> { if extern_block.unsafe_token().is_some() { @@ -33,7 +33,7 @@ pub(super) fn extern_block_hints( pub(super) fn fn_hints( acc: &mut Vec, FamousDefs(sema, _): &FamousDefs<'_, '_>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, fn_: &ast::Fn, extern_block: &ast::ExternBlock, ) -> Option<()> { 
@@ -51,7 +51,7 @@ pub(super) fn fn_hints( pub(super) fn static_hints( acc: &mut Vec, FamousDefs(sema, _): &FamousDefs<'_, '_>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, static_: &ast::Static, extern_block: &ast::ExternBlock, ) -> Option<()> { @@ -67,7 +67,7 @@ pub(super) fn static_hints( } fn item_hint( - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, extern_block: &ast::ExternBlock, token: SyntaxToken, ) -> InlayHint { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs index 1fddb6fbe01d1..27d14f7a73cd1 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/generic_param.rs @@ -16,7 +16,7 @@ use super::param_name::is_argument_similar_to_param_name; pub(crate) fn hints( acc: &mut Vec, FamousDefs(sema, krate): &FamousDefs<'_, '_>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, node: AnyHasGenericArgs, ) -> Option<()> { let GenericParameterHints { type_hints, lifetime_hints, const_hints } = diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs index 1e272fe3ba827..951a672d4b793 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs @@ -23,7 +23,7 @@ use crate::{InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, Inla pub(super) fn hints( acc: &mut Vec, FamousDefs(sema, _): &FamousDefs<'_, '_>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, display_target: hir::DisplayTarget, node: &ast::Fn, ) -> Option<()> { @@ -147,7 +147,7 @@ mod tests { inlay_hints::tests::{DISABLED_CONFIG, check_with_config}, }; - const ONLY_DROP_CONFIG: InlayHintsConfig = + const ONLY_DROP_CONFIG: InlayHintsConfig<'_> = InlayHintsConfig { implicit_drop_hints: true, ..DISABLED_CONFIG }; #[test] diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs index bddce904dfdea..0492991790c8e 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs @@ -15,7 +15,7 @@ use crate::{InlayHint, InlayHintPosition, InlayHintsConfig, InlayKind, LifetimeE pub(super) fn hints( acc: &mut Vec, FamousDefs(_sema, _): &FamousDefs<'_, '_>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, statik_or_const: Either, ) -> Option<()> { if config.lifetime_elision_hints != LifetimeElisionHints::Always { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implied_dyn_trait.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implied_dyn_trait.rs index 0da1785234aec..562eb1e00213c 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implied_dyn_trait.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implied_dyn_trait.rs @@ -11,7 +11,7 @@ use crate::{InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, Inla pub(super) fn hints( acc: &mut Vec, FamousDefs(sema, _): &FamousDefs<'_, '_>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, path: Either, ) -> Option<()> { let parent = path.syntax().parent()?; diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs 
index a89c53e00b3b0..4982b60f1dc8e 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs @@ -21,7 +21,7 @@ pub(super) fn fn_hints( acc: &mut Vec, ctx: &mut InlayHintCtx, fd: &FamousDefs<'_, '_>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, func: ast::Fn, ) -> Option<()> { if config.lifetime_elision_hints == LifetimeElisionHints::Never { @@ -70,7 +70,7 @@ pub(super) fn fn_ptr_hints( acc: &mut Vec, ctx: &mut InlayHintCtx, fd: &FamousDefs<'_, '_>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, func: ast::FnPtrType, ) -> Option<()> { if config.lifetime_elision_hints == LifetimeElisionHints::Never { @@ -135,7 +135,7 @@ pub(super) fn fn_path_hints( acc: &mut Vec, ctx: &mut InlayHintCtx, fd: &FamousDefs<'_, '_>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, func: &ast::PathType, ) -> Option<()> { if config.lifetime_elision_hints == LifetimeElisionHints::Never { @@ -196,7 +196,7 @@ fn hints_( acc: &mut Vec, ctx: &mut InlayHintCtx, FamousDefs(_, _): &FamousDefs<'_, '_>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, params: impl Iterator, ast::Type)>, generic_param_list: Option, ret_type: Option, diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs index 754707784055a..3e555e88303dc 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs @@ -18,7 +18,7 @@ use crate::{InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, Inla pub(super) fn hints( acc: &mut Vec, FamousDefs(sema, krate): &FamousDefs<'_, '_>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, file_id: EditionedFileId, expr: ast::Expr, ) -> Option<()> { diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/ra_fixture.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/ra_fixture.rs new file mode 100644 index 0000000000000..bee18416424cf --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/ra_fixture.rs @@ -0,0 +1,32 @@ +//! Injected inlay hints for `#[rust_analyzer::rust_fixture]`. + +use hir::{EditionedFileId, Semantics}; +use ide_db::{RootDatabase, impl_empty_upmap_from_ra_fixture, ra_fixture::UpmapFromRaFixture}; +use syntax::{AstToken, ast}; + +use crate::{Analysis, InlayHint, InlayHintPosition, InlayHintsConfig, InlayKind, InlayTooltip}; + +pub(super) fn hints( + acc: &mut Vec, + sema: &Semantics<'_, RootDatabase>, + file_id: EditionedFileId, + config: &InlayHintsConfig<'_>, + literal: ast::Literal, +) -> Option<()> { + let file_id = file_id.file_id(sema.db); + let literal = ast::String::cast(literal.token())?; + let (analysis, fixture_analysis) = + Analysis::from_ra_fixture(sema, literal.clone(), &literal, config.minicore)?; + for virtual_file_id in fixture_analysis.files() { + acc.extend( + analysis + .inlay_hints(config, virtual_file_id, None) + .ok()? 
+ .upmap_from_ra_fixture(&fixture_analysis, virtual_file_id, file_id) + .ok()?, + ); + } + Some(()) +} + +impl_empty_upmap_from_ra_fixture!(InlayHintPosition, InlayKind, InlayTooltip); diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/range_exclusive.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/range_exclusive.rs index 47bd6d737f820..a446908e736b3 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/range_exclusive.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/range_exclusive.rs @@ -11,7 +11,7 @@ use crate::{InlayHint, InlayHintsConfig}; pub(super) fn hints( acc: &mut Vec, FamousDefs(_sema, _): &FamousDefs<'_, '_>, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, range: impl ast::RangeItem, ) -> Option<()> { (config.range_exclusive_hints && range.end().is_some()) diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs index f7d21c9479505..857252832ffe1 100644 --- a/src/tools/rust-analyzer/crates/ide/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs @@ -62,7 +62,7 @@ use std::panic::{AssertUnwindSafe, UnwindSafe}; use cfg::CfgOptions; use fetch_crates::CrateInfo; -use hir::{ChangeWithProcMacros, EditionedFileId, crate_def_map, db::HirDatabase, sym}; +use hir::{ChangeWithProcMacros, EditionedFileId, crate_def_map, sym}; use ide_db::{ FxHashMap, FxIndexSet, LineIndexDatabase, base_db::{ @@ -71,7 +71,9 @@ use ide_db::{ }, prime_caches, symbol_index, }; -use syntax::SourceFile; +use ide_db::{MiniCore, ra_fixture::RaFixtureAnalysis}; +use macros::UpmapFromRaFixture; +use syntax::{SourceFile, ast}; use triomphe::Arc; use view_memory_layout::{RecursiveMemoryLayout, view_memory_layout}; @@ -83,6 +85,7 @@ pub use crate::{ expand_macro::ExpandedMacro, file_structure::{FileStructureConfig, StructureNode, StructureNodeKind}, folding_ranges::{Fold, FoldKind}, + goto_definition::GotoDefinitionConfig, highlight_related::{HighlightRelatedConfig, HighlightedRange}, hover::{ HoverAction, HoverConfig, HoverDocFormat, HoverGotoTypeData, HoverResult, @@ -102,7 +105,7 @@ pub use crate::{ }, move_item::Direction, navigation_target::{NavigationTarget, TryToNav, UpmappingResult}, - references::ReferenceSearchResult, + references::{FindAllRefsConfig, ReferenceSearchResult}, rename::RenameError, runnables::{Runnable, RunnableKind, TestId, UpdateTest}, signature_help::SignatureHelp, @@ -144,7 +147,7 @@ pub use syntax::{TextRange, TextSize}; pub type Cancellable = Result; /// Info associated with a text range. -#[derive(Debug)] +#[derive(Debug, UpmapFromRaFixture)] pub struct RangeInfo { pub range: TextRange, pub info: T, @@ -274,6 +277,28 @@ impl Analysis { (host.analysis(), file_id) } + pub(crate) fn from_ra_fixture( + sema: &Semantics<'_, RootDatabase>, + literal: ast::String, + expanded: &ast::String, + minicore: MiniCore<'_>, + ) -> Option<(Analysis, RaFixtureAnalysis)> { + Self::from_ra_fixture_with_on_cursor(sema, literal, expanded, minicore, &mut |_| {}) + } + + /// Like [`Analysis::from_ra_fixture()`], but also calls `on_cursor` with the cursor position. 
+ pub(crate) fn from_ra_fixture_with_on_cursor( + sema: &Semantics<'_, RootDatabase>, + literal: ast::String, + expanded: &ast::String, + minicore: MiniCore<'_>, + on_cursor: &mut dyn FnMut(TextRange), + ) -> Option<(Analysis, RaFixtureAnalysis)> { + let analysis = + RaFixtureAnalysis::analyze_ra_fixture(sema, literal, expanded, minicore, on_cursor)?; + Some((Analysis { db: analysis.db.clone() }, analysis)) + } + /// Debug info about the current state of the analysis. pub fn status(&self, file_id: Option) -> Cancellable { self.with_db(|db| status::status(db, file_id)) @@ -446,7 +471,7 @@ impl Analysis { /// Returns a list of the places in the file where type hints can be displayed. pub fn inlay_hints( &self, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, file_id: FileId, range: Option, ) -> Cancellable> { @@ -454,7 +479,7 @@ impl Analysis { } pub fn inlay_hints_resolve( &self, - config: &InlayHintsConfig, + config: &InlayHintsConfig<'_>, file_id: FileId, resolve_range: TextRange, hash: u64, @@ -495,16 +520,18 @@ impl Analysis { pub fn goto_definition( &self, position: FilePosition, + config: &GotoDefinitionConfig<'_>, ) -> Cancellable>>> { - self.with_db(|db| goto_definition::goto_definition(db, position)) + self.with_db(|db| goto_definition::goto_definition(db, position, config)) } /// Returns the declaration from the symbol at `position`. pub fn goto_declaration( &self, position: FilePosition, + config: &GotoDefinitionConfig<'_>, ) -> Cancellable>>> { - self.with_db(|db| goto_declaration::goto_declaration(db, position)) + self.with_db(|db| goto_declaration::goto_declaration(db, position, config)) } /// Returns the impls from the symbol at `position`. @@ -526,19 +553,16 @@ impl Analysis { pub fn find_all_refs( &self, position: FilePosition, - search_scope: Option, + config: &FindAllRefsConfig<'_>, ) -> Cancellable>> { - let search_scope = AssertUnwindSafe(search_scope); - self.with_db(|db| { - let _ = &search_scope; - references::find_all_refs(&Semantics::new(db), position, search_scope.0) - }) + let config = AssertUnwindSafe(config); + self.with_db(|db| references::find_all_refs(&Semantics::new(db), position, &config)) } /// Returns a short text describing element at position. pub fn hover( &self, - config: &HoverConfig, + config: &HoverConfig<'_>, range: FileRange, ) -> Cancellable>> { self.with_db(|db| hover::hover(db, range, config)) @@ -576,14 +600,15 @@ impl Analysis { pub fn call_hierarchy( &self, position: FilePosition, + config: &CallHierarchyConfig<'_>, ) -> Cancellable>>> { - self.with_db(|db| call_hierarchy::call_hierarchy(db, position)) + self.with_db(|db| call_hierarchy::call_hierarchy(db, position, config)) } /// Computes incoming calls for the given file position. pub fn incoming_calls( &self, - config: CallHierarchyConfig, + config: &CallHierarchyConfig<'_>, position: FilePosition, ) -> Cancellable>> { self.with_db(|db| call_hierarchy::incoming_calls(db, config, position)) @@ -592,7 +617,7 @@ impl Analysis { /// Computes outgoing calls for the given file position. 
pub fn outgoing_calls( &self, - config: CallHierarchyConfig, + config: &CallHierarchyConfig<'_>, position: FilePosition, ) -> Cancellable>> { self.with_db(|db| call_hierarchy::outgoing_calls(db, config, position)) @@ -675,28 +700,22 @@ impl Analysis { /// Computes syntax highlighting for the given file pub fn highlight( &self, - highlight_config: HighlightConfig, + highlight_config: HighlightConfig<'_>, file_id: FileId, ) -> Cancellable> { - // highlighting may construct a new database for "speculative" execution, so we can't currently attach the database - // highlighting instead sets up the attach hook where neceesary for the trait solver - Cancelled::catch(|| { - syntax_highlighting::highlight(&self.db, highlight_config, file_id, None) - }) + self.with_db(|db| syntax_highlighting::highlight(db, &highlight_config, file_id, None)) } /// Computes syntax highlighting for the given file range. pub fn highlight_range( &self, - highlight_config: HighlightConfig, + highlight_config: HighlightConfig<'_>, frange: FileRange, ) -> Cancellable> { - // highlighting may construct a new database for "speculative" execution, so we can't currently attach the database - // highlighting instead sets up the attach hook where neceesary for the trait solver - Cancelled::catch(|| { + self.with_db(|db| { syntax_highlighting::highlight( - &self.db, - highlight_config, + db, + &highlight_config, frange.file_id, Some(frange.range), ) @@ -706,22 +725,18 @@ impl Analysis { /// Computes syntax highlighting for the given file. pub fn highlight_as_html_with_config( &self, - config: HighlightConfig, + config: HighlightConfig<'_>, file_id: FileId, rainbow: bool, ) -> Cancellable { - // highlighting may construct a new database for "speculative" execution, so we can't currently attach the database - // highlighting instead sets up the attach hook where neceesary for the trait solver - Cancelled::catch(|| { - syntax_highlighting::highlight_as_html_with_config(&self.db, config, file_id, rainbow) + self.with_db(|db| { + syntax_highlighting::highlight_as_html_with_config(db, &config, file_id, rainbow) }) } /// Computes syntax highlighting for the given file. pub fn highlight_as_html(&self, file_id: FileId, rainbow: bool) -> Cancellable { - // highlighting may construct a new database for "speculative" execution, so we can't currently attach the database - // highlighting instead sets up the attach hook where neceesary for the trait solver - Cancelled::catch(|| syntax_highlighting::highlight_as_html(&self.db, file_id, rainbow)) + self.with_db(|db| syntax_highlighting::highlight_as_html(db, file_id, rainbow)) } /// Computes completions at the given position. 
@@ -853,14 +868,18 @@ impl Analysis { pub fn annotations( &self, - config: &AnnotationConfig, + config: &AnnotationConfig<'_>, file_id: FileId, ) -> Cancellable> { self.with_db(|db| annotations::annotations(db, config, file_id)) } - pub fn resolve_annotation(&self, annotation: Annotation) -> Cancellable { - self.with_db(|db| annotations::resolve_annotation(db, annotation)) + pub fn resolve_annotation( + &self, + config: &AnnotationConfig<'_>, + annotation: Annotation, + ) -> Cancellable { + self.with_db(|db| annotations::resolve_annotation(db, config, annotation)) } pub fn move_item( @@ -899,12 +918,8 @@ impl Analysis { where F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe, { - hir::attach_db(&self.db, || { - // the trait solver code may invoke `as_view` outside of queries, - // so technically we might run into a panic in salsa if the downcaster has not yet been registered. - HirDatabase::zalsa_register_downcaster(&self.db); - Cancelled::catch(|| f(&self.db)) - }) + // We use `attach_db_allow_change()` and not `attach_db()` because fixture injection can change the database. + hir::attach_db_allow_change(&self.db, || Cancelled::catch(|| f(&self.db))) } } diff --git a/src/tools/rust-analyzer/crates/ide/src/markup.rs b/src/tools/rust-analyzer/crates/ide/src/markup.rs index 750d12542605c..3eb9986c120f3 100644 --- a/src/tools/rust-analyzer/crates/ide/src/markup.rs +++ b/src/tools/rust-analyzer/crates/ide/src/markup.rs @@ -5,6 +5,8 @@ //! what is used by LSP, so let's keep it simple. use std::fmt; +use ide_db::impl_empty_upmap_from_ra_fixture; + #[derive(Clone, Default, Debug, Hash, PartialEq, Eq)] pub struct Markup { text: String, @@ -39,3 +41,5 @@ impl Markup { format!("```text\n{contents}\n```").into() } } + +impl_empty_upmap_from_ra_fixture!(Markup); diff --git a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs index db1298385b113..40580080c089a 100644 --- a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs +++ b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs @@ -14,6 +14,7 @@ use ide_db::{ defs::{Definition, find_std_module}, documentation::{Documentation, HasDocs}, famous_defs::FamousDefs, + ra_fixture::UpmapFromRaFixture, }; use span::Edition; use stdx::never; @@ -78,6 +79,44 @@ impl fmt::Debug for NavigationTarget { } } +impl UpmapFromRaFixture for NavigationTarget { + fn upmap_from_ra_fixture( + self, + analysis: &ide_db::ra_fixture::RaFixtureAnalysis, + _virtual_file_id: FileId, + real_file_id: FileId, + ) -> Result { + let virtual_file_id = self.file_id; + Ok(NavigationTarget { + file_id: real_file_id, + full_range: self.full_range.upmap_from_ra_fixture( + analysis, + virtual_file_id, + real_file_id, + )?, + focus_range: self.focus_range.upmap_from_ra_fixture( + analysis, + virtual_file_id, + real_file_id, + )?, + name: self.name.upmap_from_ra_fixture(analysis, virtual_file_id, real_file_id)?, + kind: self.kind.upmap_from_ra_fixture(analysis, virtual_file_id, real_file_id)?, + container_name: self.container_name.upmap_from_ra_fixture( + analysis, + virtual_file_id, + real_file_id, + )?, + description: self.description.upmap_from_ra_fixture( + analysis, + virtual_file_id, + real_file_id, + )?, + docs: self.docs.upmap_from_ra_fixture(analysis, virtual_file_id, real_file_id)?, + alias: self.alias.upmap_from_ra_fixture(analysis, virtual_file_id, real_file_id)?, + }) + } +} + pub(crate) trait ToNav { fn to_nav(&self, db: &RootDatabase) -> UpmappingResult; } diff --git 
a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs index 0189939eac310..a53a192997274 100644 --- a/src/tools/rust-analyzer/crates/ide/src/references.rs +++ b/src/tools/rust-analyzer/crates/ide/src/references.rs @@ -19,14 +19,17 @@ use hir::{PathResolution, Semantics}; use ide_db::{ - FileId, RootDatabase, + FileId, MiniCore, RootDatabase, defs::{Definition, NameClass, NameRefClass}, helpers::pick_best_token, + ra_fixture::UpmapFromRaFixture, search::{ReferenceCategory, SearchScope, UsageSearchResult}, }; use itertools::Itertools; +use macros::UpmapFromRaFixture; use nohash_hasher::IntMap; use span::Edition; +use syntax::AstToken; use syntax::{ AstNode, SyntaxKind::*, @@ -35,10 +38,12 @@ use syntax::{ match_ast, }; -use crate::{FilePosition, HighlightedRange, NavigationTarget, TryToNav, highlight_related}; +use crate::{ + Analysis, FilePosition, HighlightedRange, NavigationTarget, TryToNav, highlight_related, +}; /// Result of a reference search operation. -#[derive(Debug, Clone)] +#[derive(Debug, Clone, UpmapFromRaFixture)] pub struct ReferenceSearchResult { /// Information about the declaration site of the searched item. /// For ADTs (structs/enums), this points to the type definition. @@ -54,7 +59,7 @@ pub struct ReferenceSearchResult { } /// Information about the declaration site of a searched item. -#[derive(Debug, Clone)] +#[derive(Debug, Clone, UpmapFromRaFixture)] pub struct Declaration { /// Navigation information to jump to the declaration pub nav: NavigationTarget, @@ -82,6 +87,12 @@ pub struct Declaration { // // ![Find All References](https://user-images.githubusercontent.com/48062697/113020670-b7c34f00-917a-11eb-8003-370ac5f2b3cb.gif) +#[derive(Debug)] +pub struct FindAllRefsConfig<'a> { + pub search_scope: Option, + pub minicore: MiniCore<'a>, +} + /// Find all references to the item at the given position. /// /// # Arguments @@ -110,14 +121,14 @@ pub struct Declaration { pub(crate) fn find_all_refs( sema: &Semantics<'_, RootDatabase>, position: FilePosition, - search_scope: Option, + config: &FindAllRefsConfig<'_>, ) -> Option> { let _p = tracing::info_span!("find_all_refs").entered(); let syntax = sema.parse_guess_edition(position.file_id).syntax().clone(); let make_searcher = |literal_search: bool| { move |def: Definition| { let mut usages = - def.usages(sema).set_scope(search_scope.as_ref()).include_self_refs().all(); + def.usages(sema).set_scope(config.search_scope.as_ref()).include_self_refs().all(); if literal_search { retain_adt_literal_usages(&mut usages, def, sema); } @@ -165,6 +176,20 @@ pub(crate) fn find_all_refs( return Some(vec![res]); } + if let Some(token) = syntax.token_at_offset(position.offset).left_biased() + && let Some(token) = ast::String::cast(token.clone()) + && let Some((analysis, fixture_analysis)) = + Analysis::from_ra_fixture(sema, token.clone(), &token, config.minicore) + && let Some((virtual_file_id, file_offset)) = + fixture_analysis.map_offset_down(position.offset) + { + return analysis + .find_all_refs(FilePosition { file_id: virtual_file_id, offset: file_offset }, config) + .ok()?? + .upmap_from_ra_fixture(&fixture_analysis, virtual_file_id, position.file_id) + .ok(); + } + match name_for_constructor_search(&syntax, position) { Some(name) => { let def = match NameClass::classify(sema, &name)? 
{ @@ -433,10 +458,10 @@ fn handle_control_flow_keywords( mod tests { use expect_test::{Expect, expect}; use hir::EditionedFileId; - use ide_db::{FileId, RootDatabase}; + use ide_db::{FileId, MiniCore, RootDatabase}; use stdx::format_to; - use crate::{SearchScope, fixture}; + use crate::{SearchScope, fixture, references::FindAllRefsConfig}; #[test] fn exclude_tests() { @@ -1513,8 +1538,11 @@ fn main() { expect: Expect, ) { let (analysis, pos) = fixture::position(ra_fixture); - let refs = - analysis.find_all_refs(pos, search_scope.map(|it| it(&analysis.db))).unwrap().unwrap(); + let config = FindAllRefsConfig { + search_scope: search_scope.map(|it| it(&analysis.db)), + minicore: MiniCore::default(), + }; + let refs = analysis.find_all_refs(pos, &config).unwrap().unwrap(); let mut actual = String::new(); for mut refs in refs { diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs index cc1bbfbe20d63..494701d97def1 100644 --- a/src/tools/rust-analyzer/crates/ide/src/runnables.rs +++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs @@ -8,6 +8,7 @@ use hir::{ sym, }; use ide_assists::utils::{has_test_related_attribute, test_related_attribute_syn}; +use ide_db::impl_empty_upmap_from_ra_fixture; use ide_db::{ FilePosition, FxHashMap, FxIndexMap, FxIndexSet, RootDatabase, SymbolKind, base_db::RootQueryDb, @@ -17,6 +18,7 @@ use ide_db::{ search::{FileReferenceNode, SearchScope}, }; use itertools::Itertools; +use macros::UpmapFromRaFixture; use smallvec::SmallVec; use span::{Edition, TextSize}; use stdx::format_to; @@ -28,7 +30,7 @@ use syntax::{ use crate::{FileId, NavigationTarget, ToNav, TryToNav, references}; -#[derive(Debug, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq, UpmapFromRaFixture)] pub struct Runnable { pub use_name_in_title: bool, pub nav: NavigationTarget, @@ -37,6 +39,8 @@ pub struct Runnable { pub update_test: UpdateTest, } +impl_empty_upmap_from_ra_fixture!(RunnableKind, UpdateTest); + #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub enum TestId { Name(SmolStr), diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs index 453d6f537a8bf..e261928c413f4 100644 --- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs +++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs @@ -4,7 +4,7 @@ use arrayvec::ArrayVec; use hir::{Crate, Module, Semantics, db::HirDatabase}; use ide_db::{ - FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, + FileId, FileRange, FxHashMap, FxHashSet, MiniCore, RootDatabase, base_db::{RootQueryDb, SourceDatabase, VfsPath}, defs::{Definition, IdentClass}, documentation::Documentation, @@ -184,6 +184,7 @@ impl StaticIndex<'_> { closing_brace_hints_min_lines: Some(25), fields_to_resolve: InlayFieldsToResolve::empty(), range_exclusive_hints: false, + minicore: MiniCore::default(), }, file_id, None, @@ -215,6 +216,7 @@ impl StaticIndex<'_> { max_enum_variants_count: Some(5), max_subst_ty_len: SubstTyLen::Unlimited, show_drop_glue: true, + minicore: MiniCore::default(), }; let tokens = tokens.filter(|token| { matches!( diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs index 0da9ee097ac3b..66895cb0b053c 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs @@ -1,7 +1,6 @@ pub(crate) mod tags; mod highlights; -mod 
injector; mod escape; mod format; @@ -16,7 +15,7 @@ use std::ops::ControlFlow; use either::Either; use hir::{DefWithBody, EditionedFileId, InFile, InRealFile, MacroKind, Name, Semantics}; -use ide_db::{FxHashMap, FxHashSet, Ranker, RootDatabase, SymbolKind}; +use ide_db::{FxHashMap, FxHashSet, MiniCore, Ranker, RootDatabase, SymbolKind}; use syntax::{ AstNode, AstToken, NodeOrToken, SyntaxKind::*, @@ -44,8 +43,8 @@ pub struct HlRange { pub binding_hash: Option, } -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct HighlightConfig { +#[derive(Copy, Clone, Debug)] +pub struct HighlightConfig<'a> { /// Whether to highlight strings pub strings: bool, /// Whether to highlight comments @@ -64,6 +63,7 @@ pub struct HighlightConfig { pub macro_bang: bool, /// Whether to highlight unresolved things be their syntax pub syntactic_name_ref_highlighting: bool, + pub minicore: MiniCore<'a>, } // Feature: Semantic Syntax Highlighting @@ -191,7 +191,7 @@ pub struct HighlightConfig { // ![Semantic Syntax Highlighting](https://user-images.githubusercontent.com/48062697/113187625-f7f50100-9250-11eb-825e-91c58f236071.png) pub(crate) fn highlight( db: &RootDatabase, - config: HighlightConfig, + config: &HighlightConfig<'_>, file_id: FileId, range_to_highlight: Option, ) -> Vec { @@ -226,7 +226,7 @@ pub(crate) fn highlight( fn traverse( hl: &mut Highlights, sema: &Semantics<'_, RootDatabase>, - config: HighlightConfig, + config: &HighlightConfig<'_>, InRealFile { file_id, value: root }: InRealFile<&SyntaxNode>, krate: Option, range_to_highlight: TextRange, @@ -426,12 +426,9 @@ fn traverse( let edition = descended_element.file_id.edition(sema.db); let (unsafe_ops, bindings_shadow_count) = match current_body { Some(current_body) => { - let (ops, bindings) = per_body_cache.entry(current_body).or_insert_with(|| { - ( - hir::attach_db(sema.db, || sema.get_unsafe_ops(current_body)), - Default::default(), - ) - }); + let (ops, bindings) = per_body_cache + .entry(current_body) + .or_insert_with(|| (sema.get_unsafe_ops(current_body), Default::default())); (&*ops, Some(bindings)) } None => (&empty, None), @@ -494,7 +491,7 @@ fn traverse( fn string_injections( hl: &mut Highlights, sema: &Semantics<'_, RootDatabase>, - config: HighlightConfig, + config: &HighlightConfig<'_>, file_id: EditionedFileId, krate: Option, token: SyntaxToken, @@ -591,7 +588,7 @@ fn descend_token( }) } -fn filter_by_config(highlight: &mut Highlight, config: HighlightConfig) -> bool { +fn filter_by_config(highlight: &mut Highlight, config: &HighlightConfig<'_>) -> bool { match &mut highlight.tag { HlTag::StringLiteral if !config.strings => return false, HlTag::Comment if !config.comments => return false, diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs index 358ac9b4ef352..75e46b8ebfdef 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs @@ -1,6 +1,7 @@ //! Renders a bit of code as HTML. 
use hir::{EditionedFileId, Semantics}; +use ide_db::MiniCore; use oorandom::Rand32; use stdx::format_to; use syntax::AstNode; @@ -12,7 +13,7 @@ use crate::{ pub(crate) fn highlight_as_html_with_config( db: &RootDatabase, - config: HighlightConfig, + config: &HighlightConfig<'_>, file_id: FileId, rainbow: bool, ) -> String { @@ -60,7 +61,7 @@ pub(crate) fn highlight_as_html_with_config( pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String { highlight_as_html_with_config( db, - HighlightConfig { + &HighlightConfig { strings: true, comments: true, punctuation: true, @@ -70,6 +71,7 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo inject_doc_comment: true, macro_bang: true, syntactic_name_ref_highlighting: false, + minicore: MiniCore::default(), }, file_id, rainbow, diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs index efc77823a2a45..7955f5ac0de99 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs @@ -4,9 +4,9 @@ use std::mem; use either::Either; use hir::{EditionedFileId, HirFileId, InFile, Semantics, sym}; +use ide_db::range_mapper::RangeMapper; use ide_db::{ - SymbolKind, active_parameter::ActiveParameter, defs::Definition, - documentation::docs_with_rangemap, rust_doc::is_rust_fence, + SymbolKind, defs::Definition, documentation::docs_with_rangemap, rust_doc::is_rust_fence, }; use syntax::{ AstToken, NodeOrToken, SyntaxNode, TextRange, TextSize, @@ -16,85 +16,56 @@ use syntax::{ use crate::{ Analysis, HlMod, HlRange, HlTag, RootDatabase, doc_links::{doc_attributes, extract_definitions_from_docs, resolve_doc_path_for_def}, - syntax_highlighting::{HighlightConfig, highlights::Highlights, injector::Injector}, + syntax_highlighting::{HighlightConfig, highlights::Highlights}, }; pub(super) fn ra_fixture( hl: &mut Highlights, sema: &Semantics<'_, RootDatabase>, - config: HighlightConfig, + config: &HighlightConfig<'_>, literal: &ast::String, expanded: &ast::String, ) -> Option<()> { - let active_parameter = - hir::attach_db(sema.db, || ActiveParameter::at_token(sema, expanded.syntax().clone()))?; - let has_rust_fixture_attr = active_parameter.attrs().is_some_and(|attrs| { - attrs.filter_map(|attr| attr.as_simple_path()).any(|path| { - path.segments() - .zip(["rust_analyzer", "rust_fixture"]) - .all(|(seg, name)| seg.name_ref().map_or(false, |nr| nr.text() == name)) - }) - }); - if !has_rust_fixture_attr { - return None; - } - let value = literal.value().ok()?; + let (analysis, fixture_analysis) = Analysis::from_ra_fixture_with_on_cursor( + sema, + literal.clone(), + expanded, + config.minicore, + &mut |range| { + hl.add(HlRange { + range, + highlight: HlTag::Keyword | HlMod::Injected, + binding_hash: None, + }); + }, + )?; if let Some(range) = literal.open_quote_text_range() { hl.add(HlRange { range, highlight: HlTag::StringLiteral.into(), binding_hash: None }) } - let mut inj = Injector::default(); - - let mut text = &*value; - let mut offset: TextSize = 0.into(); - - while !text.is_empty() { - let marker = "$0"; - let idx = text.find(marker).unwrap_or(text.len()); - let (chunk, next) = text.split_at(idx); - inj.add(chunk, TextRange::at(offset, TextSize::of(chunk))); - - text = next; - offset += TextSize::of(chunk); - - if let Some(next) = text.strip_prefix(marker) { - if let Some(range) = 
literal.map_range_up(TextRange::at(offset, TextSize::of(marker))) { - hl.add(HlRange { - range, - highlight: HlTag::Keyword | HlMod::Injected, - binding_hash: None, - }); - } - - text = next; - - let marker_len = TextSize::of(marker); - offset += marker_len; - } - } - - let (analysis, tmp_file_id) = Analysis::from_single_file(inj.take_text()); - - for mut hl_range in analysis - .highlight( - HighlightConfig { - syntactic_name_ref_highlighting: false, - comments: true, - punctuation: true, - operator: true, - strings: true, - specialize_punctuation: config.specialize_punctuation, - specialize_operator: config.operator, - inject_doc_comment: config.inject_doc_comment, - macro_bang: config.macro_bang, - }, - tmp_file_id, - ) - .unwrap() - { - for range in inj.map_range_up(hl_range.range) { - if let Some(range) = literal.map_range_up(range) { + for tmp_file_id in fixture_analysis.files() { + for mut hl_range in analysis + .highlight( + HighlightConfig { + syntactic_name_ref_highlighting: false, + comments: true, + punctuation: true, + operator: true, + strings: true, + specialize_punctuation: config.specialize_punctuation, + specialize_operator: config.operator, + inject_doc_comment: config.inject_doc_comment, + macro_bang: config.macro_bang, + // What if there is a fixture inside a fixture? It's fixtures all the way down. + // (In fact, we have a fixture inside a fixture in our test suite!) + minicore: config.minicore, + }, + tmp_file_id, + ) + .unwrap() + { + for range in fixture_analysis.map_range_up(tmp_file_id, hl_range.range) { hl_range.range = range; hl_range.highlight |= HlMod::Injected; hl.add(hl_range); @@ -116,7 +87,7 @@ const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"]; pub(super) fn doc_comment( hl: &mut Highlights, sema: &Semantics<'_, RootDatabase>, - config: HighlightConfig, + config: &HighlightConfig<'_>, src_file_id: EditionedFileId, node: &SyntaxNode, ) { @@ -128,39 +99,37 @@ pub(super) fn doc_comment( // Extract intra-doc links and emit highlights for them. if let Some((docs, doc_mapping)) = docs_with_rangemap(sema.db, &attributes) { - hir::attach_db(sema.db, || { - extract_definitions_from_docs(&docs) - .into_iter() - .filter_map(|(range, link, ns)| { - doc_mapping - .map(range) - .filter(|(mapping, _)| mapping.file_id == src_file_id) - .and_then(|(InFile { value: mapped_range, .. }, attr_id)| { - Some(mapped_range).zip(resolve_doc_path_for_def( - sema.db, - def, - &link, - ns, - attr_id.is_inner_attr(), - )) - }) - }) - .for_each(|(range, def)| { - hl.add(HlRange { - range, - highlight: module_def_to_hl_tag(def) - | HlMod::Documentation - | HlMod::Injected - | HlMod::IntraDocLink, - binding_hash: None, + extract_definitions_from_docs(&docs) + .into_iter() + .filter_map(|(range, link, ns)| { + doc_mapping + .map(range) + .filter(|(mapping, _)| mapping.file_id == src_file_id) + .and_then(|(InFile { value: mapped_range, .. }, attr_id)| { + Some(mapped_range).zip(resolve_doc_path_for_def( + sema.db, + def, + &link, + ns, + attr_id.is_inner_attr(), + )) }) + }) + .for_each(|(range, def)| { + hl.add(HlRange { + range, + highlight: module_def_to_hl_tag(def) + | HlMod::Documentation + | HlMod::Injected + | HlMod::IntraDocLink, + binding_hash: None, }) - }); + }) } // Extract doc-test sources from the docs and calculate highlighting for them. 
- let mut inj = Injector::default(); + let mut inj = RangeMapper::default(); inj.add_unmapped("fn doctest() {\n"); let attrs_source_map = attributes.source_map(sema.db); @@ -249,7 +218,7 @@ pub(super) fn doc_comment( if let Ok(ranges) = analysis.with_db(|db| { super::highlight( db, - HighlightConfig { + &HighlightConfig { syntactic_name_ref_highlighting: true, comments: true, punctuation: true, @@ -259,6 +228,7 @@ pub(super) fn doc_comment( specialize_operator: config.operator, inject_doc_comment: config.inject_doc_comment, macro_bang: config.macro_bang, + minicore: config.minicore, }, tmp_file_id, None, diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/injector.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/injector.rs deleted file mode 100644 index c30f797324967..0000000000000 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/injector.rs +++ /dev/null @@ -1,77 +0,0 @@ -//! Extracts a subsequence of a text document, remembering the mapping of ranges -//! between original and extracted texts. -use std::ops::{self, Sub}; - -use stdx::equal_range_by; -use syntax::{TextRange, TextSize}; - -#[derive(Default)] -pub(super) struct Injector { - buf: String, - ranges: Vec<(TextRange, Option>)>, -} - -impl Injector { - pub(super) fn add(&mut self, text: &str, source_range: TextRange) { - let len = TextSize::of(text); - assert_eq!(len, source_range.len()); - self.add_impl(text, Some(source_range.start())); - } - - pub(super) fn add_unmapped(&mut self, text: &str) { - self.add_impl(text, None); - } - - fn add_impl(&mut self, text: &str, source: Option) { - let len = TextSize::of(text); - let target_range = TextRange::at(TextSize::of(&self.buf), len); - self.ranges.push((target_range, source.map(|it| Delta::new(target_range.start(), it)))); - self.buf.push_str(text); - } - - pub(super) fn take_text(&mut self) -> String { - std::mem::take(&mut self.buf) - } - - pub(super) fn map_range_up(&self, range: TextRange) -> impl Iterator + '_ { - equal_range_by(&self.ranges, |&(r, _)| TextRange::ordering(r, range)).filter_map(move |i| { - let (target_range, delta) = self.ranges[i]; - let intersection = target_range.intersect(range).unwrap(); - Some(intersection + delta?) - }) - } -} - -#[derive(Clone, Copy)] -enum Delta { - Add(T), - Sub(T), -} - -impl Delta { - fn new(from: T, to: T) -> Delta - where - T: Ord + Sub, - { - if to >= from { Delta::Add(to - from) } else { Delta::Sub(from - to) } - } -} - -impl ops::Add> for TextSize { - type Output = TextSize; - - fn add(self, rhs: Delta) -> TextSize { - match rhs { - Delta::Add(it) => self + it, - Delta::Sub(it) => self - it, - } - } -} - -impl ops::Add> for TextRange { - type Output = TextRange; - - fn add(self, rhs: Delta) -> TextRange { - TextRange::at(self.start() + rhs, self.len()) - } -} diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html index 3b468ab6dba65..579c6ceadcb8f 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html @@ -43,18 +43,19 @@
fn fixture(#[rust_analyzer::rust_fixture] ra_fixture: &str) {}
 
 fn main() {
-    fixture(r#"
-trait Foo {
-    fn foo() {
-        println!("2 + 2 = {}", 4);
-    }
+    fixture(r#"
+@@- minicore: sized
+trait Foo: Sized {
+    fn foo() {
+        println!("2 + 2 = {}", 4);
+    }
 }"#
     );
-    fixture(r"
-fn foo() {
-    foo($0{
-        92
-    }$0)
+    fixture(r"
+fn foo() {
+    foo($0{
+        92
+    }$0)
 }"
     );
 }
\ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection_2.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection_2.html new file mode 100644 index 0000000000000..fc2d9a3870161 --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection_2.html @@ -0,0 +1,61 @@ + + +
fn fixture(#[rust_analyzer::rust_fixture] ra_fixture: &str) {}
+
+fn main() {
+    fixture(r#"
+@@- /main.rs crate:main deps:other_crate
+fn test() {
+    let x = other_crate::foo::S::thing();
+    x;
+} //^ i128
+
+@@- /lib.rs crate:other_crate
+pub mod foo {
+    pub struct S;
+    impl S {
+        pub fn thing() -> i128 { 0 }
+    }
+}
+    "#);
+}
\ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs index 8198701d68432..4e84127c29f82 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs @@ -1,13 +1,13 @@ use std::time::Instant; use expect_test::{ExpectFile, expect_file}; -use ide_db::SymbolKind; +use ide_db::{MiniCore, SymbolKind}; use span::Edition; use test_utils::{AssertLinear, bench, bench_fixture, skip_slow_tests}; use crate::{FileRange, HighlightConfig, HlTag, TextRange, fixture}; -const HL_CONFIG: HighlightConfig = HighlightConfig { +const HL_CONFIG: HighlightConfig<'_> = HighlightConfig { strings: true, comments: true, punctuation: true, @@ -17,6 +17,7 @@ const HL_CONFIG: HighlightConfig = HighlightConfig { inject_doc_comment: true, macro_bang: true, syntactic_name_ref_highlighting: false, + minicore: MiniCore::default(), }; #[test] @@ -1016,6 +1017,35 @@ impl t for foo { ) } +#[test] +fn test_injection_2() { + check_highlighting( + r##" +fn fixture(#[rust_analyzer::rust_fixture] ra_fixture: &str) {} + +fn main() { + fixture(r#" +@@- /main.rs crate:main deps:other_crate +fn test() { + let x = other_crate::foo::S::thing(); + x; +} //^ i128 + +@@- /lib.rs crate:other_crate +pub mod foo { + pub struct S; + impl S { + pub fn thing() -> i128 { 0 } + } +} + "#); +} +"##, + expect_file!["./test_data/highlight_injection_2.html"], + false, + ); +} + #[test] fn test_injection() { check_highlighting( @@ -1024,7 +1054,8 @@ fn fixture(#[rust_analyzer::rust_fixture] ra_fixture: &str) {} fn main() { fixture(r#" -trait Foo { +@@- minicore: sized +trait Foo: Sized { fn foo() { println!("2 + 2 = {}", 4); } @@ -1223,7 +1254,7 @@ fn foo(x: &fn(&dyn Trait)) {} /// Note that the `snapshot` file is overwritten by the rendered HTML. fn check_highlighting_with_config( #[rust_analyzer::rust_fixture] ra_fixture: &str, - config: HighlightConfig, + config: HighlightConfig<'_>, expect: ExpectFile, rainbow: bool, ) { diff --git a/src/tools/rust-analyzer/crates/macros/src/lib.rs b/src/tools/rust-analyzer/crates/macros/src/lib.rs index 8bafcf498c510..3f90ecc8f902d 100644 --- a/src/tools/rust-analyzer/crates/macros/src/lib.rs +++ b/src/tools/rust-analyzer/crates/macros/src/lib.rs @@ -162,3 +162,42 @@ fn has_ignore_attr(attrs: &[syn::Attribute], name: &'static str, meta: &'static ignored } + +decl_derive!( + [UpmapFromRaFixture] => upmap_from_ra_fixture +); + +fn upmap_from_ra_fixture(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream { + if let syn::Data::Union(_) = s.ast().data { + panic!("cannot derive on union") + } + + s.add_bounds(synstructure::AddBounds::Generics); + s.bind_with(|_| synstructure::BindStyle::Move); + let body = s.each_variant(|vi| { + let bindings = vi.bindings(); + vi.construct(|_, index| { + let bind = &bindings[index]; + + quote! { + ::ide_db::ra_fixture::UpmapFromRaFixture::upmap_from_ra_fixture( + #bind, __analysis, __virtual_file_id, __real_file_id, + )? + } + }) + }); + + s.bound_impl( + quote!(::ide_db::ra_fixture::UpmapFromRaFixture), + quote! 
{ + fn upmap_from_ra_fixture( + self, + __analysis: &::ide_db::ra_fixture::RaFixtureAnalysis, + __virtual_file_id: ::ide_db::ra_fixture::FileId, + __real_file_id: ::ide_db::ra_fixture::FileId, + ) -> Result { + Ok(match self { #body }) + } + }, + ) +} diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs index 2a9ef981291ec..717bd230a21e9 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -25,7 +25,7 @@ use ide::{ InlayHintsConfig, LineCol, RootDatabase, }; use ide_db::{ - EditionedFileId, LineIndexDatabase, SnippetCap, + EditionedFileId, LineIndexDatabase, MiniCore, SnippetCap, base_db::{SourceDatabase, salsa::Database}, }; use itertools::Itertools; @@ -1194,6 +1194,7 @@ impl flags::AnalysisStats { closing_brace_hints_min_lines: Some(20), fields_to_resolve: InlayFieldsToResolve::empty(), range_exclusive_hints: true, + minicore: MiniCore::default(), }, analysis.editioned_file_id_to_vfs(file_id), None, @@ -1203,26 +1204,25 @@ impl flags::AnalysisStats { bar.finish_and_clear(); let mut bar = create_bar(); + let annotation_config = AnnotationConfig { + binary_target: true, + annotate_runnables: true, + annotate_impls: true, + annotate_references: false, + annotate_method_references: false, + annotate_enum_variant_references: false, + location: ide::AnnotationLocation::AboveName, + minicore: MiniCore::default(), + }; for &file_id in file_ids { let msg = format!("annotations: {}", vfs.file_path(file_id.file_id(db))); bar.set_message(move || msg.clone()); analysis - .annotations( - &AnnotationConfig { - binary_target: true, - annotate_runnables: true, - annotate_impls: true, - annotate_references: false, - annotate_method_references: false, - annotate_enum_variant_references: false, - location: ide::AnnotationLocation::AboveName, - }, - analysis.editioned_file_id_to_vfs(file_id), - ) + .annotations(&annotation_config, analysis.editioned_file_id_to_vfs(file_id)) .unwrap() .into_iter() .for_each(|annotation| { - _ = analysis.resolve_annotation(annotation); + _ = analysis.resolve_annotation(&annotation_config, annotation); }); bar.inc(1); } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index 96b65838ae426..652c2e32ffa68 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -8,14 +8,14 @@ use std::{env, fmt, iter, ops::Not, sync::OnceLock}; use cfg::{CfgAtom, CfgDiff}; use hir::Symbol; use ide::{ - AssistConfig, CallHierarchyConfig, CallableSnippets, CompletionConfig, - CompletionFieldsToResolve, DiagnosticsConfig, GenericParameterHints, HighlightConfig, - HighlightRelatedConfig, HoverConfig, HoverDocFormat, InlayFieldsToResolve, InlayHintsConfig, - JoinLinesConfig, MemoryLayoutHoverConfig, MemoryLayoutHoverRenderKind, Snippet, SnippetScope, - SourceRootId, + AnnotationConfig, AssistConfig, CallHierarchyConfig, CallableSnippets, CompletionConfig, + CompletionFieldsToResolve, DiagnosticsConfig, GenericParameterHints, GotoDefinitionConfig, + HighlightConfig, HighlightRelatedConfig, HoverConfig, HoverDocFormat, InlayFieldsToResolve, + InlayHintsConfig, JoinLinesConfig, MemoryLayoutHoverConfig, MemoryLayoutHoverRenderKind, + Snippet, SnippetScope, SourceRootId, }; use ide_db::{ - SnippetCap, + MiniCore, SnippetCap, 
assists::ExprFillDefaultMode, imports::insert_use::{ImportGranularity, InsertUseConfig, PrefixKind}, }; @@ -1454,6 +1454,23 @@ impl LensConfig { pub fn references(&self) -> bool { self.method_refs || self.refs_adt || self.refs_trait || self.enum_variant_refs } + + pub fn into_annotation_config<'a>( + self, + binary_target: bool, + minicore: MiniCore<'a>, + ) -> AnnotationConfig<'a> { + AnnotationConfig { + binary_target, + annotate_runnables: self.runnable(), + annotate_impls: self.implementations, + annotate_references: self.refs_adt, + annotate_method_references: self.method_refs, + annotate_enum_variant_references: self.enum_variant_refs, + location: self.location.into(), + minicore, + } + } } #[derive(Clone, Debug, PartialEq, Eq)] @@ -1688,11 +1705,15 @@ impl Config { } } - pub fn call_hierarchy(&self) -> CallHierarchyConfig { - CallHierarchyConfig { exclude_tests: self.references_excludeTests().to_owned() } + pub fn call_hierarchy<'a>(&self, minicore: MiniCore<'a>) -> CallHierarchyConfig<'a> { + CallHierarchyConfig { exclude_tests: self.references_excludeTests().to_owned(), minicore } } - pub fn completion(&self, source_root: Option) -> CompletionConfig<'_> { + pub fn completion<'a>( + &'a self, + source_root: Option, + minicore: MiniCore<'a>, + ) -> CompletionConfig<'a> { let client_capability_fields = self.completion_resolve_support_properties(); CompletionConfig { enable_postfix_completions: self.completion_postfix_enable(source_root).to_owned(), @@ -1746,6 +1767,7 @@ impl Config { }) .collect(), exclude_traits: self.completion_excludeTraits(source_root), + minicore, } } @@ -1820,7 +1842,7 @@ impl Config { } } - pub fn hover(&self) -> HoverConfig { + pub fn hover<'a>(&self, minicore: MiniCore<'a>) -> HoverConfig<'a> { let mem_kind = |kind| match kind { MemoryLayoutHoverRenderKindDef::Both => MemoryLayoutHoverRenderKind::Both, MemoryLayoutHoverRenderKindDef::Decimal => MemoryLayoutHoverRenderKind::Decimal, @@ -1853,10 +1875,15 @@ impl Config { None => ide::SubstTyLen::Unlimited, }, show_drop_glue: *self.hover_dropGlue_enable(), + minicore, } } - pub fn inlay_hints(&self) -> InlayHintsConfig { + pub fn goto_definition<'a>(&self, minicore: MiniCore<'a>) -> GotoDefinitionConfig<'a> { + GotoDefinitionConfig { minicore } + } + + pub fn inlay_hints<'a>(&self, minicore: MiniCore<'a>) -> InlayHintsConfig<'a> { let client_capability_fields = self.inlay_hint_resolve_support_properties(); InlayHintsConfig { @@ -1938,6 +1965,7 @@ impl Config { ), implicit_drop_hints: self.inlayHints_implicitDrops_enable().to_owned(), range_exclusive_hints: self.inlayHints_rangeExclusiveHints_enable().to_owned(), + minicore, } } @@ -1975,7 +2003,7 @@ impl Config { self.semanticHighlighting_nonStandardTokens().to_owned() } - pub fn highlighting_config(&self) -> HighlightConfig { + pub fn highlighting_config<'a>(&self, minicore: MiniCore<'a>) -> HighlightConfig<'a> { HighlightConfig { strings: self.semanticHighlighting_strings_enable().to_owned(), comments: self.semanticHighlighting_comments_enable().to_owned(), @@ -1990,6 +2018,7 @@ impl Config { .to_owned(), inject_doc_comment: self.semanticHighlighting_doc_comment_inject_enable().to_owned(), syntactic_name_ref_highlighting: false, + minicore, } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index ce6644f725ca6..f557dd5cb0927 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ 
b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -13,7 +13,10 @@ use cargo_metadata::PackageId; use crossbeam_channel::{Receiver, Sender, unbounded}; use hir::ChangeWithProcMacros; use ide::{Analysis, AnalysisHost, Cancellable, FileId, SourceRootId}; -use ide_db::base_db::{Crate, ProcMacroPaths, SourceDatabase}; +use ide_db::{ + MiniCore, + base_db::{Crate, ProcMacroPaths, SourceDatabase}, +}; use itertools::Itertools; use load_cargo::SourceRootConfig; use lsp_types::{SemanticTokens, Url}; @@ -188,6 +191,14 @@ pub(crate) struct GlobalState { /// This is marked true if we failed to load a crate root file at crate graph creation, /// which will usually end up causing a bunch of incorrect diagnostics on startup. pub(crate) incomplete_crate_graph: bool, + + pub(crate) minicore: MiniCoreRustAnalyzerInternalOnly, +} + +// FIXME: This should move to the VFS once the rewrite is done. +#[derive(Debug, Clone, Default)] +pub(crate) struct MiniCoreRustAnalyzerInternalOnly { + pub(crate) minicore_text: Option, } /// An immutable snapshot of the world's state at a point in time. @@ -204,6 +215,7 @@ pub(crate) struct GlobalStateSnapshot { // FIXME: Can we derive this from somewhere else? pub(crate) proc_macros_loaded: bool, pub(crate) flycheck: Arc<[FlycheckHandle]>, + minicore: MiniCoreRustAnalyzerInternalOnly, } impl std::panic::UnwindSafe for GlobalStateSnapshot {} @@ -304,6 +316,8 @@ impl GlobalState { deferred_task_queue: task_queue, incomplete_crate_graph: false, + + minicore: MiniCoreRustAnalyzerInternalOnly::default(), }; // Apply any required database inputs from the config. this.update_configuration(config); @@ -550,6 +564,7 @@ impl GlobalState { workspaces: Arc::clone(&self.workspaces), analysis: self.analysis_host.analysis(), vfs: Arc::clone(&self.vfs), + minicore: self.minicore.clone(), check_fixes: Arc::clone(&self.diagnostics.check_fixes), mem_docs: self.mem_docs.clone(), semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache), @@ -838,6 +853,14 @@ impl GlobalStateSnapshot { pub(crate) fn file_exists(&self, file_id: FileId) -> bool { self.vfs.read().0.exists(file_id) } + + #[inline] + pub(crate) fn minicore(&self) -> MiniCore<'_> { + match &self.minicore.minicore_text { + Some(minicore) => MiniCore::new(minicore), + None => MiniCore::default(), + } + } } pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs index 6cb28aecf748f..55d092f30f6b6 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs @@ -7,8 +7,8 @@ use anyhow::Context; use base64::{Engine, prelude::BASE64_STANDARD}; use ide::{ - AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, CompletionFieldsToResolve, - FilePosition, FileRange, FileStructureConfig, HoverAction, HoverGotoTypeData, + AssistKind, AssistResolveStrategy, Cancellable, CompletionFieldsToResolve, FilePosition, + FileRange, FileStructureConfig, FindAllRefsConfig, HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, RangeInfo, ReferenceCategory, Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit, }; @@ -811,7 +811,8 @@ pub(crate) fn handle_goto_definition( let _p = tracing::info_span!("handle_goto_definition").entered(); let position = try_default!(from_proto::file_position(&snap, params.text_document_position_params)?); - let nav_info = match 
snap.analysis.goto_definition(position)? { + let config = snap.config.goto_definition(snap.minicore()); + let nav_info = match snap.analysis.goto_definition(position, &config)? { None => return Ok(None), Some(it) => it, }; @@ -829,7 +830,8 @@ pub(crate) fn handle_goto_declaration( &snap, params.text_document_position_params.clone() )?); - let nav_info = match snap.analysis.goto_declaration(position)? { + let config = snap.config.goto_definition(snap.minicore()); + let nav_info = match snap.analysis.goto_declaration(position, &config)? { None => return handle_goto_definition(snap, params), Some(it) => it, }; @@ -1106,7 +1108,7 @@ pub(crate) fn handle_completion( context.and_then(|ctx| ctx.trigger_character).and_then(|s| s.chars().next()); let source_root = snap.analysis.source_root_id(position.file_id)?; - let completion_config = &snap.config.completion(Some(source_root)); + let completion_config = &snap.config.completion(Some(source_root), snap.minicore()); // FIXME: We should fix up the position when retrying the cancelled request instead position.offset = position.offset.min(line_index.index.len()); let items = match snap.analysis.completions( @@ -1160,7 +1162,8 @@ pub(crate) fn handle_completion_resolve( }; let source_root = snap.analysis.source_root_id(file_id)?; - let mut forced_resolve_completions_config = snap.config.completion(Some(source_root)); + let mut forced_resolve_completions_config = + snap.config.completion(Some(source_root), snap.minicore()); forced_resolve_completions_config.fields_to_resolve = CompletionFieldsToResolve::empty(); let position = FilePosition { file_id, offset }; @@ -1274,7 +1277,7 @@ pub(crate) fn handle_hover( }; let file_range = try_default!(from_proto::file_range(&snap, ¶ms.text_document, range)?); - let hover = snap.config.hover(); + let hover = snap.config.hover(snap.minicore()); let info = match snap.analysis.hover(&hover, file_range)? { None => return Ok(None), Some(info) => info, @@ -1360,7 +1363,11 @@ pub(crate) fn handle_references( let exclude_imports = snap.config.find_all_refs_exclude_imports(); let exclude_tests = snap.config.find_all_refs_exclude_tests(); - let Some(refs) = snap.analysis.find_all_refs(position, None)? else { + let Some(refs) = snap.analysis.find_all_refs( + position, + &FindAllRefsConfig { search_scope: None, minicore: snap.minicore() }, + )? + else { return Ok(None); }; @@ -1615,8 +1622,8 @@ pub(crate) fn handle_code_lens( let target_spec = TargetSpec::for_file(&snap, file_id)?; let annotations = snap.analysis.annotations( - &AnnotationConfig { - binary_target: target_spec + &lens_config.into_annotation_config( + target_spec .map(|spec| { matches!( spec.target_kind(), @@ -1624,13 +1631,8 @@ pub(crate) fn handle_code_lens( ) }) .unwrap_or(false), - annotate_runnables: lens_config.runnable(), - annotate_impls: lens_config.implementations, - annotate_references: lens_config.refs_adt, - annotate_method_references: lens_config.method_refs, - annotate_enum_variant_references: lens_config.enum_variant_refs, - location: lens_config.location.into(), - }, + snap.minicore(), + ), file_id, )?; @@ -1653,7 +1655,8 @@ pub(crate) fn handle_code_lens_resolve( let Some(annotation) = from_proto::annotation(&snap, code_lens.range, resolve)? 
else { return Ok(code_lens); }; - let annotation = snap.analysis.resolve_annotation(annotation)?; + let config = snap.config.lens().into_annotation_config(false, snap.minicore()); + let annotation = snap.analysis.resolve_annotation(&config, annotation)?; let mut acc = Vec::new(); to_proto::code_lens(&mut acc, &snap, annotation)?; @@ -1736,7 +1739,7 @@ pub(crate) fn handle_inlay_hints( range.end().min(line_index.index.len()), ); - let inlay_hints_config = snap.config.inlay_hints(); + let inlay_hints_config = snap.config.inlay_hints(snap.minicore()); Ok(Some( snap.analysis .inlay_hints(&inlay_hints_config, file_id, Some(range))? @@ -1777,7 +1780,7 @@ pub(crate) fn handle_inlay_hints_resolve( let line_index = snap.file_line_index(file_id)?; let range = from_proto::text_range(&line_index, resolve_data.resolve_range)?; - let mut forced_resolve_inlay_hints_config = snap.config.inlay_hints(); + let mut forced_resolve_inlay_hints_config = snap.config.inlay_hints(snap.minicore()); forced_resolve_inlay_hints_config.fields_to_resolve = InlayFieldsToResolve::empty(); let resolve_hints = snap.analysis.inlay_hints_resolve( &forced_resolve_inlay_hints_config, @@ -1816,7 +1819,8 @@ pub(crate) fn handle_call_hierarchy_prepare( let position = try_default!(from_proto::file_position(&snap, params.text_document_position_params)?); - let nav_info = match snap.analysis.call_hierarchy(position)? { + let config = snap.config.call_hierarchy(snap.minicore()); + let nav_info = match snap.analysis.call_hierarchy(position, &config)? { None => return Ok(None), Some(it) => it, }; @@ -1842,8 +1846,8 @@ pub(crate) fn handle_call_hierarchy_incoming( let frange = try_default!(from_proto::file_range(&snap, &doc, item.selection_range)?); let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() }; - let config = snap.config.call_hierarchy(); - let call_items = match snap.analysis.incoming_calls(config, fpos)? { + let config = snap.config.call_hierarchy(snap.minicore()); + let call_items = match snap.analysis.incoming_calls(&config, fpos)? { None => return Ok(None), Some(it) => it, }; @@ -1881,8 +1885,8 @@ pub(crate) fn handle_call_hierarchy_outgoing( let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() }; let line_index = snap.file_line_index(fpos.file_id)?; - let config = snap.config.call_hierarchy(); - let call_items = match snap.analysis.outgoing_calls(config, fpos)? { + let config = snap.config.call_hierarchy(snap.minicore()); + let call_items = match snap.analysis.outgoing_calls(&config, fpos)? { None => return Ok(None), Some(it) => it, }; @@ -1916,7 +1920,7 @@ pub(crate) fn handle_semantic_tokens_full( let text = snap.analysis.file_text(file_id)?; let line_index = snap.file_line_index(file_id)?; - let mut highlight_config = snap.config.highlighting_config(); + let mut highlight_config = snap.config.highlighting_config(snap.minicore()); // Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet. highlight_config.syntactic_name_ref_highlighting = snap.workspaces.is_empty() || !snap.proc_macros_loaded; @@ -1946,7 +1950,7 @@ pub(crate) fn handle_semantic_tokens_full_delta( let text = snap.analysis.file_text(file_id)?; let line_index = snap.file_line_index(file_id)?; - let mut highlight_config = snap.config.highlighting_config(); + let mut highlight_config = snap.config.highlighting_config(snap.minicore()); // Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet. 
highlight_config.syntactic_name_ref_highlighting = snap.workspaces.is_empty() || !snap.proc_macros_loaded; @@ -1988,7 +1992,7 @@ pub(crate) fn handle_semantic_tokens_range( let text = snap.analysis.file_text(frange.file_id)?; let line_index = snap.file_line_index(frange.file_id)?; - let mut highlight_config = snap.config.highlighting_config(); + let mut highlight_config = snap.config.highlighting_config(snap.minicore()); // Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet. highlight_config.syntactic_name_ref_highlighting = snap.workspaces.is_empty() || !snap.proc_macros_loaded; @@ -2156,7 +2160,13 @@ fn show_ref_command_link( ) -> Option { if snap.config.hover_actions().references && snap.config.client_commands().show_reference - && let Some(ref_search_res) = snap.analysis.find_all_refs(*position, None).unwrap_or(None) + && let Some(ref_search_res) = snap + .analysis + .find_all_refs( + *position, + &FindAllRefsConfig { search_scope: None, minicore: snap.minicore() }, + ) + .unwrap_or(None) { let uri = to_proto::url(snap, position.file_id); let line_index = snap.file_line_index(position.file_id).ok()?; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs index 84b7888258f87..38ee9cbe7fc86 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -16,7 +16,7 @@ use ide::{ FilePosition, TextSize, }; use ide_db::{ - SnippetCap, + MiniCore, SnippetCap, imports::insert_use::{ImportGranularity, InsertUseConfig}, }; use project_model::CargoConfig; @@ -186,6 +186,7 @@ fn integrated_completion_benchmark() { exclude_traits: &[], enable_auto_await: true, enable_auto_iter: true, + minicore: MiniCore::default(), }; let position = FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() }; @@ -240,6 +241,7 @@ fn integrated_completion_benchmark() { exclude_traits: &[], enable_auto_await: true, enable_auto_iter: true, + minicore: MiniCore::default(), }; let position = FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() }; @@ -292,6 +294,7 @@ fn integrated_completion_benchmark() { exclude_traits: &[], enable_auto_await: true, enable_auto_iter: true, + minicore: MiniCore::default(), }; let position = FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() }; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs index d51ddb86d197f..cd384ca713ec5 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs @@ -16,7 +16,9 @@ use ide::{ SnippetEdit, SourceChange, StructureNodeKind, SymbolKind, TextEdit, TextRange, TextSize, UpdateTest, }; -use ide_db::{FxHasher, assists, rust_doc::format_docs, source_change::ChangeAnnotationId}; +use ide_db::{ + FxHasher, MiniCore, assists, rust_doc::format_docs, source_change::ChangeAnnotationId, +}; use itertools::Itertools; use paths::{Utf8Component, Utf8Prefix}; use semver::VersionReq; @@ -270,7 +272,7 @@ pub(crate) fn completion_items( ); } - if let Some(limit) = config.completion(None).limit { + if let Some(limit) = config.completion(None, MiniCore::default()).limit { res.sort_by(|item1, item2| item1.sort_text.cmp(&item2.sort_text)); res.truncate(limit); } 
@@ -400,16 +402,17 @@ fn completion_item( set_score(&mut lsp_item, max_relevance, item.relevance); - let imports = - if config.completion(None).enable_imports_on_the_fly && !item.import_to_add.is_empty() { - item.import_to_add - .clone() - .into_iter() - .map(|import_path| lsp_ext::CompletionImport { full_import_path: import_path }) - .collect() - } else { - Vec::new() - }; + let imports = if config.completion(None, MiniCore::default()).enable_imports_on_the_fly + && !item.import_to_add.is_empty() + { + item.import_to_add + .clone() + .into_iter() + .map(|import_path| lsp_ext::CompletionImport { full_import_path: import_path }) + .collect() + } else { + Vec::new() + }; let (ref_resolve_data, resolve_data) = if something_to_resolve || !imports.is_empty() { let ref_resolve_data = if ref_match.is_some() { let ref_resolve_data = lsp_ext::CompletionResolveData { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs index 3e80e8b7bdfb5..c0947b2a291ec 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs @@ -847,6 +847,13 @@ impl GlobalState { self.debounce_workspace_fetch(); let vfs = &mut self.vfs.write().0; for (path, contents) in files { + if matches!(path.name_and_extension(), Some(("minicore", Some("rs")))) { + // Not a lot of bad can happen from mistakenly identifying `minicore`, so proceed with that. + self.minicore.minicore_text = contents + .as_ref() + .and_then(|contents| String::from_utf8(contents.clone()).ok()); + } + let path = VfsPath::from(path); // if the file is in mem docs, it's managed by the client via notifications // so only set it if its not in there diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs index d9223e8216da3..e1a9f3ac03418 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs @@ -201,6 +201,10 @@ pub trait IsString: AstToken { None } } + fn map_offset_down(&self, offset: TextSize) -> Option { + let contents_range = self.text_range_between_quotes()?; + offset.checked_sub(contents_range.start()) + } } impl IsString for ast::String { diff --git a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs index a4549794db336..aefe81f83e294 100644 --- a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs +++ b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs @@ -24,7 +24,7 @@ use paths::AbsPathBuf; use span::{Edition, FileId, Span}; use stdx::itertools::Itertools; use test_utils::{ - CURSOR_MARKER, ESCAPED_CURSOR_MARKER, Fixture, FixtureWithProjectMeta, RangeOrOffset, + CURSOR_MARKER, ESCAPED_CURSOR_MARKER, Fixture, FixtureWithProjectMeta, MiniCore, RangeOrOffset, extract_range_or_offset, }; use triomphe::Arc; @@ -69,7 +69,12 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static { proc_macros: Vec<(String, ProcMacro)>, ) -> Self { let mut db = Self::default(); - let fixture = ChangeFixture::parse_with_proc_macros(&db, ra_fixture, proc_macros); + let fixture = ChangeFixture::parse_with_proc_macros( + &db, + ra_fixture, + MiniCore::RAW_SOURCE, + proc_macros, + ); fixture.change.apply(&mut db); assert!(fixture.file_position.is_none()); db @@ -112,8 +117,10 @@ impl WithFixture for DB pub struct ChangeFixture { pub file_position: Option<(EditionedFileId, 
RangeOrOffset)>, + pub file_lines: Vec, pub files: Vec, pub change: ChangeWithProcMacros, + pub sysroot_files: Vec, } const SOURCE_ROOT_PREFIX: &str = "/"; @@ -123,12 +130,13 @@ impl ChangeFixture { db: &dyn salsa::Database, #[rust_analyzer::rust_fixture] ra_fixture: &str, ) -> ChangeFixture { - Self::parse_with_proc_macros(db, ra_fixture, Vec::new()) + Self::parse_with_proc_macros(db, ra_fixture, MiniCore::RAW_SOURCE, Vec::new()) } pub fn parse_with_proc_macros( db: &dyn salsa::Database, #[rust_analyzer::rust_fixture] ra_fixture: &str, + minicore_raw: &str, mut proc_macro_defs: Vec<(String, ProcMacro)>, ) -> ChangeFixture { let FixtureWithProjectMeta { @@ -149,6 +157,8 @@ impl ChangeFixture { let mut source_change = FileChange::default(); let mut files = Vec::new(); + let mut sysroot_files = Vec::new(); + let mut file_lines = Vec::new(); let mut crate_graph = CrateGraphBuilder::default(); let mut crates = FxIndexMap::default(); let mut crate_deps = Vec::new(); @@ -173,6 +183,8 @@ impl ChangeFixture { let proc_macro_cwd = Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())); for entry in fixture { + file_lines.push(entry.line); + let mut range_or_offset = None; let text = if entry.text.contains(CURSOR_MARKER) { if entry.text.contains(ESCAPED_CURSOR_MARKER) { @@ -259,7 +271,9 @@ impl ChangeFixture { fs.insert(core_file, VfsPath::new_virtual_path("/sysroot/core/lib.rs".to_owned())); roots.push(SourceRoot::new_library(fs)); - source_change.change_file(core_file, Some(mini_core.source_code())); + sysroot_files.push(core_file); + + source_change.change_file(core_file, Some(mini_core.source_code(minicore_raw))); let core_crate = crate_graph.add_crate_root( core_file, @@ -348,6 +362,8 @@ impl ChangeFixture { ); roots.push(SourceRoot::new_library(fs)); + sysroot_files.push(proc_lib_file); + source_change.change_file(proc_lib_file, Some(source)); let all_crates = crate_graph.iter().collect::>(); @@ -396,7 +412,7 @@ impl ChangeFixture { change.source_change.set_roots(roots); change.source_change.set_crate_graph(crate_graph); - ChangeFixture { file_position, files, change } + ChangeFixture { file_position, file_lines, files, change, sysroot_files } } } diff --git a/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs b/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs index c024089a016f9..559894ee6205b 100644 --- a/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs +++ b/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs @@ -132,13 +132,17 @@ pub struct Fixture { pub library: bool, /// Actual file contents. All meta comments are stripped. pub text: String, + /// The line number in the original fixture of the beginning of this fixture. 
+ pub line: usize, } +#[derive(Debug)] pub struct MiniCore { activated_flags: Vec, valid_flags: Vec, } +#[derive(Debug)] pub struct FixtureWithProjectMeta { pub fixture: Vec, pub mini_core: Option, @@ -184,40 +188,49 @@ impl FixtureWithProjectMeta { let mut mini_core = None; let mut res: Vec = Vec::new(); let mut proc_macro_names = vec![]; + let mut first_row = 0; if let Some(meta) = fixture.strip_prefix("//- toolchain:") { + first_row += 1; let (meta, remain) = meta.split_once('\n').unwrap(); toolchain = Some(meta.trim().to_owned()); fixture = remain; } if let Some(meta) = fixture.strip_prefix("//- target_data_layout:") { + first_row += 1; let (meta, remain) = meta.split_once('\n').unwrap(); meta.trim().clone_into(&mut target_data_layout); fixture = remain; } if let Some(meta) = fixture.strip_prefix("//- target_arch:") { + first_row += 1; let (meta, remain) = meta.split_once('\n').unwrap(); meta.trim().clone_into(&mut target_arch); fixture = remain; } if let Some(meta) = fixture.strip_prefix("//- proc_macros:") { + first_row += 1; let (meta, remain) = meta.split_once('\n').unwrap(); proc_macro_names = meta.split(',').map(|it| it.trim().to_owned()).collect(); fixture = remain; } if let Some(meta) = fixture.strip_prefix("//- minicore:") { + first_row += 1; let (meta, remain) = meta.split_once('\n').unwrap(); mini_core = Some(MiniCore::parse(meta)); fixture = remain; } - let default = if fixture.contains("//-") { None } else { Some("//- /main.rs") }; + let default = + if fixture.contains("//- /") { None } else { Some((first_row - 1, "//- /main.rs")) }; - for (ix, line) in default.into_iter().chain(fixture.split_inclusive('\n')).enumerate() { + for (ix, line) in + default.into_iter().chain((first_row..).zip(fixture.split_inclusive('\n'))) + { if line.contains("//-") { assert!( line.starts_with("//-"), @@ -228,7 +241,7 @@ impl FixtureWithProjectMeta { } if let Some(line) = line.strip_prefix("//-") { - let meta = Self::parse_meta_line(line); + let meta = Self::parse_meta_line(line, (ix + 1).try_into().unwrap()); res.push(meta); } else { if matches!(line.strip_prefix("// "), Some(l) if l.trim().starts_with('/')) { @@ -252,7 +265,7 @@ impl FixtureWithProjectMeta { } //- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b env:OUTDIR=path/to,OTHER=foo - fn parse_meta_line(meta: &str) -> Fixture { + fn parse_meta_line(meta: &str, line: usize) -> Fixture { let meta = meta.trim(); let mut components = meta.split_ascii_whitespace(); @@ -317,6 +330,7 @@ impl FixtureWithProjectMeta { Fixture { path, text: String::new(), + line, krate, deps, extern_prelude, @@ -330,7 +344,7 @@ impl FixtureWithProjectMeta { } impl MiniCore { - const RAW_SOURCE: &'static str = include_str!("./minicore.rs"); + pub const RAW_SOURCE: &'static str = include_str!("./minicore.rs"); fn has_flag(&self, flag: &str) -> bool { self.activated_flags.iter().any(|it| it == flag) @@ -363,8 +377,8 @@ impl MiniCore { res } - pub fn available_flags() -> impl Iterator { - let lines = MiniCore::RAW_SOURCE.split_inclusive('\n'); + pub fn available_flags(raw_source: &str) -> impl Iterator { + let lines = raw_source.split_inclusive('\n'); lines .map_while(|x| x.strip_prefix("//!")) .skip_while(|line| !line.contains("Available flags:")) @@ -375,9 +389,9 @@ impl MiniCore { /// Strips parts of minicore.rs which are flagged by inactive flags. /// /// This is probably over-engineered to support flags dependencies. 
- pub fn source_code(mut self) -> String { + pub fn source_code(mut self, raw_source: &str) -> String { let mut buf = String::new(); - let mut lines = MiniCore::RAW_SOURCE.split_inclusive('\n'); + let mut lines = raw_source.split_inclusive('\n'); let mut implications = Vec::new(); From 9a2db55596ce6c2c08bfde8fb69cc4672cd85992 Mon Sep 17 00:00:00 2001 From: A4-Tacks Date: Fri, 17 Oct 2025 18:40:07 +0800 Subject: [PATCH 31/76] Support underscore suffix parameter hide inlayHints Using a suffix underscore to avoid keywords is a common technique, and inlay-hint hiding should support it. Example --- **Before this PR**: ```rust fn far(loop_: u32) {} fn faz(r#loop: u32) {} let loop_level = 0; far(loop_level); //^^^^^^^^^^ loop_ faz(loop_level); ``` **After this PR**: ```rust fn far(loop_: u32) {} fn faz(r#loop: u32) {} let loop_level = 0; far(loop_level); faz(loop_level); ``` --- .../crates/ide/src/inlay_hints/param_name.rs | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs index 754707784055a..d6271ce5adeba 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs @@ -124,12 +124,12 @@ fn should_hide_param_name_hint( // hide when: // - the parameter name is a suffix of the function's name // - the argument is a qualified constructing or call expression where the qualifier is an ADT - // - exact argument<->parameter match(ignoring leading underscore) or parameter is a prefix/suffix - // of argument with _ splitting it off + // - exact argument<->parameter match(ignoring leading and trailing underscore) or + // parameter is a prefix/suffix of argument with _ splitting it off // - param starts with `ra_fixture` // - param is a well known name in a unary function - let param_name = param_name.trim_start_matches('_'); + let param_name = param_name.trim_matches('_'); if param_name.is_empty() { return true; } @@ -540,6 +540,8 @@ fn enum_matches_param_name(completion_kind: CompletionKind) {} fn foo(param: u32) {} fn bar(param_eter: u32) {} fn baz(a_d_e: u32) {} +fn far(loop_: u32) {} +fn faz(r#loop: u32) {} enum CompletionKind { Keyword, @@ -590,6 +592,9 @@ fn main() { let param_eter2 = 0; bar(param_eter2); //^^^^^^^^^^^ param_eter + let loop_level = 0; + far(loop_level); + faz(loop_level); non_ident_pat((0, 0)); From d3263775c414d967ddd2a09eb95940a87beafa74 Mon Sep 17 00:00:00 2001 From: A4-Tacks Date: Sat, 18 Oct 2025 10:21:35 +0800 Subject: [PATCH 32/76] Migrate `generate_single_field_struct_from` assist to use `SyntaxEditor` --- .../generate_single_field_struct_from.rs | 21 +++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_single_field_struct_from.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_single_field_struct_from.rs index 6c302a2a6fbdf..a1ec763365669 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_single_field_struct_from.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_single_field_struct_from.rs @@ -5,9 +5,10 @@ use ide_db::{ RootDatabase, famous_defs::FamousDefs, helpers::mod_path_to_ast, imports::import_assets::item_for_path_search, use_trivial_constructor::use_trivial_constructor, }; +use syntax::syntax_editor::{Element, Position}; use syntax::{ TokenText, ast::{self,
AstNode, HasAttrs, HasGenericParams, HasName, edit, edit_in_place::Indent}, + ast::{self, AstNode, HasAttrs, HasGenericParams, HasName, edit::AstNodeEdit}, }; use crate::{ @@ -111,9 +112,8 @@ pub(crate) fn generate_single_field_struct_from( false, false, ) - .clone_for_update(); + .indent(1.into()); - fn_.indent(1.into()); let cfg_attrs = strukt .attrs() .filter(|attr| attr.as_simple_call().is_some_and(|(name, _arg)| name == "cfg")); @@ -129,16 +129,25 @@ pub(crate) fn generate_single_field_struct_from( make::ty("From"), ty.clone(), None, - ty_where_clause.map(|wc| edit::AstNodeEdit::reset_indent(&wc)), + ty_where_clause.map(|wc| wc.reset_indent()), None, ) .clone_for_update(); impl_.get_or_create_assoc_item_list().add_item(fn_.into()); + let impl_ = impl_.indent(indent); + + let mut edit = builder.make_editor(strukt.syntax()); - impl_.reindent_to(indent); + edit.insert_all( + Position::after(strukt.syntax()), + vec![ + make::tokens::whitespace(&format!("\n\n{indent}")).syntax_element(), + impl_.syntax().syntax_element(), + ], + ); - builder.insert(strukt.syntax().text_range().end(), format!("\n\n{indent}{impl_}")); + builder.add_file_edits(ctx.vfs_file_id(), edit); }, ) } From 0aa39c42330f00ca14d2655d038a441c6957eb0f Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Sun, 19 Oct 2025 18:31:26 +0900 Subject: [PATCH 33/76] fix: Run `cargo metadata` on sysroot with cwd=sysroot --- src/tools/rust-analyzer/crates/project-model/src/sysroot.rs | 3 +-- src/tools/rust-analyzer/crates/project-model/src/tests.rs | 1 - src/tools/rust-analyzer/crates/project-model/src/workspace.rs | 4 ---- .../rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs | 1 - 4 files changed, 1 insertion(+), 8 deletions(-) diff --git a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs index c0a5009afba37..272cf7dada8aa 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs @@ -210,7 +210,6 @@ impl Sysroot { &self, sysroot_source_config: &RustSourceWorkspaceConfig, no_deps: bool, - current_dir: &AbsPath, target_dir: &Utf8Path, progress: &dyn Fn(String), ) -> Option { @@ -224,7 +223,7 @@ impl Sysroot { if fs::metadata(&library_manifest).is_ok() { match self.load_library_via_cargo( &library_manifest, - current_dir, + src_root, target_dir, cargo_config, no_deps, diff --git a/src/tools/rust-analyzer/crates/project-model/src/tests.rs b/src/tools/rust-analyzer/crates/project-model/src/tests.rs index 987d381fac638..a79c8640fa6b8 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/tests.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/tests.rs @@ -241,7 +241,6 @@ fn smoke_test_real_sysroot_cargo() { let loaded_sysroot = sysroot.load_workspace( &RustSourceWorkspaceConfig::default_cargo(), false, - &cwd, &Utf8PathBuf::default(), &|_| (), ); diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs index 0649ce9eeb9df..957f336ee419c 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs @@ -383,7 +383,6 @@ impl ProjectWorkspace { toolchain.clone(), )), config.no_deps, - workspace_dir, &target_dir, progress, ) @@ -487,7 +486,6 @@ impl ProjectWorkspace { sysroot.load_workspace( &RustSourceWorkspaceConfig::Json(*sysroot_project), config.no_deps, - project_root, &target_dir, progress, ) @@ -499,7 
+497,6 @@ impl ProjectWorkspace { toolchain.clone(), )), config.no_deps, - project_root, &target_dir, progress, ) @@ -561,7 +558,6 @@ impl ProjectWorkspace { toolchain.clone(), )), config.no_deps, - dir, &target_dir, &|_| (), ); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs index 609ebf2b514f0..20567149bb4ba 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs @@ -78,7 +78,6 @@ impl Tester { let loaded_sysroot = sysroot.load_workspace( &RustSourceWorkspaceConfig::default_cargo(), false, - &path, &Utf8PathBuf::default(), &|_| (), ); From 4182a95e05157c6c735be7f87cd8a0c1a23c9148 Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Mon, 20 Oct 2025 01:55:52 +0900 Subject: [PATCH 34/76] fix: Report metadata errors for sysroot --- .../crates/project-model/src/sysroot.rs | 28 ++++++++++++++----- .../crates/project-model/src/tests.rs | 2 +- .../crates/project-model/src/workspace.rs | 4 +-- .../crates/rust-analyzer/src/reload.rs | 10 +++++++ 4 files changed, 34 insertions(+), 10 deletions(-) diff --git a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs index 272cf7dada8aa..5cc399bfe76d6 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs @@ -30,7 +30,7 @@ pub struct Sysroot { #[derive(Debug, Clone, Eq, PartialEq)] pub enum RustLibSrcWorkspace { - Workspace(CargoWorkspace), + Workspace { ws: CargoWorkspace, metadata_err: Option }, Json(ProjectJson), Stitched(stitched::Stitched), Empty, @@ -39,7 +39,9 @@ pub enum RustLibSrcWorkspace { impl fmt::Display for RustLibSrcWorkspace { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - RustLibSrcWorkspace::Workspace(ws) => write!(f, "workspace {}", ws.workspace_root()), + RustLibSrcWorkspace::Workspace { ws, .. } => { + write!(f, "workspace {}", ws.workspace_root()) + } RustLibSrcWorkspace::Json(json) => write!(f, "json {}", json.manifest_or_root()), RustLibSrcWorkspace::Stitched(stitched) => { write!(f, "stitched with {} crates", stitched.crates.len()) @@ -74,7 +76,7 @@ impl Sysroot { pub fn is_rust_lib_src_empty(&self) -> bool { match &self.workspace { - RustLibSrcWorkspace::Workspace(ws) => ws.packages().next().is_none(), + RustLibSrcWorkspace::Workspace { ws, .. } => ws.packages().next().is_none(), RustLibSrcWorkspace::Json(project_json) => project_json.n_crates() == 0, RustLibSrcWorkspace::Stitched(stitched) => stitched.crates.is_empty(), RustLibSrcWorkspace::Empty => true, @@ -85,9 +87,16 @@ impl Sysroot { self.error.as_deref() } + pub fn metadata_error(&self) -> Option<&str> { + match &self.workspace { + RustLibSrcWorkspace::Workspace { metadata_err, .. } => metadata_err.as_deref(), + _ => None, + } + } + pub fn num_packages(&self) -> usize { match &self.workspace { - RustLibSrcWorkspace::Workspace(ws) => ws.packages().count(), + RustLibSrcWorkspace::Workspace { ws, .. 
} => ws.packages().count(), RustLibSrcWorkspace::Json(project_json) => project_json.n_crates(), RustLibSrcWorkspace::Stitched(stitched) => stitched.crates.len(), RustLibSrcWorkspace::Empty => 0, @@ -293,7 +302,9 @@ impl Sysroot { && let Some(src_root) = &self.rust_lib_src_root { let has_core = match &self.workspace { - RustLibSrcWorkspace::Workspace(ws) => ws.packages().any(|p| ws[p].name == "core"), + RustLibSrcWorkspace::Workspace { ws: workspace, .. } => { + workspace.packages().any(|p| workspace[p].name == "core") + } RustLibSrcWorkspace::Json(project_json) => project_json .crates() .filter_map(|(_, krate)| krate.display_name.clone()) @@ -332,7 +343,7 @@ impl Sysroot { // Make sure we never attempt to write to the sysroot let locked = true; - let (mut res, _) = + let (mut res, err) = FetchMetadata::new(library_manifest, current_dir, &cargo_config, self, no_deps) .exec(target_dir, locked, progress)?; @@ -387,7 +398,10 @@ impl Sysroot { let cargo_workspace = CargoWorkspace::new(res, library_manifest.clone(), Default::default(), true); - Ok(RustLibSrcWorkspace::Workspace(cargo_workspace)) + Ok(RustLibSrcWorkspace::Workspace { + ws: cargo_workspace, + metadata_err: err.map(|e| format!("{e:#}")), + }) } } diff --git a/src/tools/rust-analyzer/crates/project-model/src/tests.rs b/src/tools/rust-analyzer/crates/project-model/src/tests.rs index a79c8640fa6b8..711cdd11b9a89 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/tests.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/tests.rs @@ -248,7 +248,7 @@ fn smoke_test_real_sysroot_cargo() { sysroot.set_workspace(loaded_sysroot); } assert!( - matches!(sysroot.workspace(), RustLibSrcWorkspace::Workspace(_)), + matches!(sysroot.workspace(), RustLibSrcWorkspace::Workspace { .. }), "got {}", sysroot.workspace() ); diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs index 957f336ee419c..22b84791aee9b 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs @@ -743,7 +743,7 @@ impl ProjectWorkspace { pub fn to_roots(&self) -> Vec { let mk_sysroot = || { let mut r = match self.sysroot.workspace() { - RustLibSrcWorkspace::Workspace(ws) => ws + RustLibSrcWorkspace::Workspace { ws, .. } => ws .packages() .filter_map(|pkg| { if ws[pkg].is_local { @@ -1731,7 +1731,7 @@ fn sysroot_to_crate_graph( ) -> (SysrootPublicDeps, Option) { let _p = tracing::info_span!("sysroot_to_crate_graph").entered(); match sysroot.workspace() { - RustLibSrcWorkspace::Workspace(cargo) => { + RustLibSrcWorkspace::Workspace { ws: cargo, .. } => { let (sysroot_cg, sysroot_pm) = cargo_to_crate_graph( load, None, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index ca15e6a98e035..1475f02447d23 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -222,6 +222,16 @@ impl GlobalState { message.push_str(err); message.push_str("\n\n"); } + if let Some(err) = ws.sysroot.metadata_error() { + status.health |= lsp_ext::Health::Warning; + format_to!( + message, + "Failed to read Cargo metadata with dependencies for sysroot of `{}`: ", + ws.manifest_or_root() + ); + message.push_str(err); + message.push_str("\n\n"); + } if let ProjectWorkspaceKind::Cargo { rustc: Err(Some(err)), .. 
} = &ws.kind { status.health |= lsp_ext::Health::Warning; format_to!( From 6232ba8d08d200fd9fc862076d4991a599fdc0da Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Fri, 17 Oct 2025 05:14:02 +0300 Subject: [PATCH 35/76] Migrate variance to the new solver --- .../crates/hir-ty/src/chalk_db.rs | 48 +- .../crates/hir-ty/src/chalk_ext.rs | 21 +- .../rust-analyzer/crates/hir-ty/src/db.rs | 9 +- .../rust-analyzer/crates/hir-ty/src/lib.rs | 1 - .../crates/hir-ty/src/next_solver/def_id.rs | 4 +- .../crates/hir-ty/src/next_solver/interner.rs | 52 +- .../crates/hir-ty/src/next_solver/mapping.rs | 36 +- .../crates/hir-ty/src/variance.rs | 444 +++++++----------- src/tools/rust-analyzer/crates/hir/src/lib.rs | 37 +- 9 files changed, 228 insertions(+), 424 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs index 3d06b52106709..a6b859b37210f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs @@ -1,55 +1,9 @@ //! The implementation of `RustIrDatabase` for Chalk, which provides information //! about the code that Chalk needs. -use hir_def::{CallableDefId, GenericDefId}; -use crate::{Interner, db::HirDatabase, mapping::from_chalk}; +use crate::Interner; pub(crate) type AssocTypeId = chalk_ir::AssocTypeId; pub(crate) type TraitId = chalk_ir::TraitId; pub(crate) type AdtId = chalk_ir::AdtId; pub(crate) type ImplId = chalk_ir::ImplId; -pub(crate) type Variances = chalk_ir::Variances; - -impl chalk_ir::UnificationDatabase for &dyn HirDatabase { - fn fn_def_variance( - &self, - fn_def_id: chalk_ir::FnDefId, - ) -> chalk_ir::Variances { - HirDatabase::fn_def_variance(*self, from_chalk(*self, fn_def_id)) - } - - fn adt_variance(&self, adt_id: chalk_ir::AdtId) -> chalk_ir::Variances { - HirDatabase::adt_variance(*self, adt_id.0) - } -} - -pub(crate) fn fn_def_variance_query( - db: &dyn HirDatabase, - callable_def: CallableDefId, -) -> Variances { - Variances::from_iter( - Interner, - db.variances_of(GenericDefId::from_callable(db, callable_def)) - .as_deref() - .unwrap_or_default() - .iter() - .map(|v| match v { - crate::variance::Variance::Covariant => chalk_ir::Variance::Covariant, - crate::variance::Variance::Invariant => chalk_ir::Variance::Invariant, - crate::variance::Variance::Contravariant => chalk_ir::Variance::Contravariant, - crate::variance::Variance::Bivariant => chalk_ir::Variance::Invariant, - }), - ) -} - -pub(crate) fn adt_variance_query(db: &dyn HirDatabase, adt_id: hir_def::AdtId) -> Variances { - Variances::from_iter( - Interner, - db.variances_of(adt_id.into()).as_deref().unwrap_or_default().iter().map(|v| match v { - crate::variance::Variance::Covariant => chalk_ir::Variance::Covariant, - crate::variance::Variance::Invariant => chalk_ir::Variance::Invariant, - crate::variance::Variance::Contravariant => chalk_ir::Variance::Contravariant, - crate::variance::Variance::Bivariant => chalk_ir::Variance::Invariant, - }), - ) -} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs index a315f699ddaae..4ea563d46e6e7 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs @@ -3,8 +3,8 @@ use hir_def::{ItemContainerId, Lookup, TraitId}; use crate::{ - Binders, DynTy, Interner, ProjectionTy, Substitution, TraitRef, Ty, db::HirDatabase, - from_assoc_type_id, from_chalk_trait_id, 
generics::generics, to_chalk_trait_id, + Interner, ProjectionTy, Substitution, TraitRef, Ty, db::HirDatabase, from_assoc_type_id, + from_chalk_trait_id, generics::generics, to_chalk_trait_id, }; pub(crate) trait ProjectionTyExt { @@ -35,23 +35,6 @@ impl ProjectionTyExt for ProjectionTy { } } -pub(crate) trait DynTyExt { - fn principal(&self) -> Option>>; -} - -impl DynTyExt for DynTy { - fn principal(&self) -> Option>> { - self.bounds.as_ref().filter_map(|bounds| { - bounds.interned().first().and_then(|b| { - b.as_ref().filter_map(|b| match b { - crate::WhereClause::Implemented(trait_ref) => Some(trait_ref), - _ => None, - }) - }) - }) - } -} - pub(crate) trait TraitRefExt { fn hir_trait_id(&self) -> TraitId; } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs index 7ad76f35b1f2f..a4c19eea162e2 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs @@ -17,7 +17,6 @@ use triomphe::Arc; use crate::{ Binders, ImplTraitId, ImplTraits, InferenceResult, TraitEnvironment, Ty, TyDefId, ValueTyDefId, - chalk_db, consteval::ConstEvalError, dyn_compatibility::DynCompatibilityViolation, layout::{Layout, LayoutError}, @@ -308,19 +307,13 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { #[salsa::interned] fn intern_coroutine(&self, id: InternedCoroutine) -> InternedCoroutineId; - #[salsa::invoke(chalk_db::fn_def_variance_query)] - fn fn_def_variance(&self, fn_def_id: CallableDefId) -> chalk_db::Variances; - - #[salsa::invoke(chalk_db::adt_variance_query)] - fn adt_variance(&self, adt_id: AdtId) -> chalk_db::Variances; - #[salsa::invoke(crate::variance::variances_of)] #[salsa::cycle( // cycle_fn = crate::variance::variances_of_cycle_fn, // cycle_initial = crate::variance::variances_of_cycle_initial, cycle_result = crate::variance::variances_of_cycle_initial, )] - fn variances_of(&self, def: GenericDefId) -> Option>; + fn variances_of(&self, def: GenericDefId) -> crate::next_solver::VariancesOf<'_>; // next trait solver diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs index 7277617bce8a1..77585177c1b5c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs @@ -117,7 +117,6 @@ pub use utils::{ TargetFeatureIsSafeInTarget, Unsafety, all_super_traits, direct_super_traits, is_fn_unsafe_to_call, target_feature_is_safe_in_target, }; -pub use variance::Variance; use chalk_ir::{BoundVar, DebruijnIndex, Safety, Scalar}; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/def_id.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/def_id.rs index 8525d4bc96e6d..928e1321e7388 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/def_id.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/def_id.rs @@ -155,7 +155,7 @@ impl From for SolverDefId { } impl TryFrom for GenericDefId { - type Error = SolverDefId; + type Error = (); fn try_from(value: SolverDefId) -> Result { Ok(match value { @@ -170,7 +170,7 @@ impl TryFrom for GenericDefId { | SolverDefId::InternedCoroutineId(_) | SolverDefId::InternedOpaqueTyId(_) | SolverDefId::EnumVariantId(_) - | SolverDefId::Ctor(_) => return Err(value), + | SolverDefId::Ctor(_) => return Err(()), }) } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs index cfa8b5b8a7f7d..7be891106df33 100644 
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs @@ -83,7 +83,7 @@ macro_rules! _interned_vec_nolifetime_salsa { ($name:ident, $ty:ty) => { interned_vec_nolifetime_salsa!($name, $ty, nofold); - impl<'db> rustc_type_ir::TypeFoldable> for $name { + impl<'db> rustc_type_ir::TypeFoldable> for $name<'db> { fn try_fold_with>>( self, folder: &mut F, @@ -104,7 +104,7 @@ macro_rules! _interned_vec_nolifetime_salsa { } } - impl<'db> rustc_type_ir::TypeVisitable> for $name { + impl<'db> rustc_type_ir::TypeVisitable> for $name<'db> { fn visit_with>>( &self, visitor: &mut V, @@ -117,14 +117,14 @@ macro_rules! _interned_vec_nolifetime_salsa { } }; ($name:ident, $ty:ty, nofold) => { - #[salsa::interned(no_lifetime, constructor = new_, debug)] + #[salsa::interned(constructor = new_, debug)] pub struct $name { #[returns(ref)] inner_: smallvec::SmallVec<[$ty; 2]>, } - impl $name { - pub fn new_from_iter<'db>( + impl<'db> $name<'db> { + pub fn new_from_iter( interner: DbInterner<'db>, data: impl IntoIterator, ) -> Self { @@ -140,7 +140,7 @@ macro_rules! _interned_vec_nolifetime_salsa { } } - impl rustc_type_ir::inherent::SliceLike for $name { + impl<'db> rustc_type_ir::inherent::SliceLike for $name<'db> { type Item = $ty; type IntoIter = as IntoIterator>::IntoIter; @@ -154,7 +154,7 @@ macro_rules! _interned_vec_nolifetime_salsa { } } - impl IntoIterator for $name { + impl<'db> IntoIterator for $name<'db> { type Item = $ty; type IntoIter = ::IntoIter; @@ -163,7 +163,7 @@ macro_rules! _interned_vec_nolifetime_salsa { } } - impl Default for $name { + impl<'db> Default for $name<'db> { fn default() -> Self { $name::new_from_iter(DbInterner::conjure(), []) } @@ -887,7 +887,7 @@ macro_rules! 
as_lang_item { impl<'db> rustc_type_ir::Interner for DbInterner<'db> { type DefId = SolverDefId; type LocalDefId = SolverDefId; - type LocalDefIds = SolverDefIds; + type LocalDefIds = SolverDefIds<'db>; type TraitId = TraitIdWrapper; type ForeignId = TypeAliasIdWrapper; type FunctionId = CallableIdWrapper; @@ -904,7 +904,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { type Term = Term<'db>; - type BoundVarKinds = BoundVarKinds; + type BoundVarKinds = BoundVarKinds<'db>; type BoundVarKind = BoundVarKind; type PredefinedOpaques = PredefinedOpaques<'db>; @@ -977,7 +977,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { type GenericsOf = Generics; - type VariancesOf = VariancesOf; + type VariancesOf = VariancesOf<'db>; type AdtDef = AdtDef; @@ -1045,10 +1045,9 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { fn variances_of(self, def_id: Self::DefId) -> Self::VariancesOf { let generic_def = match def_id { - SolverDefId::FunctionId(def_id) => def_id.into(), - SolverDefId::AdtId(def_id) => def_id.into(), - SolverDefId::Ctor(Ctor::Struct(def_id)) => def_id.into(), - SolverDefId::Ctor(Ctor::Enum(def_id)) => def_id.loc(self.db).parent.into(), + SolverDefId::Ctor(Ctor::Enum(def_id)) | SolverDefId::EnumVariantId(def_id) => { + def_id.loc(self.db).parent.into() + } SolverDefId::InternedOpaqueTyId(_def_id) => { // FIXME(next-solver): track variances // @@ -1059,17 +1058,20 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { (0..self.generics_of(def_id).count()).map(|_| Variance::Invariant), ); } - _ => return VariancesOf::new_from_iter(self, []), + SolverDefId::Ctor(Ctor::Struct(def_id)) => def_id.into(), + SolverDefId::AdtId(def_id) => def_id.into(), + SolverDefId::FunctionId(def_id) => def_id.into(), + SolverDefId::ConstId(_) + | SolverDefId::StaticId(_) + | SolverDefId::TraitId(_) + | SolverDefId::TypeAliasId(_) + | SolverDefId::ImplId(_) + | SolverDefId::InternedClosureId(_) + | SolverDefId::InternedCoroutineId(_) => { + return VariancesOf::new_from_iter(self, []); + } }; - VariancesOf::new_from_iter( - self, - self.db() - .variances_of(generic_def) - .as_deref() - .unwrap_or_default() - .iter() - .map(|v| v.to_nextsolver(self)), - ) + self.db.variances_of(generic_def) } fn type_of(self, def_id: Self::DefId) -> EarlyBinder { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/mapping.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/mapping.rs index 1a5982cc00d36..adbc6094a221c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/mapping.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/mapping.rs @@ -605,8 +605,8 @@ impl<'db, T: NextSolverToChalk<'db, U>, U: HasInterner> } } -impl<'db> ChalkToNextSolver<'db, BoundVarKinds> for chalk_ir::VariableKinds { - fn to_nextsolver(&self, interner: DbInterner<'db>) -> BoundVarKinds { +impl<'db> ChalkToNextSolver<'db, BoundVarKinds<'db>> for chalk_ir::VariableKinds { + fn to_nextsolver(&self, interner: DbInterner<'db>) -> BoundVarKinds<'db> { BoundVarKinds::new_from_iter( interner, self.iter(Interner).map(|v| v.to_nextsolver(interner)), @@ -614,7 +614,7 @@ impl<'db> ChalkToNextSolver<'db, BoundVarKinds> for chalk_ir::VariableKinds NextSolverToChalk<'db, chalk_ir::VariableKinds> for BoundVarKinds { +impl<'db> NextSolverToChalk<'db, chalk_ir::VariableKinds> for BoundVarKinds<'db> { fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::VariableKinds { chalk_ir::VariableKinds::from_iter(Interner, self.iter().map(|v| v.to_chalk(interner))) } @@ -763,36 +763,6 @@ 
impl<'db> ChalkToNextSolver<'db, rustc_ast_ir::Mutability> for chalk_ir::Mutabil } } -impl<'db> ChalkToNextSolver<'db, rustc_type_ir::Variance> for crate::Variance { - fn to_nextsolver(&self, interner: DbInterner<'db>) -> rustc_type_ir::Variance { - match self { - crate::Variance::Covariant => rustc_type_ir::Variance::Covariant, - crate::Variance::Invariant => rustc_type_ir::Variance::Invariant, - crate::Variance::Contravariant => rustc_type_ir::Variance::Contravariant, - crate::Variance::Bivariant => rustc_type_ir::Variance::Bivariant, - } - } -} - -impl<'db> ChalkToNextSolver<'db, rustc_type_ir::Variance> for chalk_ir::Variance { - fn to_nextsolver(&self, interner: DbInterner<'db>) -> rustc_type_ir::Variance { - match self { - chalk_ir::Variance::Covariant => rustc_type_ir::Variance::Covariant, - chalk_ir::Variance::Invariant => rustc_type_ir::Variance::Invariant, - chalk_ir::Variance::Contravariant => rustc_type_ir::Variance::Contravariant, - } - } -} - -impl<'db> ChalkToNextSolver<'db, VariancesOf> for chalk_ir::Variances { - fn to_nextsolver(&self, interner: DbInterner<'db>) -> VariancesOf { - VariancesOf::new_from_iter( - interner, - self.as_slice(Interner).iter().map(|v| v.to_nextsolver(interner)), - ) - } -} - impl<'db> ChalkToNextSolver<'db, Goal, Predicate<'db>>> for chalk_ir::InEnvironment> { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs b/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs index 0ff110106ebe5..46898ddeec126 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs @@ -13,43 +13,45 @@ //! by the next salsa version. If not, we will likely have to adapt and go with the rustc approach //! while installing firewall per item queries to prevent invalidation issues. 
-use crate::db::HirDatabase; -use crate::generics::{Generics, generics}; -use crate::next_solver::DbInterner; -use crate::next_solver::mapping::{ChalkToNextSolver, NextSolverToChalk}; -use crate::{ - AliasTy, Const, ConstScalar, DynTyExt, GenericArg, GenericArgData, Interner, Lifetime, - LifetimeData, Ty, TyKind, +use hir_def::{AdtId, GenericDefId, GenericParamId, VariantId, signatures::StructFlags}; +use rustc_ast_ir::Mutability; +use rustc_type_ir::{ + Variance, + inherent::{AdtDef, IntoKind, SliceLike}, }; -use chalk_ir::Mutability; -use hir_def::signatures::StructFlags; -use hir_def::{AdtId, GenericDefId, GenericParamId, VariantId}; -use std::fmt; -use std::ops::Not; use stdx::never; -use triomphe::Arc; -pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> Option> { +use crate::{ + db::HirDatabase, + generics::{Generics, generics}, + next_solver::{ + Const, ConstKind, DbInterner, ExistentialPredicate, GenericArg, GenericArgs, Region, + RegionKind, Term, Ty, TyKind, VariancesOf, + }, +}; + +pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> VariancesOf<'_> { tracing::debug!("variances_of(def={:?})", def); + let interner = DbInterner::new_with(db, None, None); match def { GenericDefId::FunctionId(_) => (), GenericDefId::AdtId(adt) => { if let AdtId::StructId(id) = adt { let flags = &db.struct_signature(id).flags; if flags.contains(StructFlags::IS_UNSAFE_CELL) { - return Some(Arc::from_iter(vec![Variance::Invariant; 1])); + return VariancesOf::new_from_iter(interner, [Variance::Invariant]); } else if flags.contains(StructFlags::IS_PHANTOM_DATA) { - return Some(Arc::from_iter(vec![Variance::Covariant; 1])); + return VariancesOf::new_from_iter(interner, [Variance::Covariant]); } } } - _ => return None, + _ => return VariancesOf::new_from_iter(interner, []), } let generics = generics(db, def); let count = generics.len(); if count == 0 { - return None; + return VariancesOf::new_from_iter(interner, []); } let mut variances = Context { generics, variances: vec![Variance::Bivariant; count], db }.solve(); @@ -69,7 +71,7 @@ pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> Option Option Option> { - let generics = generics(db, def); - let count = generics.len(); - - if count == 0 { - return None; - } - // FIXME(next-solver): Returns `Invariance` and not `Bivariance` here, see the comment in the main query. - Some(Arc::from(vec![Variance::Invariant; count])) -} - -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] -pub enum Variance { - Covariant, // T <: T iff A <: B -- e.g., function return type - Invariant, // T <: T iff B == A -- e.g., type of mutable cell - Contravariant, // T <: T iff B <: A -- e.g., function param type - Bivariant, // T <: T -- e.g., unused type parameter -} - -impl fmt::Display for Variance { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Variance::Covariant => write!(f, "covariant"), - Variance::Invariant => write!(f, "invariant"), - Variance::Contravariant => write!(f, "contravariant"), - Variance::Bivariant => write!(f, "bivariant"), - } - } -} - -impl Variance { - /// `a.xform(b)` combines the variance of a context with the - /// variance of a type with the following meaning. If we are in a - /// context with variance `a`, and we encounter a type argument in - /// a position with variance `b`, then `a.xform(b)` is the new - /// variance with which the argument appears. 
- /// - /// Example 1: - /// ```ignore (illustrative) - /// *mut Vec - /// ``` - /// Here, the "ambient" variance starts as covariant. `*mut T` is - /// invariant with respect to `T`, so the variance in which the - /// `Vec` appears is `Covariant.xform(Invariant)`, which - /// yields `Invariant`. Now, the type `Vec` is covariant with - /// respect to its type argument `T`, and hence the variance of - /// the `i32` here is `Invariant.xform(Covariant)`, which results - /// (again) in `Invariant`. - /// - /// Example 2: - /// ```ignore (illustrative) - /// fn(*const Vec, *mut Vec` appears is - /// `Contravariant.xform(Covariant)` or `Contravariant`. The same - /// is true for its `i32` argument. In the `*mut T` case, the - /// variance of `Vec` is `Contravariant.xform(Invariant)`, - /// and hence the outermost type is `Invariant` with respect to - /// `Vec` (and its `i32` argument). - /// - /// Source: Figure 1 of "Taming the Wildcards: - /// Combining Definition- and Use-Site Variance" published in PLDI'11. - fn xform(self, v: Variance) -> Variance { - match (self, v) { - // Figure 1, column 1. - (Variance::Covariant, Variance::Covariant) => Variance::Covariant, - (Variance::Covariant, Variance::Contravariant) => Variance::Contravariant, - (Variance::Covariant, Variance::Invariant) => Variance::Invariant, - (Variance::Covariant, Variance::Bivariant) => Variance::Bivariant, - - // Figure 1, column 2. - (Variance::Contravariant, Variance::Covariant) => Variance::Contravariant, - (Variance::Contravariant, Variance::Contravariant) => Variance::Covariant, - (Variance::Contravariant, Variance::Invariant) => Variance::Invariant, - (Variance::Contravariant, Variance::Bivariant) => Variance::Bivariant, - - // Figure 1, column 3. - (Variance::Invariant, _) => Variance::Invariant, - - // Figure 1, column 4. 
- (Variance::Bivariant, _) => Variance::Bivariant, - } - } - - fn glb(self, v: Variance) -> Variance { - // Greatest lower bound of the variance lattice as - // defined in The Paper: - // - // * - // - + - // o - match (self, v) { - (Variance::Invariant, _) | (_, Variance::Invariant) => Variance::Invariant, +fn glb(v1: Variance, v2: Variance) -> Variance { + // Greatest lower bound of the variance lattice as defined in The Paper: + // + // * + // - + + // o + match (v1, v2) { + (Variance::Invariant, _) | (_, Variance::Invariant) => Variance::Invariant, - (Variance::Covariant, Variance::Contravariant) => Variance::Invariant, - (Variance::Contravariant, Variance::Covariant) => Variance::Invariant, + (Variance::Covariant, Variance::Contravariant) => Variance::Invariant, + (Variance::Contravariant, Variance::Covariant) => Variance::Invariant, - (Variance::Covariant, Variance::Covariant) => Variance::Covariant, + (Variance::Covariant, Variance::Covariant) => Variance::Covariant, - (Variance::Contravariant, Variance::Contravariant) => Variance::Contravariant, + (Variance::Contravariant, Variance::Contravariant) => Variance::Contravariant, - (x, Variance::Bivariant) | (Variance::Bivariant, x) => x, - } - } - - pub fn invariant(self) -> Self { - self.xform(Variance::Invariant) + (x, Variance::Bivariant) | (Variance::Bivariant, x) => x, } +} - pub fn covariant(self) -> Self { - self.xform(Variance::Covariant) - } +pub(crate) fn variances_of_cycle_initial( + db: &dyn HirDatabase, + def: GenericDefId, +) -> VariancesOf<'_> { + let interner = DbInterner::new_with(db, None, None); + let generics = generics(db, def); + let count = generics.len(); - pub fn contravariant(self) -> Self { - self.xform(Variance::Contravariant) - } + // FIXME(next-solver): Returns `Invariance` and not `Bivariance` here, see the comment in the main query. 
+ VariancesOf::new_from_iter(interner, std::iter::repeat_n(Variance::Invariant, count)) } struct Context<'db> { @@ -213,17 +121,16 @@ struct Context<'db> { variances: Vec, } -impl Context<'_> { +impl<'db> Context<'db> { fn solve(mut self) -> Vec { tracing::debug!("solve(generics={:?})", self.generics); match self.generics.def() { GenericDefId::AdtId(adt) => { let db = self.db; let mut add_constraints_from_variant = |variant| { - let subst = self.generics.placeholder_subst(db); - for (_, field) in db.field_types(variant).iter() { + for (_, field) in db.field_types_ns(variant).iter() { self.add_constraints_from_ty( - &field.clone().substitute(Interner, &subst), + field.instantiate_identity(), Variance::Covariant, ); } @@ -239,16 +146,9 @@ impl Context<'_> { } } GenericDefId::FunctionId(f) => { - let subst = self.generics.placeholder_subst(self.db); - let interner = DbInterner::new_with(self.db, None, None); - let args: crate::next_solver::GenericArgs<'_> = subst.to_nextsolver(interner); - let sig = self - .db - .callable_item_signature(f.into()) - .instantiate(interner, args) - .skip_binder() - .to_chalk(interner); - self.add_constraints_from_sig(sig.params_and_return.iter(), Variance::Covariant); + let sig = + self.db.callable_item_signature(f.into()).instantiate_identity().skip_binder(); + self.add_constraints_from_sig(sig.inputs_and_output.iter(), Variance::Covariant); } _ => {} } @@ -276,122 +176,102 @@ impl Context<'_> { /// Adds constraints appropriate for an instance of `ty` appearing /// in a context with the generics defined in `generics` and /// ambient variance `variance` - fn add_constraints_from_ty(&mut self, ty: &Ty, variance: Variance) { + fn add_constraints_from_ty(&mut self, ty: Ty<'db>, variance: Variance) { tracing::debug!("add_constraints_from_ty(ty={:?}, variance={:?})", ty, variance); - match ty.kind(Interner) { - TyKind::Scalar(_) | TyKind::Never | TyKind::Str | TyKind::Foreign(..) => { + match ty.kind() { + TyKind::Int(_) + | TyKind::Uint(_) + | TyKind::Float(_) + | TyKind::Char + | TyKind::Bool + | TyKind::Never + | TyKind::Str + | TyKind::Foreign(..) => { // leaf type -- noop } - TyKind::FnDef(..) | TyKind::Coroutine(..) | TyKind::Closure(..) => { + TyKind::FnDef(..) + | TyKind::Coroutine(..) + | TyKind::CoroutineClosure(..) + | TyKind::Closure(..) 
=> { never!("Unexpected unnameable type in variance computation: {:?}", ty); } - TyKind::Ref(mutbl, lifetime, ty) => { + TyKind::Ref(lifetime, ty, mutbl) => { self.add_constraints_from_region(lifetime, variance); - self.add_constraints_from_mt(ty, *mutbl, variance); + self.add_constraints_from_mt(ty, mutbl, variance); } TyKind::Array(typ, len) => { - self.add_constraints_from_const(len, variance); + self.add_constraints_from_const(len); self.add_constraints_from_ty(typ, variance); } TyKind::Slice(typ) => { self.add_constraints_from_ty(typ, variance); } - TyKind::Raw(mutbl, ty) => { - self.add_constraints_from_mt(ty, *mutbl, variance); + TyKind::RawPtr(ty, mutbl) => { + self.add_constraints_from_mt(ty, mutbl, variance); } - TyKind::Tuple(_, subtys) => { - for subty in subtys.type_parameters(Interner) { - self.add_constraints_from_ty(&subty, variance); + TyKind::Tuple(subtys) => { + for subty in subtys { + self.add_constraints_from_ty(subty, variance); } } TyKind::Adt(def, args) => { - self.add_constraints_from_args(def.0.into(), args.as_slice(Interner), variance); - } - TyKind::Alias(AliasTy::Opaque(opaque)) => { - self.add_constraints_from_invariant_args( - opaque.substitution.as_slice(Interner), - variance, - ); - } - TyKind::Alias(AliasTy::Projection(proj)) => { - self.add_constraints_from_invariant_args( - proj.substitution.as_slice(Interner), - variance, - ); + self.add_constraints_from_args(def.def_id().0.into(), args, variance); } - // FIXME: check this - TyKind::AssociatedType(_, subst) => { - self.add_constraints_from_invariant_args(subst.as_slice(Interner), variance); + TyKind::Alias(_, alias) => { + // FIXME: Probably not correct wrt. opaques. + self.add_constraints_from_invariant_args(alias.args); } - // FIXME: check this - TyKind::OpaqueType(_, subst) => { - self.add_constraints_from_invariant_args(subst.as_slice(Interner), variance); - } - TyKind::Dyn(it) => { + TyKind::Dynamic(bounds, region) => { // The type `dyn Trait +'a` is covariant w/r/t `'a`: - self.add_constraints_from_region(&it.lifetime, variance); - - if let Some(trait_ref) = it.principal() { - // Trait are always invariant so we can take advantage of that. - self.add_constraints_from_invariant_args( - trait_ref - .map(|it| it.map(|it| it.substitution.clone())) - .substitute( - Interner, - &[GenericArg::new( - Interner, - chalk_ir::GenericArgData::Ty(TyKind::Error.intern(Interner)), - )], - ) - .skip_binders() - .as_slice(Interner), - variance, - ); + self.add_constraints_from_region(region, variance); + + for bound in bounds { + match bound.skip_binder() { + ExistentialPredicate::Trait(trait_ref) => { + self.add_constraints_from_invariant_args(trait_ref.args) + } + ExistentialPredicate::Projection(projection) => { + self.add_constraints_from_invariant_args(projection.args); + match projection.term { + Term::Ty(ty) => { + self.add_constraints_from_ty(ty, Variance::Invariant) + } + Term::Const(konst) => self.add_constraints_from_const(konst), + } + } + ExistentialPredicate::AutoTrait(_) => {} + } } - - // FIXME - // for projection in data.projection_bounds() { - // match projection.skip_binder().term.unpack() { - // TyKind::TermKind::Ty(ty) => { - // self.add_constraints_from_ty( ty, self.invariant); - // } - // TyKind::TermKind::Const(c) => { - // self.add_constraints_from_const( c, self.invariant) - // } - // } - // } } // Chalk has no params, so use placeholders for now? 
- TyKind::Placeholder(index) => { - let idx = crate::from_placeholder_idx(self.db, *index).0; - let index = self.generics.type_or_const_param_idx(idx).unwrap(); - self.constrain(index, variance); + TyKind::Param(param) => self.constrain(param.index as usize, variance), + TyKind::FnPtr(sig, _) => { + self.add_constraints_from_sig(sig.skip_binder().inputs_and_output.iter(), variance); } - TyKind::Function(f) => { - self.add_constraints_from_sig( - f.substitution.0.iter(Interner).filter_map(move |p| p.ty(Interner)), - variance, - ); - } - TyKind::Error => { + TyKind::Error(_) => { // we encounter this when walking the trait references for object // types, where we use Error as the Self type } - TyKind::CoroutineWitness(..) | TyKind::BoundVar(..) | TyKind::InferenceVar(..) => { + TyKind::Bound(..) => {} + TyKind::CoroutineWitness(..) + | TyKind::Placeholder(..) + | TyKind::Infer(..) + | TyKind::UnsafeBinder(..) + | TyKind::Pat(..) => { never!("unexpected type encountered in variance inference: {:?}", ty) } } } - fn add_constraints_from_invariant_args(&mut self, args: &[GenericArg], variance: Variance) { - let variance_i = variance.invariant(); - - for k in args { - match k.data(Interner) { - GenericArgData::Lifetime(lt) => self.add_constraints_from_region(lt, variance_i), - GenericArgData::Ty(ty) => self.add_constraints_from_ty(ty, variance_i), - GenericArgData::Const(val) => self.add_constraints_from_const(val, variance_i), + fn add_constraints_from_invariant_args(&mut self, args: GenericArgs<'db>) { + for k in args.iter() { + match k { + GenericArg::Lifetime(lt) => { + self.add_constraints_from_region(lt, Variance::Invariant) + } + GenericArg::Ty(ty) => self.add_constraints_from_ty(ty, Variance::Invariant), + GenericArg::Const(val) => self.add_constraints_from_const(val), } } } @@ -401,51 +281,40 @@ impl Context<'_> { fn add_constraints_from_args( &mut self, def_id: GenericDefId, - args: &[GenericArg], + args: GenericArgs<'db>, variance: Variance, ) { - // We don't record `inferred_starts` entries for empty generics. 
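The constraint-collection methods in this hunk combine variances with `xform` (composing the ambient variance with the variance of the position being entered) and fold the result into the table with `glb`. As an illustrative aside, not part of the patch, here is a minimal self-contained sketch of those two lattice operations; the `Variance` enum below is a stand-in for `rustc_type_ir::Variance`.

```rust
// Editorial sketch of the variance lattice used by the inference above.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Variance {
    Bivariant,     // top of the lattice: no constraint yet
    Covariant,
    Contravariant,
    Invariant,     // bottom of the lattice: most restrictive
}

use Variance::*;

/// Compose an ambient variance with the variance of the position being
/// descended into (e.g. the contravariant argument position of a function).
fn xform(ambient: Variance, pos: Variance) -> Variance {
    match (ambient, pos) {
        (Invariant, _) => Invariant,
        (Bivariant, _) => Bivariant,
        (v, Covariant) => v,
        (_, Invariant) => Invariant,
        (_, Bivariant) => Bivariant,
        (Covariant, Contravariant) => Contravariant,
        (Contravariant, Contravariant) => Covariant,
    }
}

/// Greatest lower bound: the single variance satisfying both constraints.
fn glb(a: Variance, b: Variance) -> Variance {
    match (a, b) {
        (Bivariant, v) | (v, Bivariant) => v,
        (Invariant, _) | (_, Invariant) => Invariant,
        (v, w) if v == w => v,
        // Covariant vs. Contravariant forces invariance.
        _ => Invariant,
    }
}

fn main() {
    // A covariant use inside a contravariant position is contravariant overall.
    assert_eq!(xform(Contravariant, Covariant), Contravariant);
    // Two contravariant steps cancel out.
    assert_eq!(xform(Contravariant, Contravariant), Covariant);
    // Constrained both ways, a parameter must be invariant.
    assert_eq!(glb(Covariant, Contravariant), Invariant);
    // Bivariant is the starting value and never overrides a real constraint.
    assert_eq!(glb(Bivariant, Covariant), Covariant);
}
```

The property the computation relies on is that `Invariant` is the bottom of the lattice, so once a parameter has been forced invariant no later constraint can relax it.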
if args.is_empty() { return; } - let Some(variances) = self.db.variances_of(def_id) else { - return; - }; + let variances = self.db.variances_of(def_id); - for (i, k) in args.iter().enumerate() { - match k.data(Interner) { - GenericArgData::Lifetime(lt) => { - self.add_constraints_from_region(lt, variance.xform(variances[i])) - } - GenericArgData::Ty(ty) => { - self.add_constraints_from_ty(ty, variance.xform(variances[i])) - } - GenericArgData::Const(val) => self.add_constraints_from_const(val, variance), + for (k, v) in args.iter().zip(variances) { + match k { + GenericArg::Lifetime(lt) => self.add_constraints_from_region(lt, variance.xform(v)), + GenericArg::Ty(ty) => self.add_constraints_from_ty(ty, variance.xform(v)), + GenericArg::Const(val) => self.add_constraints_from_const(val), } } } /// Adds constraints appropriate for a const expression `val` /// in a context with ambient variance `variance` - fn add_constraints_from_const(&mut self, c: &Const, variance: Variance) { - match &c.data(Interner).value { - chalk_ir::ConstValue::Concrete(c) => { - if let ConstScalar::UnevaluatedConst(_, subst) = &c.interned { - self.add_constraints_from_invariant_args(subst.as_slice(Interner), variance); - } - } + fn add_constraints_from_const(&mut self, c: Const<'db>) { + match c.kind() { + ConstKind::Unevaluated(c) => self.add_constraints_from_invariant_args(c.args), _ => {} } } /// Adds constraints appropriate for a function with signature /// `sig` appearing in a context with ambient variance `variance` - fn add_constraints_from_sig<'a>( + fn add_constraints_from_sig( &mut self, - mut sig_tys: impl DoubleEndedIterator, + mut sig_tys: impl DoubleEndedIterator>, variance: Variance, ) { - let contra = variance.contravariant(); + let contra = variance.xform(Variance::Contravariant); let Some(output) = sig_tys.next_back() else { return never!("function signature has no return type"); }; @@ -457,27 +326,26 @@ impl Context<'_> { /// Adds constraints appropriate for a region appearing in a /// context with ambient variance `variance` - fn add_constraints_from_region(&mut self, region: &Lifetime, variance: Variance) { + fn add_constraints_from_region(&mut self, region: Region<'db>, variance: Variance) { tracing::debug!( "add_constraints_from_region(region={:?}, variance={:?})", region, variance ); - match region.data(Interner) { - LifetimeData::Placeholder(index) => { - let idx = crate::lt_from_placeholder_idx(self.db, *index).0; - let inferred = self.generics.lifetime_idx(idx).unwrap(); - self.constrain(inferred, variance); - } - LifetimeData::Static => {} - LifetimeData::BoundVar(..) => { + match region.kind() { + RegionKind::ReEarlyParam(param) => self.constrain(param.index as usize, variance), + RegionKind::ReStatic => {} + RegionKind::ReBound(..) => { // Either a higher-ranked region inside of a type or a // late-bound function parameter. // // We do not compute constraints for either of these. } - LifetimeData::Error => {} - LifetimeData::Phantom(..) | LifetimeData::InferenceVar(..) | LifetimeData::Erased => { + RegionKind::ReError(_) => {} + RegionKind::ReLateParam(..) + | RegionKind::RePlaceholder(..) + | RegionKind::ReVar(..) + | RegionKind::ReErased => { // We don't expect to see anything but 'static or bound // regions when visiting member types or method types. 
never!( @@ -491,11 +359,11 @@ impl Context<'_> { /// Adds constraints appropriate for a mutability-type pair /// appearing in a context with ambient variance `variance` - fn add_constraints_from_mt(&mut self, ty: &Ty, mt: Mutability, variance: Variance) { + fn add_constraints_from_mt(&mut self, ty: Ty<'db>, mt: Mutability, variance: Variance) { self.add_constraints_from_ty( ty, match mt { - Mutability::Mut => variance.invariant(), + Mutability::Mut => Variance::Invariant, Mutability::Not => variance, }, ); @@ -508,7 +376,7 @@ impl Context<'_> { self.variances[index], variance ); - self.variances[index] = self.variances[index].glb(variance); + self.variances[index] = glb(self.variances[index], variance); } } @@ -519,6 +387,7 @@ mod tests { AdtId, GenericDefId, ModuleDefId, hir::generics::GenericParamDataRef, src::HasSource, }; use itertools::Itertools; + use rustc_type_ir::{Variance, inherent::SliceLike}; use stdx::format_to; use syntax::{AstNode, ast::HasName}; use test_fixture::WithFixture; @@ -1037,26 +906,21 @@ struct FixedPoint(&'static FixedPoint<(), T, U>, V); let loc = it.lookup(&db); loc.source(&db).value.name().unwrap() } - GenericDefId::TraitId(it) => { - let loc = it.lookup(&db); - loc.source(&db).value.name().unwrap() - } - GenericDefId::TypeAliasId(it) => { - let loc = it.lookup(&db); - loc.source(&db).value.name().unwrap() - } - GenericDefId::ImplId(_) => return None, - GenericDefId::ConstId(_) => return None, - GenericDefId::StaticId(_) => return None, + GenericDefId::TraitId(_) + | GenericDefId::TypeAliasId(_) + | GenericDefId::ImplId(_) + | GenericDefId::ConstId(_) + | GenericDefId::StaticId(_) => return None, }, )) }) .sorted_by_key(|(_, n)| n.syntax().text_range().start()); let mut res = String::new(); for (def, name) in defs { - let Some(variances) = db.variances_of(def) else { + let variances = db.variances_of(def); + if variances.is_empty() { continue; - }; + } format_to!( res, "{name}[{}]\n", @@ -1072,10 +936,16 @@ struct FixedPoint(&'static FixedPoint<(), T, U>, V); &lifetime_param_data.name } }) - .zip_eq(&*variances) + .zip_eq(variances) .format_with(", ", |(name, var), f| f(&format_args!( - "{}: {var}", - name.as_str() + "{}: {}", + name.as_str(), + match var { + Variance::Covariant => "covariant", + Variance::Invariant => "invariant", + Variance::Contravariant => "contravariant", + Variance::Bivariant => "bivariant", + }, ))) ); } diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index fc516a6764a5d..36c8c3051cf52 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -36,6 +36,7 @@ pub mod term_search; mod display; use std::{ + fmt, mem::discriminant, ops::{ControlFlow, Not}, }; @@ -160,7 +161,7 @@ pub use { // FIXME: Properly encapsulate mir hir_ty::mir, hir_ty::{ - CastError, FnAbi, PointerCast, Variance, attach_db, attach_db_allow_change, + CastError, FnAbi, PointerCast, attach_db, attach_db_allow_change, consteval::ConstEvalError, diagnostics::UnsafetyReason, display::{ClosureStyle, DisplayTarget, HirDisplay, HirDisplayError, HirWrite}, @@ -4110,7 +4111,39 @@ impl GenericParam { GenericParam::ConstParam(_) => return None, GenericParam::LifetimeParam(it) => generics.lifetime_idx(it.id)?, }; - db.variances_of(parent)?.get(index).copied() + db.variances_of(parent).get(index).map(Into::into) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum Variance { + Bivariant, + Covariant, + Contravariant, + Invariant, +} + +impl From for 
Variance { + #[inline] + fn from(value: rustc_type_ir::Variance) -> Self { + match value { + rustc_type_ir::Variance::Covariant => Variance::Covariant, + rustc_type_ir::Variance::Invariant => Variance::Invariant, + rustc_type_ir::Variance::Contravariant => Variance::Contravariant, + rustc_type_ir::Variance::Bivariant => Variance::Bivariant, + } + } +} + +impl fmt::Display for Variance { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let description = match self { + Variance::Bivariant => "bivariant", + Variance::Covariant => "covariant", + Variance::Contravariant => "contravariant", + Variance::Invariant => "invariant", + }; + f.pad(description) } } From 369715b77c2b663184535e4beee710784f0ead6f Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Fri, 17 Oct 2025 06:46:34 +0300 Subject: [PATCH 36/76] Remove lint allows from new solver stuff --- .../crates/hir-ty/src/builder.rs | 23 +- .../crates/hir-ty/src/consteval.rs | 64 +---- .../crates/hir-ty/src/display.rs | 6 +- .../crates/hir-ty/src/dyn_compatibility.rs | 12 +- .../crates/hir-ty/src/generics.rs | 2 +- .../rust-analyzer/crates/hir-ty/src/infer.rs | 18 +- .../crates/hir-ty/src/infer/closure.rs | 16 +- .../crates/hir-ty/src/infer/coerce.rs | 19 +- .../crates/hir-ty/src/infer/expr.rs | 6 +- .../crates/hir-ty/src/infer/pat.rs | 2 +- .../crates/hir-ty/src/infer/path.rs | 4 +- .../crates/hir-ty/src/infer/unify.rs | 10 +- .../rust-analyzer/crates/hir-ty/src/lib.rs | 2 +- .../crates/hir-ty/src/lower/path.rs | 7 +- .../crates/hir-ty/src/lower_nextsolver.rs | 224 ++++++------------ .../hir-ty/src/lower_nextsolver/path.rs | 56 +---- .../crates/hir-ty/src/method_resolution.rs | 4 +- .../crates/hir-ty/src/next_solver.rs | 1 - .../crates/hir-ty/src/next_solver/consts.rs | 9 +- .../crates/hir-ty/src/next_solver/fold.rs | 7 +- .../crates/hir-ty/src/next_solver/fulfill.rs | 19 +- .../hir-ty/src/next_solver/fulfill/errors.rs | 209 ++-------------- .../hir-ty/src/next_solver/generic_arg.rs | 80 ++----- .../crates/hir-ty/src/next_solver/generics.rs | 37 +-- .../crates/hir-ty/src/next_solver/infer/at.rs | 66 +----- .../infer/canonical/canonicalizer.rs | 2 +- .../infer/canonical/instantiate.rs | 15 +- .../src/next_solver/infer/canonical/mod.rs | 19 +- .../hir-ty/src/next_solver/infer/context.rs | 20 +- .../hir-ty/src/next_solver/infer/mod.rs | 71 +++--- .../src/next_solver/infer/opaque_types/mod.rs | 32 +-- .../next_solver/infer/opaque_types/table.rs | 20 +- .../infer/region_constraints/mod.rs | 6 +- .../next_solver/infer/relate/generalize.rs | 9 +- .../next_solver/infer/relate/higher_ranked.rs | 3 - .../src/next_solver/infer/relate/lattice.rs | 15 +- .../hir-ty/src/next_solver/infer/resolve.rs | 5 +- .../hir-ty/src/next_solver/infer/select.rs | 40 ++-- .../src/next_solver/infer/snapshot/fudge.rs | 5 +- .../next_solver/infer/snapshot/undo_log.rs | 4 +- .../hir-ty/src/next_solver/infer/traits.rs | 26 +- .../hir-ty/src/next_solver/infer/unify_key.rs | 29 +-- .../crates/hir-ty/src/next_solver/inspect.rs | 51 ++-- .../crates/hir-ty/src/next_solver/interner.rs | 141 ++++------- .../crates/hir-ty/src/next_solver/ir_print.rs | 2 - .../crates/hir-ty/src/next_solver/mapping.rs | 97 ++++---- .../hir-ty/src/next_solver/normalize.rs | 1 - .../hir-ty/src/next_solver/obligation_ctxt.rs | 37 +-- .../crates/hir-ty/src/next_solver/opaques.rs | 3 +- .../hir-ty/src/next_solver/predicate.rs | 13 +- .../crates/hir-ty/src/next_solver/region.rs | 3 +- .../crates/hir-ty/src/next_solver/solver.rs | 66 ++---- .../crates/hir-ty/src/next_solver/ty.rs | 26 +- 
.../crates/hir-ty/src/next_solver/util.rs | 46 +--- .../crates/hir-ty/src/primitive.rs | 67 +----- src/tools/rust-analyzer/crates/hir/src/lib.rs | 7 +- .../rust-analyzer/crates/hir/src/semantics.rs | 9 +- 57 files changed, 526 insertions(+), 1267 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs index 5c4eb8475bbc2..4cd0af28f33f8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs @@ -1,10 +1,10 @@ //! `TyBuilder`, a helper for building instances of `Ty` and related types. use chalk_ir::{ - DebruijnIndex, Scalar, + DebruijnIndex, cast::{Cast, Caster}, }; -use hir_def::{GenericDefId, GenericParamId, TraitId, builtin_type::BuiltinType}; +use hir_def::{GenericDefId, GenericParamId, TraitId}; use smallvec::SmallVec; use crate::{ @@ -18,7 +18,7 @@ use crate::{ DbInterner, EarlyBinder, mapping::{ChalkToNextSolver, NextSolverToChalk}, }, - primitive, to_chalk_trait_id, + to_chalk_trait_id, }; #[derive(Debug, Clone, PartialEq, Eq)] @@ -137,23 +137,6 @@ impl TyBuilder<()> { TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize)).intern(Interner) } - pub(crate) fn builtin(builtin: BuiltinType) -> Ty { - match builtin { - BuiltinType::Char => TyKind::Scalar(Scalar::Char).intern(Interner), - BuiltinType::Bool => TyKind::Scalar(Scalar::Bool).intern(Interner), - BuiltinType::Str => TyKind::Str.intern(Interner), - BuiltinType::Int(t) => { - TyKind::Scalar(Scalar::Int(primitive::int_ty_from_builtin(t))).intern(Interner) - } - BuiltinType::Uint(t) => { - TyKind::Scalar(Scalar::Uint(primitive::uint_ty_from_builtin(t))).intern(Interner) - } - BuiltinType::Float(t) => { - TyKind::Scalar(Scalar::Float(primitive::float_ty_from_builtin(t))).intern(Interner) - } - } - } - pub(crate) fn unknown_subst( db: &dyn HirDatabase, def: impl Into, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs index 761d72243e9f8..18ebe7d7a5395 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs @@ -6,74 +6,28 @@ mod tests; use base_db::Crate; use hir_def::{ EnumVariantId, GeneralConstId, HasModule, StaticId, - expr_store::{Body, HygieneId, path::Path}, + expr_store::Body, hir::{Expr, ExprId}, - resolver::{Resolver, ValueNs}, type_ref::LiteralConstRef, }; use hir_expand::Lookup; -use rustc_type_ir::{UnevaluatedConst, inherent::IntoKind}; -use stdx::never; +use rustc_type_ir::inherent::IntoKind; use triomphe::Arc; use crate::{ - MemoryMap, TraitEnvironment, + LifetimeElisionKind, MemoryMap, TraitEnvironment, TyLoweringContext, db::HirDatabase, display::DisplayTarget, - generics::Generics, infer::InferenceContext, mir::{MirEvalError, MirLowerError}, next_solver::{ Const, ConstBytes, ConstKind, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, - ParamConst, SolverDefId, Ty, ValueConst, + SolverDefId, Ty, ValueConst, }, }; use super::mir::{interpret_mir, lower_to_mir, pad16}; -pub(crate) fn path_to_const<'a, 'g>( - db: &'a dyn HirDatabase, - resolver: &Resolver<'a>, - path: &Path, - args: impl FnOnce() -> &'g Generics, - _expected_ty: Ty<'a>, -) -> Option> { - let interner = DbInterner::new_with(db, Some(resolver.krate()), None); - match resolver.resolve_path_in_value_ns_fully(db, path, HygieneId::ROOT) { - Some(ValueNs::GenericParam(p)) => { - let args = args(); - match args - .type_or_const_param(p.into()) - 
.and_then(|(idx, p)| p.const_param().map(|p| (idx, p.clone()))) - { - Some((idx, _param)) => { - Some(Const::new_param(interner, ParamConst { index: idx as u32, id: p })) - } - None => { - never!( - "Generic list doesn't contain this param: {:?}, {:?}, {:?}", - args, - path, - p - ); - None - } - } - } - Some(ValueNs::ConstId(c)) => { - let args = GenericArgs::new_from_iter(interner, []); - Some(Const::new( - interner, - rustc_type_ir::ConstKind::Unevaluated(UnevaluatedConst::new( - SolverDefId::ConstId(c), - args, - )), - )) - } - _ => None, - } -} - pub fn unknown_const<'db>(_ty: Ty<'db>) -> Const<'db> { Const::new(DbInterner::conjure(), rustc_type_ir::ConstKind::Error(ErrorGuaranteed)) } @@ -279,8 +233,14 @@ pub(crate) fn eval_to_const<'db>(expr: ExprId, ctx: &mut InferenceContext<'_, 'd return unknown_const(infer[expr]); } if let Expr::Path(p) = &ctx.body[expr] { - let resolver = &ctx.resolver; - if let Some(c) = path_to_const(ctx.db, resolver, p, || ctx.generics(), infer[expr]) { + let mut ctx = TyLoweringContext::new( + ctx.db, + &ctx.resolver, + ctx.body, + ctx.generic_def, + LifetimeElisionKind::Infer, + ); + if let Some(c) = ctx.path_to_const(p) { return c; } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs index 210e1ac52e58a..2c6cbdd03f13c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs @@ -1062,9 +1062,9 @@ impl<'db> HirDisplay<'db> for Ty<'db> { TyKind::Str => write!(f, "str")?, TyKind::Bool => write!(f, "bool")?, TyKind::Char => write!(f, "char")?, - TyKind::Float(t) => write!(f, "{}", primitive::float_ty_to_string_ns(t))?, - TyKind::Int(t) => write!(f, "{}", primitive::int_ty_to_string_ns(t))?, - TyKind::Uint(t) => write!(f, "{}", primitive::uint_ty_to_string_ns(t))?, + TyKind::Float(t) => write!(f, "{}", primitive::float_ty_to_string(t))?, + TyKind::Int(t) => write!(f, "{}", primitive::int_ty_to_string(t))?, + TyKind::Uint(t) => write!(f, "{}", primitive::uint_ty_to_string(t))?, TyKind::Slice(t) => { write!(f, "[")?; t.hir_fmt(f)?; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs index b2406a0889583..e35a798703294 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs @@ -7,7 +7,6 @@ use hir_def::{ TypeAliasId, TypeOrConstParamId, TypeParamId, hir::generics::LocalTypeOrConstParamId, lang_item::LangItem, signatures::TraitFlags, }; -use intern::Symbol; use rustc_hash::FxHashSet; use rustc_type_ir::{ AliasTyKind, ClauseKind, PredicatePolarity, TypeSuperVisitable as _, TypeVisitable as _, @@ -441,8 +440,7 @@ fn receiver_is_dispatchable<'db>( // Type `U` // FIXME: That seems problematic to fake a generic param like that? 
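For readers unfamiliar with what `receiver_is_dispatchable` in the surrounding hunk is deciding: the question is whether a method's receiver type still supports dynamic dispatch once `Self` is unsized to `dyn Trait`. The snippet below is an editorial illustration of the user-visible property (an `Rc<Self>` receiver remains dispatchable), not code from rust-analyzer.

```rust
use std::rc::Rc;

trait Speak {
    fn speak(self: Rc<Self>) -> String;
}

struct Dog;

impl Speak for Dog {
    fn speak(self: Rc<Self>) -> String {
        "woof".to_owned()
    }
}

fn main() {
    // `Rc<Dog>` unsizes to `Rc<dyn Speak>`, and the `Rc<Self>` receiver is
    // dispatchable, so the dynamic call below is allowed.
    let dog: Rc<dyn Speak> = Rc::new(Dog);
    assert_eq!(dog.speak(), "woof");
}
```

A receiver type that failed the `Unsize`/dispatch requirements assembled in this function would instead make the trait non-dyn-compatible.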
- let unsized_self_ty = - crate::next_solver::Ty::new_param(interner, self_param_id, u32::MAX, Symbol::empty()); + let unsized_self_ty = crate::next_solver::Ty::new_param(interner, self_param_id, u32::MAX); // `Receiver[Self => U]` let unsized_receiver_ty = receiver_for_self_ty(interner, func, receiver_ty, unsized_self_ty); @@ -454,8 +452,8 @@ fn receiver_is_dispatchable<'db>( TraitRef::new(interner, unsize_did.into(), [self_param_ty, unsized_self_ty]); // U: Trait - let args = GenericArgs::for_item(interner, trait_.into(), |name, index, kind, _| { - if index == 0 { unsized_self_ty.into() } else { mk_param(interner, index, name, kind) } + let args = GenericArgs::for_item(interner, trait_.into(), |index, kind, _| { + if index == 0 { unsized_self_ty.into() } else { mk_param(interner, index, kind) } }); let trait_predicate = TraitRef::new_from_args(interner, trait_.into(), args); @@ -494,8 +492,8 @@ fn receiver_for_self_ty<'db>( let args = crate::next_solver::GenericArgs::for_item( interner, SolverDefId::FunctionId(func), - |name, index, kind, _| { - if index == 0 { self_ty.into() } else { mk_param(interner, index, name, kind) } + |index, kind, _| { + if index == 0 { self_ty.into() } else { mk_param(interner, index, kind) } }, ); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs index 2053a099ed781..3ca5f0dcb2476 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs @@ -258,7 +258,7 @@ impl Generics { } /// Returns a Substitution that replaces each parameter by itself (i.e. `Ty::Param`). - pub fn placeholder_subst(&self, db: &dyn HirDatabase) -> Substitution { + pub(crate) fn placeholder_subst(&self, db: &dyn HirDatabase) -> Substitution { Substitution::from_iter( Interner, self.iter_id().enumerate().map(|(index, id)| match id { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index 041799be9602e..b2dd90a3d0df0 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -57,7 +57,6 @@ use triomphe::Arc; use crate::{ ImplTraitId, IncorrectGenericsLenKind, PathLoweringDiagnostic, TargetFeatures, db::{HirDatabase, InternedClosureId, InternedOpaqueTyId}, - generics::Generics, infer::{ coerce::{CoerceMany, DynamicCoerceMany}, diagnostics::{Diagnostics, InferenceTyLoweringContext as TyLoweringContext}, @@ -72,10 +71,7 @@ use crate::{ Tys, abi::Safety, fold::fold_tys, - infer::{ - DefineOpaqueTypes, - traits::{Obligation, ObligationCause}, - }, + infer::traits::{Obligation, ObligationCause}, mapping::ChalkToNextSolver, }, traits::FnTrait, @@ -763,8 +759,7 @@ pub(crate) struct InferenceContext<'body, 'db> { /// and resolve the path via its methods. This will ensure proper error reporting. pub(crate) resolver: Resolver<'db>, target_features: OnceCell<(TargetFeatures, TargetFeatureIsSafeInTarget)>, - generic_def: GenericDefId, - generics: OnceCell, + pub(crate) generic_def: GenericDefId, table: unify::InferenceTable<'db>, /// The traits in scope, disregarding block modules. This is used for caching purposes. 
traits_in_scope: FxHashSet, @@ -873,7 +868,6 @@ impl<'body, 'db> InferenceContext<'body, 'db> { return_ty: types.error, // set in collect_* calls types, target_features: OnceCell::new(), - generics: OnceCell::new(), table, tuple_field_accesses_rev: Default::default(), resume_yield_tys: None, @@ -902,10 +896,6 @@ impl<'body, 'db> InferenceContext<'body, 'db> { } } - pub(crate) fn generics(&self) -> &Generics { - self.generics.get_or_init(|| crate::generics::generics(self.db, self.generic_def)) - } - #[inline] fn krate(&self) -> Crate { self.resolver.krate() @@ -1133,7 +1123,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { GenericArgs::for_item_with_defaults( self.interner(), va_list.into(), - |_, _, id, _| self.table.next_var_for_param(id), + |_, id, _| self.table.next_var_for_param(id), ), ), None => self.err_ty(), @@ -1676,7 +1666,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { .table .infer_ctxt .at(&ObligationCause::new(), self.table.trait_env.env) - .eq(DefineOpaqueTypes::Yes, expected, actual) + .eq(expected, actual) .map(|infer_ok| self.table.register_infer_ok(infer_ok)); if let Err(_err) = result { // FIXME: Emit diagnostic. diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs index 2637ed6b3ec93..3dc277023a325 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs @@ -26,7 +26,7 @@ use crate::{ PolyProjectionPredicate, Predicate, PredicateKind, SolverDefId, Ty, TyKind, abi::Safety, infer::{ - BoundRegionConversionTime, DefineOpaqueTypes, InferOk, InferResult, + BoundRegionConversionTime, InferOk, InferResult, traits::{ObligationCause, PredicateObligations}, }, util::explicit_item_bounds, @@ -307,7 +307,7 @@ impl<'db> InferenceContext<'_, 'db> { .table .infer_ctxt .at(&ObligationCause::new(), self.table.trait_env.env) - .eq(DefineOpaqueTypes::Yes, inferred_fnptr_sig, generalized_fnptr_sig) + .eq(inferred_fnptr_sig, generalized_fnptr_sig) .map(|infer_ok| self.table.register_infer_ok(infer_ok)); let resolved_sig = @@ -692,18 +692,16 @@ impl<'db> InferenceContext<'_, 'db> { let InferOk { value: (), obligations } = table .infer_ctxt .at(&cause, table.trait_env.env) - .eq(DefineOpaqueTypes::Yes, expected_ty, supplied_ty)?; + .eq(expected_ty, supplied_ty)?; all_obligations.extend(obligations); } let supplied_output_ty = supplied_sig.output(); let cause = ObligationCause::new(); - let InferOk { value: (), obligations } = - table.infer_ctxt.at(&cause, table.trait_env.env).eq( - DefineOpaqueTypes::Yes, - expected_sigs.liberated_sig.output(), - supplied_output_ty, - )?; + let InferOk { value: (), obligations } = table + .infer_ctxt + .at(&cause, table.trait_env.env) + .eq(expected_sigs.liberated_sig.output(), supplied_output_ty)?; all_obligations.extend(obligations); let inputs = supplied_sig diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs index 4620da7147439..78889ccb89a28 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs @@ -63,7 +63,7 @@ use crate::{ GenericArgs, PolyFnSig, PredicateKind, Region, RegionKind, SolverDefId, TraitRef, Ty, TyKind, infer::{ - DefineOpaqueTypes, InferCtxt, InferOk, InferResult, + InferCtxt, InferOk, InferResult, relate::RelateResult, select::{ImplSource, SelectionError}, traits::{Obligation, ObligationCause, PredicateObligation, 
PredicateObligations}, @@ -149,7 +149,7 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> { let res = if this.use_lub { at.lub(b, a) } else { - at.sup(DefineOpaqueTypes::Yes, b, a) + at.sup(b, a) .map(|InferOk { value: (), obligations }| InferOk { value: b, obligations }) }; @@ -1460,19 +1460,12 @@ impl<'db, 'exprs> CoerceMany<'db, 'exprs> { // // Another example is `break` with no argument expression. assert!(expression_ty.is_unit(), "if let hack without unit type"); - icx.table - .infer_ctxt - .at(cause, icx.table.trait_env.env) - .eq( - // needed for tests/ui/type-alias-impl-trait/issue-65679-inst-opaque-ty-from-val-twice.rs - DefineOpaqueTypes::Yes, - expected, - found, - ) - .map(|infer_ok| { + icx.table.infer_ctxt.at(cause, icx.table.trait_env.env).eq(expected, found).map( + |infer_ok| { icx.table.register_infer_ok(infer_ok); expression_ty - }) + }, + ) }; debug!(?result); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs index 179eaccc652b7..e1964608a3f06 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs @@ -46,7 +46,7 @@ use crate::{ AliasTy, Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, TraitRef, Ty, TyKind, TypeError, infer::{ - DefineOpaqueTypes, InferOk, + InferOk, traits::{Obligation, ObligationCause}, }, obligation_ctxt::ObligationCtxt, @@ -1333,7 +1333,7 @@ impl<'db> InferenceContext<'_, 'db> { self.interner(), box_id.into(), [inner_ty.into()], - |_, _, id, _| self.table.next_var_for_param(id), + |_, id, _| self.table.next_var_for_param(id), ), ) } else { @@ -2122,7 +2122,7 @@ impl<'db> InferenceContext<'_, 'db> { .table .infer_ctxt .at(&ObligationCause::new(), this.table.trait_env.env) - .eq(DefineOpaqueTypes::Yes, formal_input_ty, coerced_ty); + .eq(formal_input_ty, coerced_ty); // If neither check failed, the types are compatible match formal_ty_error { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs index 452ae316620f2..61255d31d2810 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs @@ -358,7 +358,7 @@ impl<'db> InferenceContext<'_, 'db> { self.interner(), box_adt.into(), std::iter::once(inner_ty.into()).chain(alloc_ty.map(Into::into)), - |_, _, id, _| self.table.next_var_for_param(id), + |_, id, _| self.table.next_var_for_param(id), ), ) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs index f70ed90b953a0..84d17db6c663a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs @@ -118,7 +118,7 @@ impl<'db> InferenceContext<'_, 'db> { self.interner(), generic_def.into(), self_subst.iter().flat_map(|it| it.iter()).chain(substs.iter().skip(parent_substs_len)), - |_, _, id, _| GenericArg::error_from_id(self.interner(), id), + |_, id, _| GenericArg::error_from_id(self.interner(), id), ); Some(ValuePathResolution::GenericDef(value_def, generic_def, substs)) @@ -352,7 +352,7 @@ impl<'db> InferenceContext<'_, 'db> { self.interner(), trait_.into(), [ty.into()], - |_, _, id, _| self.table.next_var_for_param(id), + |_, id, _| self.table.next_var_for_param(id), ); let trait_ref = TraitRef::new(self.interner(), trait_.into(), args); self.table.register_predicate(Obligation::new( diff --git 
a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs index 8f754f0e1aaa4..beb26f7d68908 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs @@ -25,7 +25,7 @@ use crate::{ SolverDefId, SolverDefIds, TraitRef, Ty, TyKind, TypingMode, fulfill::{FulfillmentCtxt, NextSolverError}, infer::{ - DbInternerInferExt, DefineOpaqueTypes, InferCtxt, InferOk, InferResult, + DbInternerInferExt, InferCtxt, InferOk, InferResult, at::ToTrace, snapshot::CombinedSnapshot, traits::{Obligation, ObligationCause, PredicateObligation}, @@ -148,7 +148,7 @@ fn could_unify_impl<'db>( let ((ty1_with_vars, ty2_with_vars), _) = infcx.instantiate_canonical(tys); let mut ctxt = ObligationCtxt::new(&infcx); let can_unify = at - .eq(DefineOpaqueTypes::No, ty1_with_vars, ty2_with_vars) + .eq(ty1_with_vars, ty2_with_vars) .map(|infer_ok| ctxt.register_infer_ok_obligations(infer_ok)) .is_ok(); can_unify && select(&mut ctxt).is_empty() @@ -452,11 +452,7 @@ impl<'db> InferenceTable<'db> { /// Unify two relatable values (e.g. `Ty`) and return new trait goals arising from it, so the /// caller needs to deal with them. pub(crate) fn try_unify>(&mut self, t1: T, t2: T) -> InferResult<'db, ()> { - self.infer_ctxt.at(&ObligationCause::new(), self.trait_env.env).eq( - DefineOpaqueTypes::Yes, - t1, - t2, - ) + self.infer_ctxt.at(&ObligationCause::new(), self.trait_env.env).eq(t1, t2) } pub(crate) fn shallow_resolve(&self, ty: Ty<'db>) -> Ty<'db> { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs index 77585177c1b5c..b698fd9a14541 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs @@ -82,7 +82,7 @@ use traits::FnTrait; use triomphe::Arc; use crate::{ - builder::{ParamKind, TyBuilder}, + builder::TyBuilder, chalk_ext::*, db::HirDatabase, display::{DisplayTarget, HirDisplay}, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs index 09a256a86dccf..42723dc9e1dda 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs @@ -434,13 +434,16 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { } fn lower_path_inner(&mut self, typeable: TyDefId, infer_args: bool) -> Ty { + let interner = DbInterner::conjure(); let generic_def = match typeable { - TyDefId::BuiltinType(builtin) => return TyBuilder::builtin(builtin), + TyDefId::BuiltinType(builtin) => { + return crate::next_solver::Ty::from_builtin_type(interner, builtin) + .to_chalk(interner); + } TyDefId::AdtId(it) => it.into(), TyDefId::TypeAliasId(it) => it.into(), }; let substs = self.substs_from_path_segment(generic_def, infer_args, None, false); - let interner = DbInterner::conjure(); let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner); self.ctx.db.ty(typeable).instantiate(interner, args).to_chalk(interner) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver.rs index aced46bf806bb..76ee1a4f2d2b7 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver.rs @@ -5,8 +5,6 @@ //! - Building the type for an item: This happens through the `ty` query. //! //! 
This usually involves resolving names, collecting generic arguments etc. -#![allow(unused)] -// FIXME(next-solver): this should get removed as things get moved to rustc_type_ir from chalk_ir pub(crate) mod path; use std::{ @@ -20,19 +18,15 @@ use either::Either; use hir_def::{ AdtId, AssocItemId, CallableDefId, ConstId, ConstParamId, DefWithBodyId, EnumVariantId, FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LifetimeParamId, - LocalFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId, - TypeParamId, VariantId, - expr_store::{ - ExpressionStore, - path::{GenericArg, Path}, - }, + LocalFieldId, Lookup, StaticId, StructId, TypeAliasId, TypeOrConstParamId, TypeParamId, + VariantId, + expr_store::{ExpressionStore, HygieneId, path::Path}, hir::generics::{ - GenericParamDataRef, TypeOrConstParamData, TypeParamData, TypeParamProvenance, - WherePredicate, + GenericParamDataRef, TypeOrConstParamData, TypeParamProvenance, WherePredicate, }, item_tree::FieldsShape, lang_item::LangItem, - resolver::{HasResolver, LifetimeNs, Resolver, TypeNs}, + resolver::{HasResolver, LifetimeNs, Resolver, TypeNs, ValueNs}, signatures::{FunctionSignature, TraitFlags, TypeAliasFlags}, type_ref::{ ConstRef, LifetimeRefId, LiteralConstRef, PathId, TraitBoundModifier, @@ -40,7 +34,6 @@ use hir_def::{ }, }; use hir_expand::name::Name; -use intern::{Symbol, sym}; use la_arena::{Arena, ArenaMap, Idx}; use path::{PathDiagnosticCallback, PathLoweringContext}; use rustc_ast_ir::Mutability; @@ -50,7 +43,7 @@ use rustc_type_ir::{ AliasTyKind, ConstKind, DebruijnIndex, ExistentialPredicate, ExistentialProjection, ExistentialTraitRef, FnSig, OutlivesPredicate, TyKind::{self}, - TypeFoldable, TypeFolder, TypeVisitableExt, Upcast, + TypeVisitableExt, inherent::{GenericArg as _, GenericArgs as _, IntoKind as _, Region as _, SliceLike, Ty as _}, }; use salsa::plumbing::AsId; @@ -59,19 +52,17 @@ use stdx::never; use triomphe::Arc; use crate::{ - FnAbi, ImplTraitId, Interner, ParamKind, TraitEnvironment, TyDefId, TyLoweringDiagnostic, - TyLoweringDiagnosticKind, ValueTyDefId, - consteval::{intern_const_ref, path_to_const, unknown_const_as_generic}, + FnAbi, ImplTraitId, TraitEnvironment, TyDefId, TyLoweringDiagnostic, TyLoweringDiagnosticKind, + ValueTyDefId, + consteval::intern_const_ref, db::HirDatabase, generics::{Generics, generics, trait_self_param_idx}, lower::{Diagnostics, PathDiagnosticCallbackData, create_diagnostics}, next_solver::{ - AdtDef, AliasTy, Binder, BoundExistentialPredicates, BoundRegionKind, BoundTyKind, - BoundVarKind, BoundVarKinds, Clause, Clauses, Const, DbInterner, EarlyBinder, - EarlyParamRegion, ErrorGuaranteed, GenericArgs, ParamConst, ParamEnv, PolyFnSig, Predicate, - Region, SolverDefId, TraitPredicate, TraitRef, Ty, Tys, - abi::Safety, - mapping::{ChalkToNextSolver, convert_ty_for_result}, + AliasTy, Binder, BoundExistentialPredicates, Clause, Clauses, Const, DbInterner, + EarlyBinder, EarlyParamRegion, ErrorGuaranteed, GenericArg, GenericArgs, ParamConst, + ParamEnv, PolyFnSig, Predicate, Region, SolverDefId, TraitPredicate, TraitRef, Ty, Tys, + UnevaluatedConst, abi::Safety, }, }; @@ -95,11 +86,11 @@ struct ImplTraitLoweringState<'db> { mode: ImplTraitLoweringMode, // This is structured as a struct with fields and not as an enum because it helps with the borrow checker. 
opaque_type_data: Arena>, - param_and_variable_counter: u16, } + impl<'db> ImplTraitLoweringState<'db> { fn new(mode: ImplTraitLoweringMode) -> ImplTraitLoweringState<'db> { - Self { mode, opaque_type_data: Arena::new(), param_and_variable_counter: 0 } + Self { mode, opaque_type_data: Arena::new() } } } @@ -279,8 +270,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { let const_ref = &self.store[const_ref.expr]; match const_ref { hir_def::hir::Expr::Path(path) => { - path_to_const(self.db, self.resolver, path, || self.generics(), const_type) - .unwrap_or_else(|| unknown_const(const_type)) + self.path_to_const(path).unwrap_or_else(|| unknown_const(const_type)) } hir_def::hir::Expr::Literal(literal) => intern_const_ref( self.db, @@ -324,9 +314,39 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { } } + pub(crate) fn path_to_const(&mut self, path: &Path) -> Option> { + match self.resolver.resolve_path_in_value_ns_fully(self.db, path, HygieneId::ROOT) { + Some(ValueNs::GenericParam(p)) => { + let args = self.generics(); + match args.type_or_const_param_idx(p.into()) { + Some(idx) => Some(self.const_param(p, idx as u32)), + None => { + never!( + "Generic list doesn't contain this param: {:?}, {:?}, {:?}", + args, + path, + p + ); + None + } + } + } + Some(ValueNs::ConstId(c)) => { + let args = GenericArgs::new_from_iter(self.interner, []); + Some(Const::new( + self.interner, + rustc_type_ir::ConstKind::Unevaluated(UnevaluatedConst::new( + SolverDefId::ConstId(c), + args, + )), + )) + } + _ => None, + } + } + pub(crate) fn lower_path_as_const(&mut self, path: &Path, const_type: Ty<'db>) -> Const<'db> { - path_to_const(self.db, self.resolver, path, || self.generics(), const_type) - .unwrap_or_else(|| unknown_const(const_type)) + self.path_to_const(path).unwrap_or_else(|| unknown_const(const_type)) } fn generics(&self) -> &Generics { @@ -338,12 +358,12 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { .is_some_and(|disallow_params_after| index >= disallow_params_after) } - fn type_param(&mut self, id: TypeParamId, index: u32, name: Symbol) -> Ty<'db> { + fn type_param(&mut self, id: TypeParamId, index: u32) -> Ty<'db> { if self.param_index_is_disallowed(index) { // FIXME: Report an error. Ty::new_error(self.interner, ErrorGuaranteed) } else { - Ty::new_param(self.interner, id, index, name) + Ty::new_param(self.interner, id, index) } } @@ -387,20 +407,9 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> { res = Some(TypeNs::GenericParam(type_param_id)); let generics = self.generics(); - let (idx, data) = + let (idx, _data) = generics.type_or_const_param(type_param_id.into()).expect("matching generics"); - let type_data = match data { - TypeOrConstParamData::TypeParamData(ty) => ty, - _ => unreachable!(), - }; - self.type_param( - type_param_id, - idx as u32, - type_data - .name - .as_ref() - .map_or_else(|| sym::MISSING_NAME, |d| d.symbol().clone()), - ) + self.type_param(type_param_id, idx as u32) } &TypeRef::RawPtr(inner, mutability) => { let inner_ty = self.lower_ty(inner); @@ -1058,10 +1067,7 @@ fn type_for_const<'db>(db: &'db dyn HirDatabase, def: ConstId) -> EarlyBinder<'d /// Build the declared type of a static. 
fn type_for_static<'db>(db: &'db dyn HirDatabase, def: StaticId) -> EarlyBinder<'db, Ty<'db>> { let resolver = def.resolver(db); - let module = resolver.module(); - let interner = DbInterner::new_with(db, Some(module.krate()), module.containing_block()); let data = db.static_signature(def); - let parent = def.loc(db).container; let mut ctx = TyLoweringContext::new( db, &resolver, @@ -1177,7 +1183,6 @@ pub(crate) fn impl_self_ty_with_diagnostics_query<'db>( impl_id: ImplId, ) -> (EarlyBinder<'db, Ty<'db>>, Diagnostics) { let resolver = impl_id.resolver(db); - let interner = DbInterner::new_with(db, Some(resolver.krate()), None); let impl_data = db.impl_signature(impl_id); let mut ctx = TyLoweringContext::new( @@ -1451,7 +1456,6 @@ pub(crate) fn trait_environment_query<'db>( return TraitEnvironment::empty(def.krate(db)); } - let interner = DbInterner::new_with(db, Some(def.krate(db)), None); let resolver = def.resolver(db); let mut ctx = TyLoweringContext::new( db, @@ -1509,7 +1513,7 @@ pub(crate) fn trait_environment_query<'db>( continue; }; let idx = idx as u32 + generics.parent_count as u32; - let param_ty = Ty::new_param(ctx.interner, param_id, idx, p.name.clone()); + let param_ty = Ty::new_param(ctx.interner, param_id, idx); if explicitly_unsized_tys.contains(¶m_ty) { continue; } @@ -1635,11 +1639,7 @@ where return; } - let param_name = param_data - .name - .as_ref() - .map_or_else(|| sym::MISSING_NAME, |name| name.symbol().clone()); - let param_ty = Ty::new_param(interner, param_id, param_idx, param_name); + let param_ty = Ty::new_param(interner, param_id, param_idx); if explicitly_unsized_tys.contains(¶m_ty) { return; } @@ -1724,83 +1724,12 @@ fn implicitly_sized_clauses<'a, 'subst, 'db>( ) } -pub(crate) fn make_binders<'db, T: rustc_type_ir::TypeVisitable>>( - interner: DbInterner<'db>, - generics: &Generics, - value: T, -) -> Binder<'db, T> { - Binder::bind_with_vars( - value, - BoundVarKinds::new_from_iter( - interner, - generics.iter_id().map(|x| match x { - hir_def::GenericParamId::ConstParamId(_) => BoundVarKind::Const, - hir_def::GenericParamId::TypeParamId(_) => BoundVarKind::Ty(BoundTyKind::Anon), - hir_def::GenericParamId::LifetimeParamId(_) => { - BoundVarKind::Region(BoundRegionKind::Anon) - } - }), - ), - ) -} - -/// Checks if the provided generic arg matches its expected kind, then lower them via -/// provided closures. Use unknown if there was kind mismatch. 
-/// -pub(crate) fn lower_generic_arg<'a, 'db, T>( - db: &'db dyn HirDatabase, - kind_id: GenericParamId, - arg: &'a GenericArg, - this: &mut T, - store: &ExpressionStore, - for_type: impl FnOnce(&mut T, TypeRefId) -> Ty<'db> + 'a, - for_const: impl FnOnce(&mut T, &ConstRef, Ty<'db>) -> Const<'db> + 'a, - for_const_ty_path_fallback: impl FnOnce(&mut T, &Path, Ty<'db>) -> Const<'db> + 'a, - for_lifetime: impl FnOnce(&mut T, &LifetimeRefId) -> Region<'db> + 'a, -) -> crate::next_solver::GenericArg<'db> { - let interner = DbInterner::new_with(db, None, None); - let kind = match kind_id { - GenericParamId::TypeParamId(_) => ParamKind::Type, - GenericParamId::ConstParamId(id) => { - let ty = db.const_param_ty(id); - ParamKind::Const(ty) - } - GenericParamId::LifetimeParamId(_) => ParamKind::Lifetime, - }; - match (arg, kind) { - (GenericArg::Type(type_ref), ParamKind::Type) => for_type(this, *type_ref).into(), - (GenericArg::Const(c), ParamKind::Const(c_ty)) => { - for_const(this, c, c_ty.to_nextsolver(interner)).into() - } - (GenericArg::Lifetime(lifetime_ref), ParamKind::Lifetime) => { - for_lifetime(this, lifetime_ref).into() - } - (GenericArg::Const(_), ParamKind::Type) => Ty::new_error(interner, ErrorGuaranteed).into(), - (GenericArg::Lifetime(_), ParamKind::Type) => { - Ty::new_error(interner, ErrorGuaranteed).into() - } - (GenericArg::Type(t), ParamKind::Const(c_ty)) => match &store[*t] { - TypeRef::Path(p) => { - for_const_ty_path_fallback(this, p, c_ty.to_nextsolver(interner)).into() - } - _ => unknown_const_as_generic(c_ty.to_nextsolver(interner)), - }, - (GenericArg::Lifetime(_), ParamKind::Const(c_ty)) => { - unknown_const(c_ty.to_nextsolver(interner)).into() - } - (GenericArg::Type(_), ParamKind::Lifetime) => Region::error(interner).into(), - (GenericArg::Const(_), ParamKind::Lifetime) => Region::error(interner).into(), - } -} - #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct GenericDefaults<'db>( - Option>>]>>, -); +pub struct GenericDefaults<'db>(Option>>]>>); impl<'db> GenericDefaults<'db> { #[inline] - pub fn get(&self, idx: usize) -> Option>> { + pub fn get(&self, idx: usize) -> Option>> { self.0.as_ref()?[idx] } } @@ -1837,17 +1766,17 @@ pub(crate) fn generic_defaults_with_diagnostics_query( let mut has_any_default = false; let mut defaults = generic_params .iter_parents_with_store() - .map(|((id, p), store)| { + .map(|((_id, p), store)| { ctx.store = store; - let (result, has_default) = handle_generic_param(&mut ctx, idx, id, p, &generic_params); + let (result, has_default) = handle_generic_param(&mut ctx, idx, p); has_any_default |= has_default; idx += 1; result }) .collect::>(); ctx.diagnostics.clear(); // Don't include diagnostics from the parent. 
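The default-lowering code in this hunk walks parent generics and then the item's own generics in declaration order, which matches the language rule that a generic-parameter default may only refer to parameters declared before it. A tiny illustrative example of that rule (editorial, not taken from the patch) is shown below.

```rust
// Illustrative only: defaults may name earlier parameters, not later ones,
// and defaulted parameters must come last.
struct Pair<A, B = A>(A, B); // OK: `B`'s default names the earlier `A`.

// struct Bad<A = B, B>(A, B); // Rejected: the default names a later parameter
//                             // (and defaulted parameters must trail).

fn main() {
    // With only one type argument supplied, `B` falls back to its default, `A`.
    let p: Pair<u32> = Pair(1, 2);
    let _ = p;
}
```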
- defaults.extend(generic_params.iter_self().map(|(id, p)| { - let (result, has_default) = handle_generic_param(&mut ctx, idx, id, p, &generic_params); + defaults.extend(generic_params.iter_self().map(|(_id, p)| { + let (result, has_default) = handle_generic_param(&mut ctx, idx, p); has_any_default |= has_default; idx += 1; result @@ -1863,10 +1792,8 @@ pub(crate) fn generic_defaults_with_diagnostics_query( fn handle_generic_param<'db>( ctx: &mut TyLoweringContext<'db, '_>, idx: usize, - id: GenericParamId, p: GenericParamDataRef<'_>, - generic_params: &Generics, - ) -> (Option>>, bool) { + ) -> (Option>>, bool) { ctx.lowering_param_default(idx as u32); match p { GenericParamDataRef::TypeParamData(p) => { @@ -1874,11 +1801,7 @@ pub(crate) fn generic_defaults_with_diagnostics_query( (ty.map(|ty| EarlyBinder::bind(ty.into())), p.default.is_some()) } GenericParamDataRef::ConstParamData(p) => { - let GenericParamId::ConstParamId(id) = id else { - unreachable!("Unexpected lifetime or type argument") - }; - - let mut val = p.default.map(|c| { + let val = p.default.map(|c| { let param_ty = ctx.lower_ty(p.ty); let c = ctx.lower_const(c, param_ty); c.into() @@ -1999,11 +1922,6 @@ pub(crate) fn associated_ty_item_bounds<'db>( db: &'db dyn HirDatabase, type_alias: TypeAliasId, ) -> EarlyBinder<'db, BoundExistentialPredicates<'db>> { - let trait_ = match type_alias.lookup(db).container { - ItemContainerId::TraitId(t) => t, - _ => panic!("associated type not in trait"), - }; - let type_alias_data = db.type_alias_signature(type_alias); let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db); let interner = DbInterner::new_with(db, Some(resolver.krate()), None); @@ -2051,7 +1969,7 @@ pub(crate) fn associated_ty_item_bounds<'db>( p.term, )), ), - rustc_type_ir::ClauseKind::TypeOutlives(outlives_predicate) => None, + rustc_type_ir::ClauseKind::TypeOutlives(_) => None, rustc_type_ir::ClauseKind::RegionOutlives(_) | rustc_type_ir::ClauseKind::ConstArgHasType(_, _) | rustc_type_ir::ClauseKind::WellFormed(_) @@ -2066,15 +1984,15 @@ pub(crate) fn associated_ty_item_bounds<'db>( }); } - if !ctx.unsized_types.contains(&self_ty) { - let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate()); + if !ctx.unsized_types.contains(&self_ty) + && let Some(sized_trait) = LangItem::Sized.resolve_trait(db, resolver.krate()) + { let sized_clause = Binder::dummy(ExistentialPredicate::Trait(ExistentialTraitRef::new( interner, - trait_.into(), - [] as [crate::next_solver::GenericArg<'_>; 0], + sized_trait.into(), + [] as [GenericArg<'_>; 0], ))); bounds.push(sized_clause); - bounds.shrink_to_fit(); } EarlyBinder::bind(BoundExistentialPredicates::new_from_iter(interner, bounds)) @@ -2117,7 +2035,6 @@ fn named_associated_type_shorthand_candidates<'db, R>( ) -> Option { let db = interner.db; let mut search = |t: TraitRef<'db>| -> Option { - let trait_id = t.def_id.0; let mut checked_traits = FxHashSet::default(); let mut check_trait = |trait_ref: TraitRef<'db>| { let trait_id = trait_ref.def_id.0; @@ -2192,10 +2109,7 @@ fn named_associated_type_shorthand_candidates<'db, R>( let trait_generics = generics(db, trait_id.into()); tracing::debug!(?trait_generics); if trait_generics[param_id.local_id()].is_trait_self() { - let args = crate::next_solver::GenericArgs::identity_for_item( - interner, - trait_id.into(), - ); + let args = GenericArgs::identity_for_item(interner, trait_id.into()); let trait_ref = TraitRef::new_from_args(interner, trait_id.into(), args); tracing::debug!(?args, ?trait_ref); return 
search(trait_ref); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver/path.rs index 6bfe266b460c7..a4ff47e3892a6 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver/path.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver/path.rs @@ -1,11 +1,8 @@ //! A wrapper around [`TyLoweringContext`] specifically for lowering paths. -use std::ops::Deref; - use either::Either; use hir_def::{ - AssocItemId, GenericDefId, GenericParamId, Lookup, TraitId, TypeAliasId, - builtin_type::BuiltinType, + GenericDefId, GenericParamId, Lookup, TraitId, TypeAliasId, expr_store::{ ExpressionStore, HygieneId, path::{GenericArg, GenericArgs, GenericArgsParentheses, Path, PathSegment, PathSegments}, @@ -18,13 +15,11 @@ use hir_def::{ type_ref::{TypeRef, TypeRefId}, }; use hir_expand::name::Name; -use intern::sym; -use rustc_hash::FxHashSet; use rustc_type_ir::{ - AliasTerm, AliasTy, AliasTyKind, TypeVisitableExt, - inherent::{GenericArgs as _, IntoKind, Region as _, SliceLike, Ty as _}, + AliasTerm, AliasTy, AliasTyKind, + inherent::{GenericArgs as _, Region as _, SliceLike, Ty as _}, }; -use smallvec::{SmallVec, smallvec}; +use smallvec::SmallVec; use stdx::never; use crate::{ @@ -34,16 +29,12 @@ use crate::{ db::HirDatabase, generics::{Generics, generics}, lower::PathDiagnosticCallbackData, - lower_nextsolver::{ - LifetimeElisionKind, PredicateFilter, generic_predicates_filtered_by, - named_associated_type_shorthand_candidates, - }, + lower_nextsolver::{LifetimeElisionKind, named_associated_type_shorthand_candidates}, next_solver::{ - AdtDef, Binder, Clause, Const, DbInterner, ErrorGuaranteed, Predicate, ProjectionPredicate, - Region, SolverDefId, TraitRef, Ty, + Binder, Clause, Const, DbInterner, ErrorGuaranteed, Predicate, ProjectionPredicate, Region, + TraitRef, Ty, mapping::{ChalkToNextSolver, convert_binder_to_early_binder}, }, - primitive, }; use super::{ @@ -173,22 +164,6 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> { } } - fn prohibit_parenthesized_generic_args(&mut self) -> bool { - if let Some(generic_args) = self.current_or_prev_segment.args_and_bindings { - match generic_args.parenthesized { - GenericArgsParentheses::No => {} - GenericArgsParentheses::ReturnTypeNotation | GenericArgsParentheses::ParenSugar => { - let segment = self.current_segment_u32(); - self.on_diagnostic( - PathLoweringDiagnostic::ParenthesizedGenericArgsWithoutFnTrait { segment }, - ); - return true; - } - } - } - false - } - // When calling this, the current segment is the resolved segment (we don't advance it yet). 
pub(crate) fn lower_partly_resolved_path( &mut self, @@ -274,19 +249,9 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> { Ty::new_error(self.ctx.interner, ErrorGuaranteed) } Some(idx) => { - let (pidx, param) = generics.iter().nth(idx).unwrap(); + let (pidx, _param) = generics.iter().nth(idx).unwrap(); assert_eq!(pidx, param_id.into()); - let p = match param { - GenericParamDataRef::TypeParamData(p) => p, - _ => unreachable!(), - }; - self.ctx.type_param( - param_id, - idx as u32, - p.name - .as_ref() - .map_or_else(|| sym::MISSING_NAME.clone(), |p| p.symbol().clone()), - ) + self.ctx.type_param(param_id, idx as u32) } } } @@ -520,11 +485,10 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> { let Some(res) = res else { return Ty::new_error(self.ctx.interner, ErrorGuaranteed); }; - let db = self.ctx.db; let def = self.ctx.def; let segment = self.current_or_prev_segment; let assoc_name = segment.name; - let mut check_alias = |name: &Name, t: TraitRef<'db>, associated_ty: TypeAliasId| { + let check_alias = |name: &Name, t: TraitRef<'db>, associated_ty: TypeAliasId| { if name != assoc_name { return None; } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs index 06c7cdd4e4166..bce17905037cc 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs @@ -37,7 +37,7 @@ use crate::{ Canonical, DbInterner, ErrorGuaranteed, GenericArgs, Goal, Predicate, Region, SolverDefId, TraitRef, Ty, TyKind, TypingMode, infer::{ - DbInternerInferExt, DefineOpaqueTypes, + DbInternerInferExt, traits::{Obligation, ObligationCause, PredicateObligation}, }, obligation_ctxt::ObligationCtxt, @@ -1654,7 +1654,7 @@ fn is_valid_trait_method_candidate<'db>( let res = table .infer_ctxt .at(&ObligationCause::dummy(), table.trait_env.env) - .relate(DefineOpaqueTypes::No, expected_receiver, variance, receiver_ty); + .relate(expected_receiver, variance, receiver_ty); let Ok(infer_ok) = res else { return IsValidCandidate::No; }; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver.rs index 776e0d956f404..f5b4fa1e2a004 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver.rs @@ -1,5 +1,4 @@ //! Things relevant to the next trait solver. 
-#![allow(unused, unreachable_pub)] pub mod abi; mod consts; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/consts.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/consts.rs index c5a1e7d315465..8d81a382c362a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/consts.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/consts.rs @@ -2,10 +2,9 @@ use std::hash::Hash; -use hir_def::{ConstParamId, TypeOrConstParamId}; -use intern::{Interned, Symbol}; +use hir_def::ConstParamId; use macros::{TypeFoldable, TypeVisitable}; -use rustc_ast_ir::{try_visit, visit::VisitorResult}; +use rustc_ast_ir::visit::VisitorResult; use rustc_type_ir::{ BoundVar, DebruijnIndex, FlagComputation, Flags, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, WithCachedTypeInfo, @@ -14,7 +13,7 @@ use rustc_type_ir::{ }; use crate::{ - ConstScalar, MemoryMap, + MemoryMap, interner::InternedWrapperNoDebug, next_solver::{ClauseKind, ParamEnv}, }; @@ -429,7 +428,7 @@ impl<'db> PlaceholderLike> for PlaceholderConst { impl<'db> Relate> for ExprConst { fn relate>>( - relation: &mut R, + _relation: &mut R, a: Self, b: Self, ) -> rustc_type_ir::relate::RelateResult, Self> { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fold.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fold.rs index a42fdb0943041..588d42857493d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fold.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fold.rs @@ -1,9 +1,8 @@ //! Fold impls for the next-trait-solver. use rustc_type_ir::{ - BoundVar, DebruijnIndex, RegionKind, TypeFoldable, TypeFolder, TypeSuperFoldable, - TypeVisitableExt, - inherent::{IntoKind, Region as _}, + DebruijnIndex, RegionKind, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitableExt, + inherent::IntoKind, }; use crate::next_solver::BoundConst; @@ -55,7 +54,7 @@ pub(crate) struct BoundVarReplacer<'db, D> { } impl<'db, D: BoundVarReplacerDelegate<'db>> BoundVarReplacer<'db, D> { - pub fn new(tcx: DbInterner<'db>, delegate: D) -> Self { + pub(crate) fn new(tcx: DbInterner<'db>, delegate: D) -> Self { BoundVarReplacer { interner: tcx, current_index: DebruijnIndex::ZERO, delegate } } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill.rs index 262da858d466a..7783075d1a369 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill.rs @@ -2,7 +2,7 @@ mod errors; -use std::{marker::PhantomData, mem, ops::ControlFlow, vec::ExtractIf}; +use std::{mem, ops::ControlFlow}; use rustc_hash::FxHashSet; use rustc_next_trait_solver::{ @@ -46,6 +46,7 @@ pub struct FulfillmentCtxt<'db> { /// outside of this snapshot leads to subtle bugs if the snapshot /// gets rolled back. Because of this we explicitly check that we only /// use the context in exactly this snapshot. 
+ #[expect(unused)] usable_in_snapshot: usize, } @@ -69,10 +70,6 @@ impl<'db> ObligationStorage<'db> { self.pending.push((obligation, stalled_on)); } - fn has_pending_obligations(&self) -> bool { - !self.pending.is_empty() || !self.overflowed.is_empty() - } - fn clone_pending(&self) -> PredicateObligations<'db> { let mut obligations: PredicateObligations<'db> = self.pending.iter().map(|(o, _)| o.clone()).collect(); @@ -125,10 +122,10 @@ impl<'db> FulfillmentCtxt<'db> { } impl<'db> FulfillmentCtxt<'db> { - #[tracing::instrument(level = "trace", skip(self, infcx))] + #[tracing::instrument(level = "trace", skip(self, _infcx))] pub(crate) fn register_predicate_obligation( &mut self, - infcx: &InferCtxt<'db>, + _infcx: &InferCtxt<'db>, obligation: PredicateObligation<'db>, ) { // FIXME: See the comment in `try_evaluate_obligations()`. @@ -138,7 +135,7 @@ impl<'db> FulfillmentCtxt<'db> { pub(crate) fn register_predicate_obligations( &mut self, - infcx: &InferCtxt<'db>, + _infcx: &InferCtxt<'db>, obligations: impl IntoIterator>, ) { // FIXME: See the comment in `try_evaluate_obligations()`. @@ -148,7 +145,7 @@ impl<'db> FulfillmentCtxt<'db> { pub(crate) fn collect_remaining_errors( &mut self, - infcx: &InferCtxt<'db>, + _infcx: &InferCtxt<'db>, ) -> Vec> { self.obligations .pending @@ -235,10 +232,6 @@ impl<'db> FulfillmentCtxt<'db> { self.collect_remaining_errors(infcx) } - fn has_pending_obligations(&self) -> bool { - self.obligations.has_pending_obligations() - } - pub(crate) fn pending_obligations(&self) -> PredicateObligations<'db> { self.obligations.clone_pending() } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill/errors.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill/errors.rs index ab4a229fbc05f..82dbf9403cabf 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill/errors.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fulfill/errors.rs @@ -9,15 +9,15 @@ use rustc_next_trait_solver::solve::{GoalEvaluation, SolverDelegateEvalExt}; use rustc_type_ir::{ AliasRelationDirection, AliasTermKind, HostEffectPredicate, Interner, PredicatePolarity, error::ExpectedFound, - inherent::{IntoKind, PlaceholderConst, SliceLike, Span as _}, + inherent::{IntoKind, SliceLike, Span as _}, lang_items::SolverTraitLangItem, - solve::{CandidateSource, Certainty, GoalSource, MaybeCause, NoSolution}, + solve::{Certainty, GoalSource, MaybeCause, NoSolution}, }; use tracing::{instrument, trace}; use crate::next_solver::{ AliasTerm, Binder, ClauseKind, Const, ConstKind, DbInterner, PolyTraitPredicate, PredicateKind, - SolverContext, SolverDefId, Span, Term, TraitPredicate, Ty, TyKind, TypeError, + SolverContext, Span, Term, TraitPredicate, Ty, TyKind, TypeError, fulfill::NextSolverError, infer::{ InferCtxt, @@ -529,7 +529,6 @@ impl<'db> ProofTreeVisitor<'db> for BestObligation<'db> { } } - let mut impl_where_bound_count = 0; for nested_goal in nested_goals { trace!(nested_goal = ?(nested_goal.goal(), nested_goal.source(), nested_goal.result())); @@ -542,34 +541,27 @@ impl<'db> ProofTreeVisitor<'db> for BestObligation<'db> { recursion_depth: self.obligation.recursion_depth + 1, }; - let obligation; - match (child_mode, nested_goal.source()) { + let obligation = match (child_mode, nested_goal.source()) { ( ChildMode::Trait(_) | ChildMode::Host(_), GoalSource::Misc | GoalSource::TypeRelating | GoalSource::NormalizeGoal(_), ) => { continue; } - (ChildMode::Trait(parent_trait_pred), GoalSource::ImplWhereBound) => { - obligation = 
make_obligation(); - impl_where_bound_count += 1; + (ChildMode::Trait(_parent_trait_pred), GoalSource::ImplWhereBound) => { + make_obligation() } ( - ChildMode::Host(parent_host_pred), + ChildMode::Host(_parent_host_pred), GoalSource::ImplWhereBound | GoalSource::AliasBoundConstCondition, - ) => { - obligation = make_obligation(); - impl_where_bound_count += 1; - } + ) => make_obligation(), // Skip over a higher-ranked predicate. - (_, GoalSource::InstantiateHigherRanked) => { - obligation = self.obligation.clone(); - } + (_, GoalSource::InstantiateHigherRanked) => self.obligation.clone(), (ChildMode::PassThrough, _) | (_, GoalSource::AliasWellFormed | GoalSource::AliasBoundConstCondition) => { - obligation = make_obligation(); + make_obligation() } - } + }; self.with_derived_obligation(obligation, |this| nested_goal.visit_with(this))?; } @@ -628,35 +620,29 @@ impl<'db> NextSolverError<'db> { } mod wf { - use std::iter; - use hir_def::ItemContainerId; use rustc_type_ir::inherent::{ - AdtDef, BoundExistentialPredicates, GenericArg, GenericArgs as _, IntoKind, SliceLike, - Term as _, Ty as _, + AdtDef, BoundExistentialPredicates, GenericArgs as _, IntoKind, SliceLike, Term as _, + Ty as _, }; use rustc_type_ir::lang_items::SolverTraitLangItem; use rustc_type_ir::{ - Interner, PredicatePolarity, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, - TypeVisitor, + Interner, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor, }; - use tracing::{debug, instrument, trace}; + use tracing::{debug, instrument}; use crate::next_solver::infer::InferCtxt; - use crate::next_solver::infer::traits::{ - Obligation, ObligationCause, PredicateObligation, PredicateObligations, - }; + use crate::next_solver::infer::traits::{Obligation, ObligationCause, PredicateObligations}; use crate::next_solver::{ - AliasTerm, Binder, ClauseKind, Const, ConstKind, Ctor, DbInterner, ExistentialPredicate, - GenericArgs, ParamEnv, Predicate, PredicateKind, Region, SolverDefId, Term, TraitPredicate, - TraitRef, Ty, TyKind, + Binder, ClauseKind, Const, ConstKind, Ctor, DbInterner, ExistentialPredicate, GenericArgs, + ParamEnv, Predicate, PredicateKind, Region, SolverDefId, Term, TraitRef, Ty, TyKind, }; /// Compute the predicates that are required for a type to be well-formed. /// /// This is only intended to be used in the new solver, since it does not /// take into account recursion depth or proper error-reporting spans. - pub fn unnormalized_obligations<'db>( + pub(crate) fn unnormalized_obligations<'db>( infcx: &InferCtxt<'db>, param_env: ParamEnv<'db>, term: Term<'db>, @@ -683,158 +669,11 @@ mod wf { recursion_depth: usize, } - /// Controls whether we "elaborate" supertraits and so forth on the WF - /// predicates. This is a kind of hack to address #43784. The - /// underlying problem in that issue was a trait structure like: - /// - /// ```ignore (illustrative) - /// trait Foo: Copy { } - /// trait Bar: Foo { } - /// impl Foo for T { } - /// impl Bar for T { } - /// ``` - /// - /// Here, in the `Foo` impl, we will check that `T: Copy` holds -- but - /// we decide that this is true because `T: Bar` is in the - /// where-clauses (and we can elaborate that to include `T: - /// Copy`). This wouldn't be a problem, except that when we check the - /// `Bar` impl, we decide that `T: Foo` must hold because of the `Foo` - /// impl. And so nowhere did we check that `T: Copy` holds! - /// - /// To resolve this, we elaborate the WF requirements that must be - /// proven when checking impls. This means that (e.g.) 
the `impl Bar - /// for T` will be forced to prove not only that `T: Foo` but also `T: - /// Copy` (which it won't be able to do, because there is no `Copy` - /// impl for `T`). - #[derive(Debug, PartialEq, Eq, Copy, Clone)] - enum Elaborate { - All, - None, - } - impl<'a, 'db> WfPredicates<'a, 'db> { fn interner(&self) -> DbInterner<'db> { self.infcx.interner } - /// Pushes the obligations required for `trait_ref` to be WF into `self.out`. - fn add_wf_preds_for_trait_pred( - &mut self, - trait_pred: TraitPredicate<'db>, - elaborate: Elaborate, - ) { - let tcx = self.interner(); - let trait_ref = trait_pred.trait_ref; - - // Negative trait predicates don't require supertraits to hold, just - // that their args are WF. - if trait_pred.polarity == PredicatePolarity::Negative { - self.add_wf_preds_for_negative_trait_pred(trait_ref); - return; - } - - // if the trait predicate is not const, the wf obligations should not be const as well. - let obligations = self.nominal_obligations(trait_ref.def_id.0.into(), trait_ref.args); - - debug!("compute_trait_pred obligations {:?}", obligations); - let param_env = self.param_env; - let depth = self.recursion_depth; - - let extend = |PredicateObligation { predicate, mut cause, .. }| { - Obligation::with_depth(tcx, cause, depth, param_env, predicate) - }; - - if let Elaborate::All = elaborate { - let implied_obligations = rustc_type_ir::elaborate::elaborate(tcx, obligations); - let implied_obligations = implied_obligations.map(extend); - self.out.extend(implied_obligations); - } else { - self.out.extend(obligations); - } - - self.out.extend( - trait_ref - .args - .iter() - .enumerate() - .filter_map(|(i, arg)| arg.as_term().map(|t| (i, t))) - .filter(|(_, term)| !term.has_escaping_bound_vars()) - .map(|(i, term)| { - let mut cause = ObligationCause::misc(); - // The first arg is the self ty - use the correct span for it. - Obligation::with_depth( - tcx, - cause, - depth, - param_env, - ClauseKind::WellFormed(term), - ) - }), - ); - } - - // Compute the obligations that are required for `trait_ref` to be WF, - // given that it is a *negative* trait predicate. - fn add_wf_preds_for_negative_trait_pred(&mut self, trait_ref: TraitRef<'db>) { - for arg in trait_ref.args { - if let Some(term) = arg.as_term() { - self.add_wf_preds_for_term(term); - } - } - } - - /// Pushes the obligations required for an alias (except inherent) to be WF - /// into `self.out`. - fn add_wf_preds_for_alias_term(&mut self, data: AliasTerm<'db>) { - // A projection is well-formed if - // - // (a) its predicates hold (*) - // (b) its args are wf - // - // (*) The predicates of an associated type include the predicates of - // the trait that it's contained in. 
For example, given - // - // trait A: Clone { - // type X where T: Copy; - // } - // - // The predicates of `<() as A>::X` are: - // [ - // `(): Sized` - // `(): Clone` - // `(): A` - // `i32: Sized` - // `i32: Clone` - // `i32: Copy` - // ] - let obligations = self.nominal_obligations(data.def_id, data.args); - self.out.extend(obligations); - - self.add_wf_preds_for_projection_args(data.args); - } - - fn add_wf_preds_for_projection_args(&mut self, args: GenericArgs<'db>) { - let tcx = self.interner(); - let cause = ObligationCause::new(); - let param_env = self.param_env; - let depth = self.recursion_depth; - - self.out.extend( - args.iter() - .filter_map(|arg| arg.as_term()) - .filter(|term| !term.has_escaping_bound_vars()) - .map(|term| { - Obligation::with_depth( - tcx, - cause.clone(), - depth, - param_env, - ClauseKind::WellFormed(term), - ) - }), - ); - } - fn require_sized(&mut self, subty: Ty<'db>) { if !subty.has_escaping_bound_vars() { let cause = ObligationCause::new(); @@ -895,7 +734,7 @@ mod wf { fn add_wf_preds_for_dyn_ty( &mut self, - ty: Ty<'db>, + _ty: Ty<'db>, data: &[Binder<'db, ExistentialPredicate<'db>>], region: Region<'db>, ) { @@ -1013,7 +852,7 @@ mod wf { )); } - TyKind::Pat(base_ty, pat) => { + TyKind::Pat(base_ty, _pat) => { self.require_sized(base_ty); } @@ -1036,7 +875,7 @@ mod wf { let obligations = self.nominal_obligations(data.def_id, data.args); self.out.extend(obligations); } - TyKind::Alias(rustc_type_ir::Inherent, data) => { + TyKind::Alias(rustc_type_ir::Inherent, _data) => { return; } @@ -1148,7 +987,7 @@ mod wf { // Let the visitor iterate into the argument/return // types appearing in the fn signature. } - TyKind::UnsafeBinder(ty) => {} + TyKind::UnsafeBinder(_ty) => {} TyKind::Dynamic(data, r) => { // WfObject @@ -1291,7 +1130,7 @@ mod wf { /// /// Requires that trait definitions have been processed so that we can /// elaborate predicates and walk supertraits. - pub fn object_region_bounds<'db>( + pub(crate) fn object_region_bounds<'db>( interner: DbInterner<'db>, existential_predicates: &[Binder<'db, ExistentialPredicate<'db>>], ) -> Vec> { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generic_arg.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generic_arg.rs index 38293c45422c1..b2632ba63709d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generic_arg.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generic_arg.rs @@ -1,27 +1,20 @@ //! Things related to generic args in the next-trait-solver. 
use hir_def::{GenericDefId, GenericParamId}; -use intern::{Interned, Symbol}; use macros::{TypeFoldable, TypeVisitable}; -use rustc_type_ir::inherent::Const as _; use rustc_type_ir::{ - ClosureArgs, CollectAndApply, ConstVid, CoroutineArgs, CoroutineClosureArgs, FnSig, FnSigTys, - GenericArgKind, IntTy, Interner, TermKind, TyKind, TyVid, TypeFoldable, TypeVisitable, - Variance, - inherent::{ - GenericArg as _, GenericArgs as _, GenericsOf, IntoKind, SliceLike, Term as _, Ty as _, - }, + ClosureArgs, CollectAndApply, ConstVid, CoroutineArgs, CoroutineClosureArgs, FnSigTys, + GenericArgKind, Interner, TermKind, TyKind, TyVid, Variance, + inherent::{GenericArg as _, GenericsOf, IntoKind, SliceLike, Term as _, Ty as _}, relate::{Relate, VarianceDiagInfo}, }; use smallvec::SmallVec; -use crate::db::HirDatabase; -use crate::next_solver::{Binder, PolyFnSig}; +use crate::next_solver::{PolyFnSig, interned_vec_db}; use super::{ Const, DbInterner, EarlyParamRegion, ErrorGuaranteed, ParamConst, Region, SolverDefId, Ty, Tys, - generics::{GenericParamDef, Generics}, - interned_vec_db, + generics::Generics, }; #[derive(Copy, Clone, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable)] @@ -191,7 +184,7 @@ impl<'db> GenericArgs<'db> { mut mk_kind: F, ) -> GenericArgs<'db> where - F: FnMut(&Symbol, u32, GenericParamId, &[GenericArg<'db>]) -> GenericArg<'db>, + F: FnMut(u32, GenericParamId, &[GenericArg<'db>]) -> GenericArg<'db>, { let defs = interner.generics_of(def_id); let count = defs.count(); @@ -202,9 +195,7 @@ impl<'db> GenericArgs<'db> { /// Creates an all-error `GenericArgs`. pub fn error_for_item(interner: DbInterner<'db>, def_id: SolverDefId) -> GenericArgs<'db> { - GenericArgs::for_item(interner, def_id, |_, _, id, _| { - GenericArg::error_from_id(interner, id) - }) + GenericArgs::for_item(interner, def_id, |_, id, _| GenericArg::error_from_id(interner, id)) } /// Like `for_item`, but prefers the default of a parameter if it has any. 
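As context for the callback shape that `for_item` and its `fill_*` variants settle on in this hunk, here is a minimal standalone sketch of the pattern; the types and names are illustrative only, not rust-analyzer's API. Each argument is produced from its index, its parameter id, and the prefix of arguments already built, which is what allows a default for a later parameter to be instantiated against earlier arguments once the `&Symbol` name is no longer threaded through.

```rust
// Standalone sketch of the fill-by-callback pattern (illustrative types, not rust-analyzer's).
#[derive(Clone, Copy, Debug, PartialEq)]
enum Arg {
    Lifetime(u32),
    Type(u32),
    Const(u32),
}

#[derive(Clone, Copy)]
enum ParamId {
    Lifetime,
    Type,
    Const,
}

fn for_item<F>(params: &[ParamId], mut mk_kind: F) -> Vec<Arg>
where
    F: FnMut(u32, ParamId, &[Arg]) -> Arg,
{
    let mut args = Vec::with_capacity(params.len());
    for (index, &id) in params.iter().enumerate() {
        // The callback sees everything built so far, mirroring how defaults can be
        // instantiated against the preceding arguments.
        let arg = mk_kind(index as u32, id, &args);
        args.push(arg);
    }
    args
}

fn main() {
    let params = [ParamId::Type, ParamId::Lifetime, ParamId::Const];
    let args = for_item(&params, |index, id, _prev| match id {
        ParamId::Lifetime => Arg::Lifetime(index),
        ParamId::Type => Arg::Type(index),
        ParamId::Const => Arg::Const(index),
    });
    assert_eq!(args, vec![Arg::Type(0), Arg::Lifetime(1), Arg::Const(2)]);
}
```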
@@ -214,14 +205,12 @@ impl<'db> GenericArgs<'db> { mut fallback: F, ) -> GenericArgs<'db> where - F: FnMut(&Symbol, u32, GenericParamId, &[GenericArg<'db>]) -> GenericArg<'db>, + F: FnMut(u32, GenericParamId, &[GenericArg<'db>]) -> GenericArg<'db>, { let defaults = interner.db.generic_defaults_ns(def_id); - Self::for_item(interner, def_id.into(), |name, idx, id, prev| { - match defaults.get(idx as usize) { - Some(default) => default.instantiate(interner, prev), - None => fallback(name, idx, id, prev), - } + Self::for_item(interner, def_id.into(), |idx, id, prev| match defaults.get(idx as usize) { + Some(default) => default.instantiate(interner, prev), + None => fallback(idx, id, prev), }) } @@ -233,11 +222,11 @@ impl<'db> GenericArgs<'db> { mut fallback: F, ) -> GenericArgs<'db> where - F: FnMut(&Symbol, u32, GenericParamId, &[GenericArg<'db>]) -> GenericArg<'db>, + F: FnMut(u32, GenericParamId, &[GenericArg<'db>]) -> GenericArg<'db>, { let mut iter = first.into_iter(); - Self::for_item(interner, def_id, |name, idx, id, prev| { - iter.next().unwrap_or_else(|| fallback(name, idx, id, prev)) + Self::for_item(interner, def_id, |idx, id, prev| { + iter.next().unwrap_or_else(|| fallback(idx, id, prev)) }) } @@ -249,14 +238,14 @@ impl<'db> GenericArgs<'db> { mut fallback: F, ) -> GenericArgs<'db> where - F: FnMut(&Symbol, u32, GenericParamId, &[GenericArg<'db>]) -> GenericArg<'db>, + F: FnMut(u32, GenericParamId, &[GenericArg<'db>]) -> GenericArg<'db>, { let defaults = interner.db.generic_defaults_ns(def_id); - Self::fill_rest(interner, def_id.into(), first, |name, idx, id, prev| { + Self::fill_rest(interner, def_id.into(), first, |idx, id, prev| { defaults .get(idx as usize) .map(|default| default.instantiate(interner, prev)) - .unwrap_or_else(|| fallback(name, idx, id, prev)) + .unwrap_or_else(|| fallback(idx, id, prev)) }) } @@ -266,9 +255,8 @@ impl<'db> GenericArgs<'db> { defs: Generics, mk_kind: &mut F, ) where - F: FnMut(&Symbol, u32, GenericParamId, &[GenericArg<'db>]) -> GenericArg<'db>, + F: FnMut(u32, GenericParamId, &[GenericArg<'db>]) -> GenericArg<'db>, { - let self_len = defs.own_params.len() as u32; if let Some(def_id) = defs.parent { let parent_defs = interner.generics_of(def_id.into()); Self::fill_item(args, interner, parent_defs, mk_kind); @@ -278,12 +266,11 @@ impl<'db> GenericArgs<'db> { fn fill_single(args: &mut SmallVec<[GenericArg<'db>; 8]>, defs: &Generics, mk_kind: &mut F) where - F: FnMut(&Symbol, u32, GenericParamId, &[GenericArg<'db>]) -> GenericArg<'db>, + F: FnMut(u32, GenericParamId, &[GenericArg<'db>]) -> GenericArg<'db>, { - let start_len = args.len(); args.reserve(defs.own_params.len()); for param in &defs.own_params { - let kind = mk_kind(¶m.name, args.len() as u32, param.id, args); + let kind = mk_kind(args.len() as u32, param.id, args); args.push(kind); } } @@ -374,9 +361,7 @@ impl<'db> rustc_type_ir::inherent::GenericArgs> for GenericArgs< interner: DbInterner<'db>, def_id: as rustc_type_ir::Interner>::DefId, ) -> as rustc_type_ir::Interner>::GenericArgs { - Self::for_item(interner, def_id, |name, index, kind, _| { - mk_param(interner, index, name, kind) - }) + Self::for_item(interner, def_id, |index, kind, _| mk_param(interner, index, kind)) } fn extend_with_error( @@ -384,7 +369,7 @@ impl<'db> rustc_type_ir::inherent::GenericArgs> for GenericArgs< def_id: as rustc_type_ir::Interner>::DefId, original_args: &[ as rustc_type_ir::Interner>::GenericArg], ) -> as rustc_type_ir::Interner>::GenericArgs { - Self::for_item(interner, def_id, |name, index, kind, _| { 
+ Self::for_item(interner, def_id, |index, kind, _| { if let Some(arg) = original_args.get(index as usize) { *arg } else { @@ -461,7 +446,7 @@ impl<'db> rustc_type_ir::inherent::GenericArgs> for GenericArgs< signature_parts_ty, tupled_upvars_ty, coroutine_captures_by_ref_ty, - coroutine_witness_ty, + _coroutine_witness_ty, ] => rustc_type_ir::CoroutineClosureArgsParts { parent_args: GenericArgs::new_from_iter( DbInterner::conjure(), @@ -494,18 +479,12 @@ impl<'db> rustc_type_ir::inherent::GenericArgs> for GenericArgs< } } -pub fn mk_param<'db>( - interner: DbInterner<'db>, - index: u32, - name: &Symbol, - id: GenericParamId, -) -> GenericArg<'db> { - let name = name.clone(); +pub fn mk_param<'db>(interner: DbInterner<'db>, index: u32, id: GenericParamId) -> GenericArg<'db> { match id { GenericParamId::LifetimeParamId(id) => { Region::new_early_param(interner, EarlyParamRegion { index, id }).into() } - GenericParamId::TypeParamId(id) => Ty::new_param(interner, id, index, name).into(), + GenericParamId::TypeParamId(id) => Ty::new_param(interner, id, index).into(), GenericParamId::ConstParamId(id) => { Const::new_param(interner, ParamConst { index, id }).into() } @@ -596,13 +575,4 @@ impl<'db> DbInterner<'db> { { T::collect_and_apply(iter, |xs| self.mk_args(xs)) } - - pub(super) fn check_args_compatible(self, def_id: SolverDefId, args: GenericArgs<'db>) -> bool { - // TODO - true - } - - pub(super) fn debug_assert_args_compatible(self, def_id: SolverDefId, args: GenericArgs<'db>) { - // TODO - } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generics.rs index 5ec9a18a6c20e..d5a9a6f527bb5 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generics.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generics.rs @@ -1,36 +1,25 @@ //! Things related to generics in the next-trait-solver. 
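To illustrate the shape `GenericParamDef` takes once the `name` field is dropped in the hunks below, here is a minimal standalone sketch with illustrative names and types (not rust-analyzer's): a parameter carries only `(index, id)`, and a display name is resolved from the id separately when diagnostics need one.

```rust
// Standalone sketch: params keyed by (index, id); names resolved on demand.
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct ParamId(u32);

#[derive(Debug)]
struct GenericParamDef {
    index: u32,
    id: ParamId,
}

struct NameTable {
    names: HashMap<ParamId, String>,
}

impl NameTable {
    // Look up a display name only when it is actually needed (e.g. for diagnostics).
    fn name_of(&self, id: ParamId) -> &str {
        self.names.get(&id).map(String::as_str).unwrap_or("{missing}")
    }
}

fn main() {
    let params = vec![
        GenericParamDef { index: 0, id: ParamId(10) },
        GenericParamDef { index: 1, id: ParamId(11) },
    ];
    let names = NameTable {
        names: HashMap::from([(ParamId(10), "T".to_owned()), (ParamId(11), "U".to_owned())]),
    };
    for p in &params {
        println!("#{} -> {}", p.index, names.name_of(p.id));
    }
}
```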
use hir_def::{ - ConstParamId, GenericDefId, GenericParamId, ItemContainerId, LifetimeParamId, Lookup, - TypeOrConstParamId, TypeParamId, - db::DefDatabase, - expr_store::ExpressionStore, + ConstParamId, GenericDefId, GenericParamId, LifetimeParamId, TypeOrConstParamId, TypeParamId, hir::generics::{ - GenericParamDataRef, GenericParams, LifetimeParamData, LocalLifetimeParamId, - LocalTypeOrConstParamId, TypeOrConstParamData, TypeParamData, TypeParamProvenance, - WherePredicate, + GenericParams, LocalTypeOrConstParamId, TypeOrConstParamData, TypeParamData, + TypeParamProvenance, }, }; -use hir_expand::name::Name; -use intern::{Symbol, sym}; -use la_arena::Arena; -use rustc_type_ir::inherent::Ty as _; -use triomphe::Arc; -use crate::{db::HirDatabase, generics::parent_generic_def, next_solver::Ty}; +use crate::{db::HirDatabase, generics::parent_generic_def}; -use super::{Const, EarlyParamRegion, ErrorGuaranteed, ParamConst, Region, SolverDefId}; +use super::SolverDefId; -use super::{DbInterner, GenericArg}; +use super::DbInterner; pub(crate) fn generics(db: &dyn HirDatabase, def: SolverDefId) -> Generics { - let mk_lt = |parent, index, local_id, lt: &LifetimeParamData| { - let name = lt.name.symbol().clone(); + let mk_lt = |parent, index, local_id| { let id = GenericParamId::LifetimeParamId(LifetimeParamId { parent, local_id }); - GenericParamDef { name, index, id } + GenericParamDef { index, id } }; let mk_ty = |parent, index, local_id, p: &TypeOrConstParamData| { - let name = p.name().map(|n| n.symbol().clone()).unwrap_or_else(|| sym::MISSING_NAME); let id = TypeOrConstParamId { parent, local_id }; let id = match p { TypeOrConstParamData::TypeParamData(_) => { @@ -40,7 +29,7 @@ pub(crate) fn generics(db: &dyn HirDatabase, def: SolverDefId) -> Generics { GenericParamId::ConstParamId(ConstParamId::from_unchecked(id)) } }; - GenericParamDef { name, index, id } + GenericParamDef { index, id } }; let own_params_for_generic_params = |parent, params: &GenericParams| { let mut result = Vec::with_capacity(params.len()); @@ -51,8 +40,8 @@ pub(crate) fn generics(db: &dyn HirDatabase, def: SolverDefId) -> Generics { type_and_consts.next(); index += 1; } - result.extend(params.iter_lt().map(|(local_id, data)| { - let lt = mk_lt(parent, index, local_id, data); + result.extend(params.iter_lt().map(|(local_id, _data)| { + let lt = mk_lt(parent, index, local_id); index += 1; lt })); @@ -78,7 +67,7 @@ pub(crate) fn generics(db: &dyn HirDatabase, def: SolverDefId) -> Generics { crate::ImplTraitId::TypeAliasImplTrait(type_alias_id, _) => { (Some(type_alias_id.into()), Vec::new()) } - crate::ImplTraitId::AsyncBlockTypeImplTrait(def, _) => { + crate::ImplTraitId::AsyncBlockTypeImplTrait(_def, _) => { let param = TypeOrConstParamData::TypeParamData(TypeParamData { name: None, default: None, @@ -121,8 +110,6 @@ pub struct Generics { #[derive(Debug)] pub struct GenericParamDef { - pub(crate) name: Symbol, - //def_id: GenericDefId, index: u32, pub(crate) id: GenericParamId, } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/at.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/at.rs index 8dfffe0d365e7..70b659406f86c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/at.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/at.rs @@ -36,7 +36,7 @@ use crate::next_solver::{ AliasTerm, AliasTy, Binder, Const, DbInterner, GenericArg, Goal, ParamEnv, PolyExistentialProjection, PolyExistentialTraitRef, PolyFnSig, Predicate, Region, Span, Term, 
TraitRef, Ty, - fulfill::{FulfillmentCtxt, NextSolverError}, + fulfill::NextSolverError, infer::relate::lattice::{LatticeOp, LatticeOpKind}, }; @@ -45,16 +45,6 @@ use super::{ traits::{Obligation, ObligationCause}, }; -/// Whether we should define opaque types or just treat them opaquely. -/// -/// Currently only used to prevent predicate matching from matching anything -/// against opaque types. -#[derive(Debug, PartialEq, Eq, Clone, Copy)] -pub enum DefineOpaqueTypes { - Yes, - No, -} - #[derive(Clone, Copy)] pub struct At<'a, 'db> { pub infcx: &'a InferCtxt<'db>, @@ -107,12 +97,7 @@ impl<'a, 'db> At<'a, 'db> { /// call like `foo(x)`, where `foo: fn(i32)`, you might have /// `sup(i32, x)`, since the "expected" type is the type that /// appears in the signature. - pub fn sup( - self, - define_opaque_types: DefineOpaqueTypes, - expected: T, - actual: T, - ) -> InferResult<'db, ()> + pub fn sup(self, expected: T, actual: T) -> InferResult<'db, ()> where T: ToTrace<'db>, { @@ -128,12 +113,7 @@ impl<'a, 'db> At<'a, 'db> { } /// Makes `expected <: actual`. - pub fn sub( - self, - define_opaque_types: DefineOpaqueTypes, - expected: T, - actual: T, - ) -> InferResult<'db, ()> + pub fn sub(self, expected: T, actual: T) -> InferResult<'db, ()> where T: ToTrace<'db>, { @@ -149,31 +129,7 @@ impl<'a, 'db> At<'a, 'db> { } /// Makes `expected == actual`. - pub fn eq( - self, - define_opaque_types: DefineOpaqueTypes, - expected: T, - actual: T, - ) -> InferResult<'db, ()> - where - T: ToTrace<'db>, - { - self.eq_trace( - define_opaque_types, - ToTrace::to_trace(self.cause, expected, actual), - expected, - actual, - ) - } - - /// Makes `expected == actual`. - pub fn eq_trace( - self, - define_opaque_types: DefineOpaqueTypes, - trace: TypeTrace<'db>, - expected: T, - actual: T, - ) -> InferResult<'db, ()> + pub fn eq(self, expected: T, actual: T) -> InferResult<'db, ()> where T: Relate>, { @@ -188,20 +144,14 @@ impl<'a, 'db> At<'a, 'db> { .map(|goals| self.goals_to_obligations(goals)) } - pub fn relate( - self, - define_opaque_types: DefineOpaqueTypes, - expected: T, - variance: Variance, - actual: T, - ) -> InferResult<'db, ()> + pub fn relate(self, expected: T, variance: Variance, actual: T) -> InferResult<'db, ()> where T: ToTrace<'db>, { match variance { - Variance::Covariant => self.sub(define_opaque_types, expected, actual), - Variance::Invariant => self.eq(define_opaque_types, expected, actual), - Variance::Contravariant => self.sup(define_opaque_types, expected, actual), + Variance::Covariant => self.sub(expected, actual), + Variance::Invariant => self.eq(expected, actual), + Variance::Contravariant => self.sup(expected, actual), // We could make this make sense but it's not readily // exposed and I don't feel like dealing with it. 
Note diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs index beaac11a2de41..e6a818fdf3bc3 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs @@ -8,7 +8,7 @@ use rustc_hash::FxHashMap; use rustc_index::Idx; use rustc_type_ir::InferTy::{self, FloatVar, IntVar, TyVar}; -use rustc_type_ir::inherent::{Const as _, IntoKind as _, Region as _, SliceLike, Ty as _}; +use rustc_type_ir::inherent::{Const as _, IntoKind as _, SliceLike, Ty as _}; use rustc_type_ir::{ BoundVar, CanonicalQueryInput, DebruijnIndex, Flags, InferConst, RegionKind, TyVid, TypeFlags, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitableExt, UniverseIndex, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs index 6c7a87ef52494..64287fe47261d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs @@ -8,22 +8,11 @@ use crate::next_solver::BoundConst; use crate::next_solver::{ - AliasTy, Binder, BoundRegion, BoundTy, Canonical, CanonicalVarValues, Const, DbInterner, Goal, - ParamEnv, Predicate, PredicateKind, Region, Ty, TyKind, - fold::FnMutDelegate, - infer::{ - DefineOpaqueTypes, InferCtxt, TypeTrace, - traits::{Obligation, PredicateObligations}, - }, + BoundRegion, BoundTy, Canonical, CanonicalVarValues, DbInterner, fold::FnMutDelegate, }; use rustc_type_ir::{ - AliasRelationDirection, AliasTyKind, BoundVar, GenericArgKind, InferTy, TypeFoldable, Upcast, - Variance, + GenericArgKind, TypeFoldable, inherent::{IntoKind, SliceLike}, - relate::{ - Relate, TypeRelation, VarianceDiagInfo, - combine::{super_combine_consts, super_combine_tys}, - }, }; pub trait CanonicalExt<'db, V> { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/mod.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/mod.rs index d0669f5c3bcc5..b3bd0a437b8d8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/mod.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/mod.rs @@ -22,26 +22,13 @@ //! 
[c]: https://rust-lang.github.io/chalk/book/canonical_queries/canonicalization.html use crate::next_solver::{ - AliasTy, Binder, Canonical, CanonicalVarValues, CanonicalVars, Const, DbInterner, GenericArg, - Goal, ParamEnv, PlaceholderConst, PlaceholderRegion, PlaceholderTy, Predicate, PredicateKind, - Region, Ty, TyKind, - infer::{ - DefineOpaqueTypes, InferCtxt, TypeTrace, - traits::{Obligation, PredicateObligations}, - }, + Canonical, CanonicalVarValues, Const, DbInterner, GenericArg, PlaceholderConst, + PlaceholderRegion, PlaceholderTy, Region, Ty, TyKind, infer::InferCtxt, }; use instantiate::CanonicalExt; use rustc_index::IndexVec; use rustc_type_ir::inherent::IntoKind; -use rustc_type_ir::{ - AliasRelationDirection, AliasTyKind, CanonicalVarKind, InferTy, TypeFoldable, UniverseIndex, - Upcast, Variance, - inherent::{SliceLike, Ty as _}, - relate::{ - Relate, TypeRelation, VarianceDiagInfo, - combine::{super_combine_consts, super_combine_tys}, - }, -}; +use rustc_type_ir::{CanonicalVarKind, InferTy, TypeFoldable, UniverseIndex, inherent::Ty as _}; pub mod canonicalizer; pub mod instantiate; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/context.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/context.rs index 5aa5ad14af551..397986e2edd3b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/context.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/context.rs @@ -1,19 +1,19 @@ //! Definition of `InferCtxtLike` from the librarified type layer. use rustc_type_ir::{ - ConstVid, FloatVarValue, FloatVid, GenericArgKind, InferConst, InferTy, IntTy, IntVarValue, - IntVid, RegionVid, TyVid, TypeFoldable, TypingMode, UniverseIndex, - inherent::{Const as _, IntoKind, Span as _, Ty as _}, + ConstVid, FloatVarValue, FloatVid, GenericArgKind, InferConst, InferTy, IntVarValue, IntVid, + RegionVid, TyVid, TypeFoldable, TypingMode, UniverseIndex, + inherent::{Const as _, IntoKind, Ty as _}, relate::combine::PredicateEmittingRelation, }; use crate::next_solver::{ - Binder, Const, ConstKind, DbInterner, ErrorGuaranteed, GenericArgs, OpaqueTypeKey, ParamEnv, - Region, SolverDefId, Span, Ty, TyKind, + Binder, Const, ConstKind, DbInterner, ErrorGuaranteed, GenericArgs, OpaqueTypeKey, Region, + SolverDefId, Span, Ty, TyKind, infer::opaque_types::{OpaqueHiddenType, table::OpaqueTypeStorageEntries}, }; -use super::{BoundRegionConversionTime, InferCtxt, relate::RelateResult, traits::ObligationCause}; +use super::{BoundRegionConversionTime, InferCtxt, relate::RelateResult}; impl<'db> rustc_type_ir::InferCtxtLike for InferCtxt<'db> { type Interner = DbInterner<'db>; @@ -250,16 +250,16 @@ impl<'db> rustc_type_ir::InferCtxtLike for InferCtxt<'db> { self.probe(|_| probe()) } - fn sub_regions(&self, sub: Region<'db>, sup: Region<'db>, span: Span) { + fn sub_regions(&self, sub: Region<'db>, sup: Region<'db>, _span: Span) { self.inner.borrow_mut().unwrap_region_constraints().make_subregion(sub, sup); } - fn equate_regions(&self, a: Region<'db>, b: Region<'db>, span: Span) { + fn equate_regions(&self, a: Region<'db>, b: Region<'db>, _span: Span) { self.inner.borrow_mut().unwrap_region_constraints().make_eqregion(a, b); } - fn register_ty_outlives(&self, ty: Ty<'db>, r: Region<'db>, span: Span) { - //self.register_region_obligation_with_cause(ty, r, &ObligationCause::dummy_with_span(Span::dummy())); + fn register_ty_outlives(&self, _ty: Ty<'db>, _r: Region<'db>, _span: Span) { + // self.register_type_outlives_constraint(ty, r, 
&ObligationCause::dummy()); } type OpaqueTypeStorageEntries = OpaqueTypeStorageEntries; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/mod.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/mod.rs index e1a46fa0694c4..36c6c48c5a0b7 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/mod.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/mod.rs @@ -6,32 +6,23 @@ use std::ops::Range; use std::sync::Arc; pub use BoundRegionConversionTime::*; -pub use at::DefineOpaqueTypes; -use ena::undo_log::UndoLogs; use ena::unify as ut; use hir_def::GenericParamId; use hir_def::lang_item::LangItem; -use intern::Symbol; use opaque_types::{OpaqueHiddenType, OpaqueTypeStorage}; -use region_constraints::{ - GenericKind, RegionConstraintCollector, RegionConstraintStorage, UndoLog, VarInfos, VerifyBound, -}; -pub use relate::StructurallyRelateAliases; -pub use relate::combine::PredicateEmittingRelation; -use rustc_hash::{FxHashMap, FxHashSet}; +use region_constraints::{RegionConstraintCollector, RegionConstraintStorage}; use rustc_next_trait_solver::solve::SolverDelegateEvalExt; use rustc_pattern_analysis::Captures; +use rustc_type_ir::TypeFoldable; use rustc_type_ir::error::{ExpectedFound, TypeError}; use rustc_type_ir::inherent::{ - Const as _, GenericArg as _, GenericArgs as _, IntoKind, ParamEnv as _, SliceLike, Term as _, - Ty as _, + Const as _, GenericArg as _, GenericArgs as _, IntoKind, SliceLike, Term as _, Ty as _, }; use rustc_type_ir::{ - BoundVar, ClosureKind, ConstVid, FloatTy, FloatVarValue, FloatVid, GenericArgKind, InferConst, - InferTy, IntTy, IntVarValue, IntVid, OutlivesPredicate, RegionVid, TyVid, UniverseIndex, + ClosureKind, ConstVid, FloatVarValue, FloatVid, GenericArgKind, InferConst, InferTy, + IntVarValue, IntVid, OutlivesPredicate, RegionVid, TyVid, UniverseIndex, }; use rustc_type_ir::{TermKind, TypeVisitableExt}; -use rustc_type_ir::{TypeFoldable, TypeFolder, TypeSuperFoldable}; use snapshot::undo_log::InferCtxtUndoLogs; use tracing::{debug, instrument}; use traits::{ObligationCause, PredicateObligations}; @@ -39,19 +30,17 @@ use type_variable::TypeVariableOrigin; use unify_key::{ConstVariableOrigin, ConstVariableValue, ConstVidKey}; use crate::next_solver::fold::BoundVarReplacerDelegate; -use crate::next_solver::infer::opaque_types::table::OpaqueTypeStorageEntries; use crate::next_solver::infer::select::EvaluationResult; use crate::next_solver::infer::traits::PredicateObligation; use crate::next_solver::obligation_ctxt::ObligationCtxt; use crate::next_solver::{BoundConst, BoundRegion, BoundTy, BoundVarKind, Goal, SolverContext}; -use super::generics::GenericParamDef; use super::{ - AliasTerm, Binder, BoundRegionKind, CanonicalQueryInput, CanonicalVarValues, Const, ConstKind, - DbInterner, ErrorGuaranteed, FxIndexMap, GenericArg, GenericArgs, OpaqueTypeKey, ParamEnv, - PlaceholderRegion, PolyCoercePredicate, PolyExistentialProjection, PolyExistentialTraitRef, - PolyFnSig, PolyRegionOutlivesPredicate, PolySubtypePredicate, Predicate, Region, SolverDefId, - SubtypePredicate, Term, TraitPredicate, TraitRef, Ty, TyKind, TypingMode, + AliasTerm, Binder, CanonicalQueryInput, CanonicalVarValues, Const, ConstKind, DbInterner, + ErrorGuaranteed, GenericArg, GenericArgs, OpaqueTypeKey, ParamEnv, PolyCoercePredicate, + PolyExistentialProjection, PolyExistentialTraitRef, PolyFnSig, PolyRegionOutlivesPredicate, + PolySubtypePredicate, Region, SolverDefId, SubtypePredicate, Term, TraitRef, Ty, TyKind, + TypingMode, 
}; pub mod at; @@ -82,8 +71,6 @@ pub struct InferOk<'db, T> { } pub type InferResult<'db, T> = Result, TypeError>>; -pub(crate) type FixupResult = Result; // "fixup result" - pub(crate) type UnificationTable<'a, 'db, T> = ut::UnificationTable< ut::InPlace, &'a mut InferCtxtUndoLogs<'db>>, >; @@ -440,6 +427,7 @@ impl<'db> InferCtxt<'db> { /// check::<&'_ T>(); /// } /// ``` + #[expect(dead_code, reason = "this is used in rustc")] fn predicate_must_hold_considering_regions( &self, obligation: &PredicateObligation<'db>, @@ -452,14 +440,13 @@ impl<'db> InferCtxt<'db> { /// not entirely accurate if inference variables are involved. /// /// This version ignores all outlives constraints. + #[expect(dead_code, reason = "this is used in rustc")] fn predicate_must_hold_modulo_regions(&self, obligation: &PredicateObligation<'db>) -> bool { self.evaluate_obligation(obligation).must_apply_modulo_regions() } /// Evaluate a given predicate, capturing overflow and propagating it back. fn evaluate_obligation(&self, obligation: &PredicateObligation<'db>) -> EvaluationResult { - let param_env = obligation.param_env; - self.probe(|snapshot| { let mut ocx = ObligationCtxt::new(self); ocx.register_obligation(obligation.clone()); @@ -583,16 +570,16 @@ impl<'db> InferCtxt<'db> { self.enter_forall(predicate, |SubtypePredicate { a_is_expected, a, b }| { if a_is_expected { - Ok(self.at(cause, param_env).sub(DefineOpaqueTypes::Yes, a, b)) + Ok(self.at(cause, param_env).sub(a, b)) } else { - Ok(self.at(cause, param_env).sup(DefineOpaqueTypes::Yes, b, a)) + Ok(self.at(cause, param_env).sup(b, a)) } }) } pub fn region_outlives_predicate( &self, - cause: &traits::ObligationCause, + _cause: &traits::ObligationCause, predicate: PolyRegionOutlivesPredicate<'db>, ) { self.enter_forall(predicate, |OutlivesPredicate(r_a, r_b)| { @@ -632,7 +619,7 @@ impl<'db> InferCtxt<'db> { } pub fn next_const_var(&self) -> Const<'db> { - self.next_const_var_with_origin(ConstVariableOrigin { param_def_id: None }) + self.next_const_var_with_origin(ConstVariableOrigin {}) } pub fn next_const_vid(&self) -> ConstVid { @@ -640,7 +627,7 @@ impl<'db> InferCtxt<'db> { .borrow_mut() .const_unification_table() .new_key(ConstVariableValue::Unknown { - origin: ConstVariableOrigin { param_def_id: None }, + origin: ConstVariableOrigin {}, universe: self.universe(), }) .vid @@ -657,7 +644,7 @@ impl<'db> InferCtxt<'db> { } pub fn next_const_var_in_universe(&self, universe: UniverseIndex) -> Const<'db> { - let origin = ConstVariableOrigin { param_def_id: None }; + let origin = ConstVariableOrigin {}; let vid = self .inner .borrow_mut() @@ -738,7 +725,7 @@ impl<'db> InferCtxt<'db> { self.next_region_var_in_universe(universe) } - fn var_for_def(&self, id: GenericParamId, name: &Symbol) -> GenericArg<'db> { + fn var_for_def(&self, id: GenericParamId) -> GenericArg<'db> { match id { GenericParamId::LifetimeParamId(_) => { // Create a region inference variable for the given @@ -763,7 +750,7 @@ impl<'db> InferCtxt<'db> { Ty::new_var(self.interner, ty_var_id).into() } GenericParamId::ConstParamId(_) => { - let origin = ConstVariableOrigin { param_def_id: None }; + let origin = ConstVariableOrigin {}; let const_var_id = self .inner .borrow_mut() @@ -778,9 +765,7 @@ impl<'db> InferCtxt<'db> { /// Given a set of generics defined on a type or impl, returns the generic parameters mapping /// each type/region parameter to a fresh inference variable. 
pub fn fresh_args_for_item(&self, def_id: SolverDefId) -> GenericArgs<'db> { - GenericArgs::for_item(self.interner, def_id, |name, index, kind, _| { - self.var_for_def(kind, name) - }) + GenericArgs::for_item(self.interner, def_id, |_index, kind, _| self.var_for_def(kind)) } /// Like `fresh_args_for_item()`, but first uses the args from `first`. @@ -789,8 +774,8 @@ impl<'db> InferCtxt<'db> { def_id: SolverDefId, first: impl IntoIterator>, ) -> GenericArgs<'db> { - GenericArgs::fill_rest(self.interner, def_id, first, |name, index, kind, _| { - self.var_for_def(kind, name) + GenericArgs::fill_rest(self.interner, def_id, first, |_index, kind, _| { + self.var_for_def(kind) }) } @@ -828,8 +813,8 @@ impl<'db> InferCtxt<'db> { defining_opaque_types_and_generators.contains(&id.into()) } TypingMode::Coherence | TypingMode::PostAnalysis => false, - TypingMode::Borrowck { defining_opaque_types } => unimplemented!(), - TypingMode::PostBorrowckAnalysis { defined_opaque_types } => unimplemented!(), + TypingMode::Borrowck { defining_opaque_types: _ } => unimplemented!(), + TypingMode::PostBorrowckAnalysis { defined_opaque_types: _ } => unimplemented!(), } } @@ -998,7 +983,7 @@ impl<'db> InferCtxt<'db> { // use [`InferCtxt::enter_forall`] instead. pub fn instantiate_binder_with_fresh_vars( &self, - lbrct: BoundRegionConversionTime, + _lbrct: BoundRegionConversionTime, value: Binder<'db, T>, ) -> T where @@ -1014,7 +999,7 @@ impl<'db> InferCtxt<'db> { for bound_var_kind in bound_vars { let arg: GenericArg<'db> = match bound_var_kind { BoundVarKind::Ty(_) => self.next_ty_var().into(), - BoundVarKind::Region(br) => self.next_region_var().into(), + BoundVarKind::Region(_) => self.next_region_var().into(), BoundVarKind::Const => self.next_const_var().into(), }; args.push(arg); @@ -1070,7 +1055,7 @@ impl<'db> InferCtxt<'db> { #[inline] pub fn is_ty_infer_var_definitely_unchanged<'a>( &'a self, - ) -> (impl Fn(TyOrConstInferVar) -> bool + Captures<'db> + 'a) { + ) -> impl Fn(TyOrConstInferVar) -> bool + Captures<'db> + 'a { // This hoists the borrow/release out of the loop body. let inner = self.inner.try_borrow(); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/opaque_types/mod.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/opaque_types/mod.rs index 0f68ec8cdb5b4..06d998488e15b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/opaque_types/mod.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/opaque_types/mod.rs @@ -1,40 +1,10 @@ //! Things related to the infer context of the next-trait-solver. 
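As a rough picture of the storage this module builds on, a primary lookup table plus explicitly tracked duplicate entries, here is a minimal standalone sketch with illustrative types; it is not the actual `OpaqueTypeStorage` API, only the general idea behind `register`/`add_duplicate` as they appear further down.

```rust
// Standalone sketch: a lookup table plus an overflow list of duplicate registrations.
use std::collections::HashMap;

#[derive(Default)]
struct Storage<K: std::hash::Hash + Eq + Copy, V: Copy> {
    table: HashMap<K, V>,
    duplicates: Vec<(K, V)>,
}

impl<K: std::hash::Hash + Eq + Copy, V: Copy> Storage<K, V> {
    // Returns the previously recorded value if the key was already registered.
    fn register(&mut self, key: K, value: V) -> Option<V> {
        if let Some(&prev) = self.table.get(&key) {
            return Some(prev);
        }
        self.table.insert(key, value);
        None
    }

    // Records an extra entry that the lookup table intentionally does not see.
    fn add_duplicate(&mut self, key: K, value: V) {
        self.duplicates.push((key, value));
    }

    // Iterates both the table and the duplicate entries.
    fn iter_all(&self) -> impl Iterator<Item = (K, V)> + '_ {
        self.table.iter().map(|(&k, &v)| (k, v)).chain(self.duplicates.iter().copied())
    }
}

fn main() {
    let mut s = Storage::default();
    assert_eq!(s.register("opaque_a", 1), None);
    assert_eq!(s.register("opaque_a", 2), Some(1));
    s.add_duplicate("opaque_a", 2);
    assert_eq!(s.iter_all().count(), 2);
}
```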
-use std::sync::Arc; - -use tracing::{debug, instrument}; - -use crate::next_solver::{ - Clause, ClauseKind, FxIndexMap, GenericArgs, OpaqueTypeKey, ProjectionPredicate, SolverDefId, - TypingMode, util::BottomUpFolder, -}; - pub(crate) mod table; pub(crate) use table::{OpaqueTypeStorage, OpaqueTypeTable}; -use crate::next_solver::{ - AliasTy, Binder, BoundRegion, BoundTy, Canonical, CanonicalVarValues, Const, DbInterner, Goal, - ParamEnv, Predicate, PredicateKind, Region, Ty, TyKind, - fold::FnMutDelegate, - infer::{ - DefineOpaqueTypes, InferCtxt, TypeTrace, - traits::{Obligation, PredicateObligations}, - }, -}; -use rustc_type_ir::{ - AliasRelationDirection, AliasTyKind, BoundConstness, BoundVar, Flags, GenericArgKind, InferTy, - Interner, RegionKind, TypeFlags, TypeFoldable, TypeSuperVisitable, TypeVisitable, - TypeVisitableExt, TypeVisitor, Upcast, Variance, - error::{ExpectedFound, TypeError}, - inherent::{DefId, GenericArgs as _, IntoKind, SliceLike}, - relate::{ - Relate, TypeRelation, VarianceDiagInfo, - combine::{super_combine_consts, super_combine_tys}, - }, -}; - -use super::{InferOk, traits::ObligationCause}; +use crate::next_solver::{OpaqueTypeKey, Ty, infer::InferCtxt}; #[derive(Copy, Clone, Debug)] pub struct OpaqueHiddenType<'db> { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/opaque_types/table.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/opaque_types/table.rs index 8ab409d782813..0f8b23870fd02 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/opaque_types/table.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/opaque_types/table.rs @@ -54,7 +54,7 @@ impl<'db> OpaqueTypeStorage<'db> { assert!(entry.is_some()); } - pub fn is_empty(&self) -> bool { + pub(crate) fn is_empty(&self) -> bool { let OpaqueTypeStorage { opaque_types, duplicate_entries } = self; opaque_types.is_empty() && duplicate_entries.is_empty() } @@ -66,14 +66,14 @@ impl<'db> OpaqueTypeStorage<'db> { std::mem::take(opaque_types).into_iter().chain(std::mem::take(duplicate_entries)) } - pub fn num_entries(&self) -> OpaqueTypeStorageEntries { + pub(crate) fn num_entries(&self) -> OpaqueTypeStorageEntries { OpaqueTypeStorageEntries { opaque_types: self.opaque_types.len(), duplicate_entries: self.duplicate_entries.len(), } } - pub fn opaque_types_added_since( + pub(crate) fn opaque_types_added_since( &self, prev_entries: OpaqueTypeStorageEntries, ) -> impl Iterator, OpaqueHiddenType<'db>)> { @@ -89,7 +89,7 @@ impl<'db> OpaqueTypeStorage<'db> { /// /// Outside of canonicalization one should generally use `iter_opaque_types` /// to also consider duplicate entries. - pub fn iter_lookup_table( + pub(crate) fn iter_lookup_table( &self, ) -> impl Iterator, OpaqueHiddenType<'db>)> { self.opaque_types.iter().map(|(k, v)| (*k, *v)) @@ -100,13 +100,13 @@ impl<'db> OpaqueTypeStorage<'db> { /// These have to considered when checking all opaque type uses but are e.g. /// irrelevant for canonical inputs as nested queries never meaningfully /// accesses them. 
- pub fn iter_duplicate_entries( + pub(crate) fn iter_duplicate_entries( &self, ) -> impl Iterator, OpaqueHiddenType<'db>)> { self.duplicate_entries.iter().copied() } - pub fn iter_opaque_types( + pub(crate) fn iter_opaque_types( &self, ) -> impl Iterator, OpaqueHiddenType<'db>)> { let OpaqueTypeStorage { opaque_types, duplicate_entries } = self; @@ -144,7 +144,7 @@ impl<'db> Deref for OpaqueTypeTable<'_, 'db> { impl<'a, 'db> OpaqueTypeTable<'a, 'db> { #[instrument(skip(self), level = "debug")] - pub fn register( + pub(crate) fn register( &mut self, key: OpaqueTypeKey<'db>, hidden_type: OpaqueHiddenType<'db>, @@ -159,7 +159,11 @@ impl<'a, 'db> OpaqueTypeTable<'a, 'db> { None } - pub fn add_duplicate(&mut self, key: OpaqueTypeKey<'db>, hidden_type: OpaqueHiddenType<'db>) { + pub(crate) fn add_duplicate( + &mut self, + key: OpaqueTypeKey<'db>, + hidden_type: OpaqueHiddenType<'db>, + ) { self.storage.duplicate_entries.push((key, hidden_type)); self.undo_log.push(UndoLog::DuplicateOpaqueType); } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/region_constraints/mod.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/region_constraints/mod.rs index 7f15a467b3e87..ae5930d55c72d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/region_constraints/mod.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/region_constraints/mod.rs @@ -1,7 +1,6 @@ //! See `README.md`. use std::ops::Range; -use std::sync::Arc; use std::{cmp, fmt, mem}; use ena::undo_log::{Rollback, UndoLogs}; @@ -18,9 +17,7 @@ use super::MemberConstraint; use super::unify_key::RegionVidKey; use crate::next_solver::infer::snapshot::undo_log::{InferCtxtUndoLogs, Snapshot}; use crate::next_solver::infer::unify_key::RegionVariableValue; -use crate::next_solver::{ - AliasTy, Binder, DbInterner, OpaqueTypeKey, ParamTy, PlaceholderTy, Region, Ty, -}; +use crate::next_solver::{AliasTy, Binder, DbInterner, ParamTy, PlaceholderTy, Region, Ty}; #[derive(Debug, Clone, Default)] pub struct RegionConstraintStorage<'db> { @@ -254,6 +251,7 @@ pub(crate) enum UndoLog<'db> { AddConstraint(usize), /// We added the given `verify`. + #[expect(dead_code, reason = "this is used in rustc")] AddVerify(usize), /// We added a GLB/LUB "combination variable". 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/generalize.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/generalize.rs index 7e2735db3b77a..d06984cac11cb 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/generalize.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/generalize.rs @@ -7,8 +7,8 @@ use rustc_type_ir::error::TypeError; use rustc_type_ir::inherent::{Const as _, IntoKind, Ty as _}; use rustc_type_ir::relate::VarianceDiagInfo; use rustc_type_ir::{ - AliasRelationDirection, AliasTyKind, ConstVid, InferConst, InferCtxtLike, InferTy, RegionKind, - TermKind, TyVid, UniverseIndex, Variance, + AliasRelationDirection, ConstVid, InferConst, InferCtxtLike, InferTy, RegionKind, TermKind, + TyVid, UniverseIndex, Variance, }; use rustc_type_ir::{Interner, TypeVisitable, TypeVisitableExt}; use tracing::{debug, instrument, warn}; @@ -21,9 +21,8 @@ use crate::next_solver::infer::unify_key::ConstVariableValue; use crate::next_solver::infer::{InferCtxt, relate}; use crate::next_solver::util::MaxUniverse; use crate::next_solver::{ - AliasTy, Binder, ClauseKind, Const, ConstKind, DbInterner, GenericArgs, PredicateKind, - ProjectionPredicate, Region, SolverDefId, Term, TermVid, Ty, TyKind, TypingMode, - UnevaluatedConst, + AliasTy, Binder, ClauseKind, Const, ConstKind, DbInterner, GenericArgs, PredicateKind, Region, + SolverDefId, Term, TermVid, Ty, TyKind, TypingMode, UnevaluatedConst, }; impl<'db> InferCtxt<'db> { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/higher_ranked.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/higher_ranked.rs index 62028e0e70399..c523751e03e32 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/higher_ranked.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/higher_ranked.rs @@ -2,13 +2,10 @@ //! the end of the file for details. use rustc_type_ir::TypeFoldable; -use rustc_type_ir::{BoundVar, UniverseIndex}; use tracing::{debug, instrument}; -use super::RelateResult; use crate::next_solver::fold::FnMutDelegate; use crate::next_solver::infer::InferCtxt; -use crate::next_solver::infer::snapshot::CombinedSnapshot; use crate::next_solver::{ Binder, BoundConst, BoundRegion, BoundTy, Const, DbInterner, PlaceholderConst, PlaceholderRegion, PlaceholderTy, Region, Ty, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/lattice.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/lattice.rs index c7f771ffe37f7..374895c337c78 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/lattice.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/relate/lattice.rs @@ -30,7 +30,7 @@ use crate::next_solver::{ AliasTy, Binder, Const, DbInterner, Goal, ParamEnv, Predicate, PredicateKind, Region, Span, Ty, TyKind, infer::{ - DefineOpaqueTypes, InferCtxt, TypeTrace, + InferCtxt, TypeTrace, relate::RelateResult, traits::{Obligation, PredicateObligations}, }, @@ -92,10 +92,7 @@ impl<'db> TypeRelation> for LatticeOp<'_, 'db> { match variance { Variance::Invariant => { self.obligations.extend( - self.infcx - .at(&self.trace.cause, self.param_env) - .eq_trace(DefineOpaqueTypes::Yes, self.trace.clone(), a, b)? 
- .into_obligations(), + self.infcx.at(&self.trace.cause, self.param_env).eq(a, b)?.into_obligations(), ); Ok(a) } @@ -213,12 +210,12 @@ impl<'infcx, 'db> LatticeOp<'infcx, 'db> { let at = self.infcx.at(&self.trace.cause, self.param_env); match self.kind { LatticeOpKind::Glb => { - self.obligations.extend(at.sub(DefineOpaqueTypes::Yes, v, a)?.into_obligations()); - self.obligations.extend(at.sub(DefineOpaqueTypes::Yes, v, b)?.into_obligations()); + self.obligations.extend(at.sub(v, a)?.into_obligations()); + self.obligations.extend(at.sub(v, b)?.into_obligations()); } LatticeOpKind::Lub => { - self.obligations.extend(at.sub(DefineOpaqueTypes::Yes, a, v)?.into_obligations()); - self.obligations.extend(at.sub(DefineOpaqueTypes::Yes, b, v)?.into_obligations()); + self.obligations.extend(at.sub(a, v)?.into_obligations()); + self.obligations.extend(at.sub(b, v)?.into_obligations()); } } Ok(()) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/resolve.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/resolve.rs index 4bd3fbd4985d0..b6e5225e5a7e7 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/resolve.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/resolve.rs @@ -1,15 +1,14 @@ //! Things for resolving vars in the infer context of the next-trait-solver. use rustc_type_ir::{ - ConstKind, FallibleTypeFolder, InferConst, InferTy, RegionKind, TyKind, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitableExt, data_structures::DelayedMap, - inherent::{Const as _, IntoKind, Ty as _}, + inherent::{Const as _, Ty as _}, }; use crate::next_solver::{Const, DbInterner, ErrorGuaranteed, Region, Ty}; -use super::{FixupError, FixupResult, InferCtxt}; +use super::InferCtxt; /////////////////////////////////////////////////////////////////////////// // OPPORTUNISTIC VAR RESOLVER diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/select.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/select.rs index 79b0a2933236b..d2f584b38cf47 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/select.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/select.rs @@ -1,3 +1,5 @@ +#![expect(dead_code, reason = "this is used by rustc")] + use std::ops::ControlFlow; use hir_def::{ImplId, TraitId}; @@ -61,7 +63,7 @@ pub enum NotConstEvaluatable { /// so they are noops when unioned with a definite error, and within /// the categories it's easy to see that the unions are correct. #[derive(Copy, Clone, Debug, PartialOrd, Ord, PartialEq, Eq)] -pub enum EvaluationResult { +pub(crate) enum EvaluationResult { /// Evaluation successful. EvaluatedToOk, /// Evaluation successful, but there were unevaluated region obligations. @@ -91,17 +93,17 @@ pub enum EvaluationResult { impl EvaluationResult { /// Returns `true` if this evaluation result is known to apply, even /// considering outlives constraints. - pub fn must_apply_considering_regions(self) -> bool { + pub(crate) fn must_apply_considering_regions(self) -> bool { self == EvaluatedToOk } /// Returns `true` if this evaluation result is known to apply, ignoring /// outlives constraints. 
- pub fn must_apply_modulo_regions(self) -> bool { + pub(crate) fn must_apply_modulo_regions(self) -> bool { self <= EvaluatedToOkModuloRegions } - pub fn may_apply(self) -> bool { + pub(crate) fn may_apply(self) -> bool { match self { EvaluatedToOkModuloOpaqueTypes | EvaluatedToOk @@ -113,7 +115,7 @@ impl EvaluationResult { } } - pub fn is_stack_dependent(self) -> bool { + pub(crate) fn is_stack_dependent(self) -> bool { match self { EvaluatedToAmbigStackDependent => true, @@ -135,9 +137,9 @@ pub enum OverflowError { #[derive(Clone, Debug, PartialEq, Eq)] pub struct SignatureMismatchData<'db> { - pub found_trait_ref: TraitRef<'db>, - pub expected_trait_ref: TraitRef<'db>, - pub terr: TypeError<'db>, + pub(crate) found_trait_ref: TraitRef<'db>, + pub(crate) expected_trait_ref: TraitRef<'db>, + pub(crate) terr: TypeError<'db>, } /// When performing resolution, it is typically the case that there @@ -147,7 +149,7 @@ pub struct SignatureMismatchData<'db> { /// - `Ok(None)`: could not definitely determine anything, usually due /// to inconclusive type inference. /// - `Err(e)`: error `e` occurred -pub type SelectionResult<'db, T> = Result, SelectionError<'db>>; +pub(crate) type SelectionResult<'db, T> = Result, SelectionError<'db>>; /// Given the successful resolution of an obligation, the `ImplSource` /// indicates where the impl comes from. @@ -179,7 +181,7 @@ pub type SelectionResult<'db, T> = Result, SelectionError<'db>>; /// /// See explanation on `ImplSourceUserDefinedData`. #[derive(Debug, Clone, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable)] -pub enum ImplSource<'db, N> { +pub(crate) enum ImplSource<'db, N> { /// ImplSource identifying a particular impl. UserDefined(ImplSourceUserDefinedData<'db, N>), @@ -194,28 +196,28 @@ pub enum ImplSource<'db, N> { } impl<'db, N> ImplSource<'db, N> { - pub fn nested_obligations(self) -> Vec { + pub(crate) fn nested_obligations(self) -> Vec { match self { ImplSource::UserDefined(i) => i.nested, ImplSource::Param(n) | ImplSource::Builtin(_, n) => n, } } - pub fn borrow_nested_obligations(&self) -> &[N] { + pub(crate) fn borrow_nested_obligations(&self) -> &[N] { match self { ImplSource::UserDefined(i) => &i.nested, ImplSource::Param(n) | ImplSource::Builtin(_, n) => n, } } - pub fn borrow_nested_obligations_mut(&mut self) -> &mut [N] { + pub(crate) fn borrow_nested_obligations_mut(&mut self) -> &mut [N] { match self { ImplSource::UserDefined(i) => &mut i.nested, ImplSource::Param(n) | ImplSource::Builtin(_, n) => n, } } - pub fn map(self, f: F) -> ImplSource<'db, M> + pub(crate) fn map(self, f: F) -> ImplSource<'db, M> where F: FnMut(N) -> M, { @@ -244,15 +246,15 @@ impl<'db, N> ImplSource<'db, N> { /// is `()`, because codegen only requires a shallow resolution of an /// impl, and nested obligations are satisfied later. 
#[derive(Debug, Clone, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable)] -pub struct ImplSourceUserDefinedData<'db, N> { +pub(crate) struct ImplSourceUserDefinedData<'db, N> { #[type_visitable(ignore)] #[type_foldable(identity)] - pub impl_def_id: ImplId, - pub args: GenericArgs<'db>, - pub nested: Vec, + pub(crate) impl_def_id: ImplId, + pub(crate) args: GenericArgs<'db>, + pub(crate) nested: Vec, } -pub type Selection<'db> = ImplSource<'db, PredicateObligation<'db>>; +pub(crate) type Selection<'db> = ImplSource<'db, PredicateObligation<'db>>; impl<'db> InferCtxt<'db> { pub(crate) fn select( diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/snapshot/fudge.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/snapshot/fudge.rs index 74353574e3298..5902f8043b5ea 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/snapshot/fudge.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/snapshot/fudge.rs @@ -41,9 +41,7 @@ fn const_vars_since_snapshot<'db>( range.clone(), iter_idx_range(range) .map(|index| match table.probe_value(index) { - ConstVariableValue::Known { value: _ } => { - ConstVariableOrigin { param_def_id: None } - } + ConstVariableValue::Known { value: _ } => ConstVariableOrigin {}, ConstVariableValue::Unknown { origin, universe: _ } => origin, }) .collect(), @@ -228,7 +226,6 @@ impl<'a, 'db> TypeFolder> for InferenceFudger<'a, 'db> { fn fold_region(&mut self, r: Region<'db>) -> Region<'db> { if let RegionKind::ReVar(vid) = r.kind() { if self.snapshot_vars.region_vars.contains(&vid) { - let idx = vid.index() - self.snapshot_vars.region_vars.start.index(); self.infcx.next_region_var() } else { r diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/snapshot/undo_log.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/snapshot/undo_log.rs index 05a1013b3fbd5..c8ec8da7f31c9 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/snapshot/undo_log.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/snapshot/undo_log.rs @@ -1,7 +1,5 @@ //! Snapshotting in the infer ctxt of the next-trait-solver. 
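For readers new to the snapshotting machinery this module implements, here is a minimal standalone sketch of the undo-log idea; the types are illustrative and stand in for the actual `InferCtxtUndoLogs`/`Rollback` plumbing. Every mutation pushes an undo entry, a snapshot remembers the log length, and rolling back pops and reverses entries recorded after the snapshot.

```rust
// Standalone sketch of snapshot/rollback via an undo log (illustrative types).
enum UndoLog {
    Inserted(String),
}

#[derive(Default)]
struct Table {
    values: Vec<String>,
    undo_log: Vec<UndoLog>,
}

struct Snapshot {
    undo_len: usize,
}

impl Table {
    fn insert(&mut self, value: &str) {
        self.values.push(value.to_owned());
        self.undo_log.push(UndoLog::Inserted(value.to_owned()));
    }

    fn snapshot(&self) -> Snapshot {
        Snapshot { undo_len: self.undo_log.len() }
    }

    fn rollback_to(&mut self, snapshot: Snapshot) {
        while self.undo_log.len() > snapshot.undo_len {
            match self.undo_log.pop().unwrap() {
                // Reverse the recorded action; here an insert is undone by popping.
                UndoLog::Inserted(_) => {
                    self.values.pop();
                }
            }
        }
    }
}

fn main() {
    let mut table = Table::default();
    table.insert("a");
    let snap = table.snapshot();
    table.insert("b");
    table.insert("c");
    table.rollback_to(snap);
    assert_eq!(table.values, vec!["a".to_owned()]);
}
```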
-use std::marker::PhantomData; - use ena::snapshot_vec as sv; use ena::undo_log::{Rollback, UndoLogs}; use ena::unify as ut; @@ -14,7 +12,6 @@ use crate::next_solver::infer::opaque_types::OpaqueHiddenType; use crate::next_solver::infer::unify_key::ConstVidKey; use crate::next_solver::infer::unify_key::RegionVidKey; use crate::next_solver::infer::{InferCtxtInner, region_constraints, type_variable}; -use crate::traits; pub struct Snapshot { pub(crate) undo_len: usize, @@ -31,6 +28,7 @@ pub(crate) enum UndoLog<'db> { FloatUnificationTable(sv::UndoLog>), RegionConstraintCollector(region_constraints::UndoLog<'db>), RegionUnificationTable(sv::UndoLog>>), + #[expect(dead_code, reason = "this is used in rustc")] PushRegionObligation, } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/traits.rs index bc905c2e0b958..4f000c24cc73c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/traits.rs @@ -9,17 +9,13 @@ use std::{ use hir_def::TraitId; use macros::{TypeFoldable, TypeVisitable}; +use rustc_type_ir::Upcast; use rustc_type_ir::elaborate::Elaboratable; -use rustc_type_ir::{ - PredicatePolarity, Upcast, - solve::{Certainty, NoSolution}, -}; -use rustc_type_ir::{TypeFoldable, TypeVisitable}; use tracing::debug; use crate::next_solver::{ - Binder, Clause, DbInterner, Goal, ParamEnv, PolyTraitPredicate, Predicate, SolverDefId, Span, - TraitPredicate, TraitRef, Ty, + Clause, DbInterner, Goal, ParamEnv, PolyTraitPredicate, Predicate, Span, TraitPredicate, + TraitRef, Ty, }; use super::InferCtxt; @@ -106,9 +102,9 @@ impl<'db> Elaboratable> for PredicateObligation<'db> { fn child_with_derived_cause( &self, clause: Clause<'db>, - span: Span, - parent_trait_pred: PolyTraitPredicate<'db>, - index: usize, + _span: Span, + _parent_trait_pred: PolyTraitPredicate<'db>, + _index: usize, ) -> Self { let cause = ObligationCause::new(); Obligation { @@ -153,16 +149,16 @@ impl<'db, P> From> for Goal<'db, P> { } } -pub type PredicateObligation<'db> = Obligation<'db, Predicate<'db>>; -pub type TraitObligation<'db> = Obligation<'db, TraitPredicate<'db>>; +pub(crate) type PredicateObligation<'db> = Obligation<'db, Predicate<'db>>; +pub(crate) type TraitObligation<'db> = Obligation<'db, TraitPredicate<'db>>; -pub type PredicateObligations<'db> = Vec>; +pub(crate) type PredicateObligations<'db> = Vec>; impl<'db> PredicateObligation<'db> { /// Flips the polarity of the inner predicate. /// /// Given `T: Trait` predicate it returns `T: !Trait` and given `T: !Trait` returns `T: Trait`. - pub fn flip_polarity(&self, tcx: DbInterner<'db>) -> Option> { + pub fn flip_polarity(&self, _interner: DbInterner<'db>) -> Option> { Some(PredicateObligation { cause: self.cause.clone(), param_env: self.param_env, @@ -215,7 +211,7 @@ impl<'db, O> Obligation<'db, O> { /// `bound` or is not known to meet bound (note that this is /// conservative towards *no impl*, which is the opposite of the /// `evaluate` methods). 
-pub fn type_known_to_meet_bound_modulo_regions<'tcx>( +pub(crate) fn type_known_to_meet_bound_modulo_regions<'tcx>( infcx: &InferCtxt<'tcx>, param_env: ParamEnv<'tcx>, ty: Ty<'tcx>, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/unify_key.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/unify_key.rs index dc913b262a7c2..a09f65f082d97 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/unify_key.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/unify_key.rs @@ -6,18 +6,18 @@ use std::marker::PhantomData; use ena::unify::{NoError, UnifyKey, UnifyValue}; use rustc_type_ir::{ConstVid, RegionKind, RegionVid, UniverseIndex, inherent::IntoKind}; -use crate::next_solver::{Const, Region, SolverDefId, Ty}; +use crate::next_solver::{Const, Region}; #[derive(Clone, Debug)] -pub enum RegionVariableValue<'db> { +pub(crate) enum RegionVariableValue<'db> { Known { value: Region<'db> }, Unknown { universe: UniverseIndex }, } #[derive(PartialEq, Copy, Clone, Debug)] -pub struct RegionVidKey<'db> { - pub vid: RegionVid, - pub phantom: PhantomData>, +pub(crate) struct RegionVidKey<'db> { + pub(crate) vid: RegionVid, + pub(crate) phantom: PhantomData>, } impl<'db> From for RegionVidKey<'db> { @@ -41,7 +41,7 @@ impl<'db> UnifyKey for RegionVidKey<'db> { } } -pub struct RegionUnificationError; +pub(crate) struct RegionUnificationError; impl<'db> UnifyValue for RegionVariableValue<'db> { type Error = RegionUnificationError; @@ -90,15 +90,10 @@ impl<'db> UnifyValue for RegionVariableValue<'db> { // Generic consts. #[derive(Copy, Clone, Debug)] -pub struct ConstVariableOrigin { - /// `DefId` of the const parameter this was instantiated for, if any. - /// - /// This should only be used for diagnostics. - pub param_def_id: Option, -} +pub struct ConstVariableOrigin {} #[derive(Clone, Debug)] -pub enum ConstVariableValue<'db> { +pub(crate) enum ConstVariableValue<'db> { Known { value: Const<'db> }, Unknown { origin: ConstVariableOrigin, universe: UniverseIndex }, } @@ -106,7 +101,7 @@ pub enum ConstVariableValue<'db> { impl<'db> ConstVariableValue<'db> { /// If this value is known, returns the const it is known to be. /// Otherwise, `None`. - pub fn known(&self) -> Option> { + pub(crate) fn known(&self) -> Option> { match self { ConstVariableValue::Unknown { .. 
} => None, ConstVariableValue::Known { value } => Some(*value), @@ -115,9 +110,9 @@ impl<'db> ConstVariableValue<'db> { } #[derive(PartialEq, Copy, Clone, Debug)] -pub struct ConstVidKey<'db> { - pub vid: ConstVid, - pub phantom: PhantomData>, +pub(crate) struct ConstVidKey<'db> { + pub(crate) vid: ConstVid, + pub(crate) phantom: PhantomData>, } impl<'db> From for ConstVidKey<'db> { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/inspect.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/inspect.rs index 0db4746721752..d66aa9f277c73 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/inspect.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/inspect.rs @@ -1,4 +1,4 @@ -pub use rustc_next_trait_solver::solve::inspect::*; +pub(crate) use rustc_next_trait_solver::solve::inspect::*; use rustc_ast_ir::try_visit; use rustc_next_trait_solver::{ @@ -23,11 +23,11 @@ use crate::next_solver::{ obligation_ctxt::ObligationCtxt, }; -pub struct InspectConfig { - pub max_depth: usize, +pub(crate) struct InspectConfig { + pub(crate) max_depth: usize, } -pub struct InspectGoal<'a, 'db> { +pub(crate) struct InspectGoal<'a, 'db> { infcx: &'a SolverContext<'db>, depth: usize, orig_values: Vec>, @@ -103,7 +103,7 @@ impl<'db> NormalizesToTermHack<'db> { } } -pub struct InspectCandidate<'a, 'db> { +pub(crate) struct InspectCandidate<'a, 'db> { goal: &'a InspectGoal<'a, 'db>, kind: inspect::ProbeKind>, steps: Vec<&'a inspect::ProbeStep>>, @@ -113,15 +113,15 @@ pub struct InspectCandidate<'a, 'db> { } impl<'a, 'db> InspectCandidate<'a, 'db> { - pub fn kind(&self) -> inspect::ProbeKind> { + pub(crate) fn kind(&self) -> inspect::ProbeKind> { self.kind } - pub fn result(&self) -> Result { + pub(crate) fn result(&self) -> Result { self.result.map(|c| c.value.certainty) } - pub fn goal(&self) -> &'a InspectGoal<'a, 'db> { + pub(crate) fn goal(&self) -> &'a InspectGoal<'a, 'db> { self.goal } @@ -133,14 +133,17 @@ impl<'a, 'db> InspectCandidate<'a, 'db> { /// /// This is *not* the certainty of the candidate's full nested evaluation, which /// can be accessed with [`Self::result`] instead. - pub fn shallow_certainty(&self) -> Certainty { + pub(crate) fn shallow_certainty(&self) -> Certainty { self.shallow_certainty } /// Visit all nested goals of this candidate without rolling /// back their inference constraints. This function modifies /// the state of the `infcx`. - pub fn visit_nested_no_probe>(&self, visitor: &mut V) -> V::Result { + pub(crate) fn visit_nested_no_probe>( + &self, + visitor: &mut V, + ) -> V::Result { for goal in self.instantiate_nested_goals() { try_visit!(goal.visit_with(visitor)); } @@ -152,7 +155,7 @@ impl<'a, 'db> InspectCandidate<'a, 'db> { /// inference constraints. This function modifies the state of the `infcx`. /// /// See [`Self::instantiate_impl_args`] if you need the impl args too. - pub fn instantiate_nested_goals(&self) -> Vec> { + pub(crate) fn instantiate_nested_goals(&self) -> Vec> { let infcx = self.goal.infcx; let param_env = self.goal.goal.param_env; let mut orig_values = self.goal.orig_values.to_vec(); @@ -200,7 +203,7 @@ impl<'a, 'db> InspectCandidate<'a, 'db> { /// Instantiate the args of an impl if this candidate came from a /// `CandidateSource::Impl`. This function modifies the state of the /// `infcx`. 
- pub fn instantiate_impl_args(&self) -> GenericArgs<'db> { + pub(crate) fn instantiate_impl_args(&self) -> GenericArgs<'db> { let infcx = self.goal.infcx; let param_env = self.goal.goal.param_env; let mut orig_values = self.goal.orig_values.to_vec(); @@ -241,7 +244,7 @@ impl<'a, 'db> InspectCandidate<'a, 'db> { panic!("expected impl args probe step for `instantiate_impl_args`"); } - pub fn instantiate_proof_tree_for_nested_goal( + pub(crate) fn instantiate_proof_tree_for_nested_goal( &self, source: GoalSource, goal: Goal<'db, Predicate<'db>>, @@ -307,29 +310,33 @@ impl<'a, 'db> InspectCandidate<'a, 'db> { /// Visit all nested goals of this candidate, rolling back /// all inference constraints. - pub fn visit_nested_in_probe>(&self, visitor: &mut V) -> V::Result { + #[expect(dead_code, reason = "used in rustc")] + pub(crate) fn visit_nested_in_probe>( + &self, + visitor: &mut V, + ) -> V::Result { self.goal.infcx.probe(|_| self.visit_nested_no_probe(visitor)) } } impl<'a, 'db> InspectGoal<'a, 'db> { - pub fn infcx(&self) -> &'a InferCtxt<'db> { + pub(crate) fn infcx(&self) -> &'a InferCtxt<'db> { self.infcx } - pub fn goal(&self) -> Goal<'db, Predicate<'db>> { + pub(crate) fn goal(&self) -> Goal<'db, Predicate<'db>> { self.goal } - pub fn result(&self) -> Result { + pub(crate) fn result(&self) -> Result { self.result } - pub fn source(&self) -> GoalSource { + pub(crate) fn source(&self) -> GoalSource { self.source } - pub fn depth(&self) -> usize { + pub(crate) fn depth(&self) -> usize { self.depth } @@ -405,7 +412,7 @@ impl<'a, 'db> InspectGoal<'a, 'db> { } } - pub fn candidates(&'a self) -> Vec> { + pub(crate) fn candidates(&'a self) -> Vec> { let mut candidates = vec![]; let mut nested_goals = vec![]; self.candidates_recur(&mut candidates, &mut nested_goals, &self.final_revision); @@ -415,7 +422,7 @@ impl<'a, 'db> InspectGoal<'a, 'db> { /// Returns the single candidate applicable for the current goal, if it exists. /// /// Returns `None` if there are either no or multiple applicable candidates. - pub fn unique_applicable_candidate(&'a self) -> Option> { + pub(crate) fn unique_applicable_candidate(&'a self) -> Option> { // FIXME(-Znext-solver): This does not handle impl candidates // hidden by env candidates. let mut candidates = self.candidates(); @@ -467,7 +474,7 @@ impl<'a, 'db> InspectGoal<'a, 'db> { } /// The public API to interact with proof trees. -pub trait ProofTreeVisitor<'db> { +pub(crate) trait ProofTreeVisitor<'db> { type Result: VisitorResult; fn config(&self) -> InspectConfig { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs index 7be891106df33..331bcdcb26d38 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs @@ -1,72 +1,48 @@ //! Things related to the Interner in the next-trait-solver. -#![allow(unused)] // FIXME(next-solver): Remove this. 
use std::{fmt, ops::ControlFlow}; pub use tls_db::{attach_db, attach_db_allow_change, with_attached_db}; use base_db::Crate; -use chalk_ir::{ProgramClauseImplication, SeparatorTraitRef, Variances}; use hir_def::{ - AdtId, AttrDefId, BlockId, CallableDefId, EnumVariantId, GenericDefId, ItemContainerId, Lookup, - StructId, TypeAliasId, UnionId, VariantId, + AdtId, AttrDefId, BlockId, CallableDefId, EnumVariantId, ItemContainerId, StructId, UnionId, + VariantId, lang_item::LangItem, signatures::{FieldData, FnFlags, ImplFlags, StructFlags, TraitFlags}, }; -use intern::sym::non_exhaustive; -use intern::{Interned, impl_internable, sym}; use la_arena::Idx; -use rustc_abi::{Align, ReprFlags, ReprOptions}; -use rustc_ast_ir::visit::VisitorResult; +use rustc_abi::{ReprFlags, ReprOptions}; use rustc_hash::FxHashSet; -use rustc_index::{IndexVec, bit_set::DenseBitSet}; +use rustc_index::bit_set::DenseBitSet; use rustc_type_ir::{ - AliasTerm, AliasTermKind, AliasTy, AliasTyKind, BoundVar, CollectAndApply, DebruijnIndex, - EarlyBinder, FlagComputation, Flags, GenericArgKind, ImplPolarity, InferTy, - ProjectionPredicate, RegionKind, TermKind, TraitPredicate, TraitRef, TypeVisitableExt, - UniverseIndex, Upcast, Variance, WithCachedTypeInfo, - elaborate::{self, elaborate}, + AliasTermKind, AliasTyKind, BoundVar, CollectAndApply, DebruijnIndex, EarlyBinder, + FlagComputation, Flags, GenericArgKind, ImplPolarity, InferTy, TraitRef, TypeVisitableExt, + UniverseIndex, Upcast, Variance, + elaborate::elaborate, error::TypeError, - inherent::{ - self, AdtDef as _, Const as _, GenericArgs as _, GenericsOf, IntoKind, ParamEnv as _, - Region as _, SliceLike as _, Span as _, Ty as _, - }, - ir_print, + inherent::{self, GenericsOf, IntoKind, SliceLike as _, Span as _, Ty as _}, lang_items::{SolverAdtLangItem, SolverLangItem, SolverTraitLangItem}, - relate, solve::SizedTraitKind, }; -use salsa::plumbing::AsId; -use smallvec::{SmallVec, smallvec}; -use syntax::ast::SelfParamKind; -use tracing::debug; -use triomphe::Arc; use crate::{ - ConstScalar, FnAbi, Interner, + FnAbi, db::HirDatabase, - lower_nextsolver::{self, TyLoweringContext}, method_resolution::{ALL_FLOAT_FPS, ALL_INT_FPS, TyFingerprint}, next_solver::{ AdtIdWrapper, BoundConst, CallableIdWrapper, CanonicalVarKind, ClosureIdWrapper, CoroutineIdWrapper, Ctor, FnSig, FxIndexMap, ImplIdWrapper, InternedWrapperNoDebug, RegionAssumptions, SolverContext, SolverDefIds, TraitIdWrapper, TypeAliasIdWrapper, - TypingMode, - infer::{ - DbInternerInferExt, InferCtxt, - traits::{Obligation, ObligationCause}, - }, - obligation_ctxt::ObligationCtxt, util::{ContainsTypeErrors, explicit_item_bounds, for_trait_impls}, }, }; use super::{ - Binder, BoundExistentialPredicate, BoundExistentialPredicates, BoundTy, BoundTyKind, Clause, - ClauseKind, Clauses, Const, ConstKind, ErrorGuaranteed, ExprConst, ExternalConstraints, - ExternalConstraintsData, GenericArg, GenericArgs, InternedClausesWrapper, ParamConst, ParamEnv, - ParamTy, PlaceholderConst, PlaceholderTy, PredefinedOpaques, PredefinedOpaquesData, Predicate, - PredicateKind, SolverDefId, Term, Ty, TyKind, Tys, Valtree, ValueConst, + Binder, BoundExistentialPredicates, BoundTy, BoundTyKind, Clause, ClauseKind, Clauses, Const, + ErrorGuaranteed, ExprConst, ExternalConstraints, GenericArg, GenericArgs, ParamConst, ParamEnv, + ParamTy, PlaceholderConst, PlaceholderTy, PredefinedOpaques, Predicate, SolverDefId, Term, Ty, + TyKind, Tys, Valtree, ValueConst, abi::Safety, fold::{BoundVarReplacer, BoundVarReplacerDelegate, 
FnMutDelegate}, generics::{Generics, generics}, @@ -631,7 +607,6 @@ impl<'db> inherent::AdtDef> for AdtDef { self, interner: DbInterner<'db>, ) -> Option, Ty<'db>>> { - let db = interner.db(); let hir_def::AdtId::StructId(struct_id) = self.inner().id else { return None; }; @@ -647,23 +622,10 @@ impl<'db> inherent::AdtDef> for AdtDef { ) -> EarlyBinder, impl IntoIterator>> { let db = interner.db(); // FIXME: this is disabled just to match the behavior with chalk right now - let field_tys = |id: VariantId| { - let variant_data = id.fields(db); - let fields = if variant_data.fields().is_empty() { - vec![] - } else { - let field_types = db.field_types_ns(id); - variant_data - .fields() - .iter() - .map(|(idx, _)| { - let ty = field_types[idx]; - ty.skip_binder() - }) - .collect() - }; + let _field_tys = |id: VariantId| { + db.field_types_ns(id).iter().map(|(_, ty)| ty.skip_binder()).collect::>() }; - let field_tys = |id: VariantId| vec![]; + let field_tys = |_id: VariantId| vec![]; let tys: Vec<_> = match self.inner().id { hir_def::AdtId::StructId(id) => field_tys(id.into()), hir_def::AdtId::UnionId(id) => field_tys(id.into()), @@ -696,7 +658,7 @@ impl<'db> inherent::AdtDef> for AdtDef { fn destructor( self, - interner: DbInterner<'db>, + _interner: DbInterner<'db>, ) -> Option { // FIXME(next-solver) None @@ -742,7 +704,7 @@ impl<'db> inherent::Features> for Features { false } - fn feature_bound_holds_in_crate(self, symbol: ()) -> bool { + fn feature_bound_holds_in_crate(self, _symbol: ()) -> bool { false } } @@ -1002,7 +964,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { fn mk_tracked( self, data: T, - dep_node: Self::DepNodeIndex, + _dep_node: Self::DepNodeIndex, ) -> Self::Tracked { Tracked(data) } @@ -1024,7 +986,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { fn canonical_param_env_cache_get_or_insert( self, - param_env: Self::ParamEnv, + _param_env: Self::ParamEnv, f: impl FnOnce() -> rustc_type_ir::CanonicalParamEnvCacheEntry, from_entry: impl FnOnce(&rustc_type_ir::CanonicalParamEnvCacheEntry) -> R, ) -> R { @@ -1162,17 +1124,17 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { (TraitRef::new_from_args(self, trait_def_id.try_into().unwrap(), trait_args), alias_args) } - fn check_args_compatible(self, def_id: Self::DefId, args: Self::GenericArgs) -> bool { + fn check_args_compatible(self, _def_id: Self::DefId, _args: Self::GenericArgs) -> bool { // FIXME true } - fn debug_assert_args_compatible(self, def_id: Self::DefId, args: Self::GenericArgs) {} + fn debug_assert_args_compatible(self, _def_id: Self::DefId, _args: Self::GenericArgs) {} fn debug_assert_existential_args_compatible( self, - def_id: Self::DefId, - args: Self::GenericArgs, + _def_id: Self::DefId, + _args: Self::GenericArgs, ) { } @@ -1240,11 +1202,11 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { self.db().callable_item_signature(def_id.0) } - fn coroutine_movability(self, def_id: Self::CoroutineId) -> rustc_ast_ir::Movability { + fn coroutine_movability(self, _def_id: Self::CoroutineId) -> rustc_ast_ir::Movability { unimplemented!() } - fn coroutine_for_closure(self, def_id: Self::CoroutineId) -> Self::CoroutineId { + fn coroutine_for_closure(self, _def_id: Self::CoroutineId) -> Self::CoroutineId { unimplemented!() } @@ -1421,9 +1383,10 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { }) } + #[expect(unreachable_code)] fn const_conditions( self, - def_id: Self::DefId, + _def_id: Self::DefId, ) -> EarlyBinder< Self, impl IntoIterator>>, @@ -1431,7 +1394,7 @@ 
impl<'db> rustc_type_ir::Interner for DbInterner<'db> { EarlyBinder::bind([unimplemented!()]) } - fn has_target_features(self, def_id: Self::FunctionId) -> bool { + fn has_target_features(self, _def_id: Self::FunctionId) -> bool { false } @@ -1462,7 +1425,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { hir_def::lang_item::LangItemTarget::Union(union_id) => union_id.into(), hir_def::lang_item::LangItemTarget::TypeAlias(type_alias_id) => type_alias_id.into(), hir_def::lang_item::LangItemTarget::Trait(trait_id) => trait_id.into(), - hir_def::lang_item::LangItemTarget::EnumVariant(enum_variant_id) => unimplemented!(), + hir_def::lang_item::LangItemTarget::EnumVariant(_) => unimplemented!(), } } @@ -1552,7 +1515,6 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { CoroutineReturn, CoroutineYield, FutureOutput, - AsyncFnOnceOutput, CallRefFuture, CallOnceFuture, AsyncFnOnceOutput, @@ -1596,7 +1558,6 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { AsyncFnMut, AsyncFnOnce, AsyncFnOnceOutput, - AsyncFnOnceOutput, ) } @@ -1636,7 +1597,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { }; if fps.is_empty() { - for_trait_impls( + _ = for_trait_impls( self.db(), self.krate.expect("Must have self.krate"), self.block, @@ -1658,7 +1619,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { }, ); } else { - for_trait_impls( + _ = for_trait_impls( self.db(), self.krate.expect("Must have self.krate"), self.block, @@ -1698,7 +1659,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { } } - fn has_item_definition(self, def_id: Self::DefId) -> bool { + fn has_item_definition(self, _def_id: Self::DefId) -> bool { // FIXME(next-solver): should check if the associated item has a value. true } @@ -1746,13 +1707,13 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { trait_data.flags.contains(TraitFlags::FUNDAMENTAL) } - fn trait_may_be_implemented_via_object(self, trait_def_id: Self::TraitId) -> bool { + fn trait_may_be_implemented_via_object(self, _trait_def_id: Self::TraitId) -> bool { // FIXME(next-solver): should check the `TraitFlags` for // the `#[rustc_do_not_implement_via_object]` flag true } - fn is_impl_trait_in_trait(self, def_id: Self::DefId) -> bool { + fn is_impl_trait_in_trait(self, _def_id: Self::DefId) -> bool { // FIXME(next-solver) false } @@ -1761,22 +1722,22 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { panic!("Bug encountered in next-trait-solver: {}", msg.to_string()) } - fn is_general_coroutine(self, coroutine_def_id: Self::CoroutineId) -> bool { + fn is_general_coroutine(self, _coroutine_def_id: Self::CoroutineId) -> bool { // FIXME(next-solver) true } - fn coroutine_is_async(self, coroutine_def_id: Self::CoroutineId) -> bool { + fn coroutine_is_async(self, _coroutine_def_id: Self::CoroutineId) -> bool { // FIXME(next-solver) true } - fn coroutine_is_gen(self, coroutine_def_id: Self::CoroutineId) -> bool { + fn coroutine_is_gen(self, _coroutine_def_id: Self::CoroutineId) -> bool { // FIXME(next-solver) false } - fn coroutine_is_async_gen(self, coroutine_def_id: Self::CoroutineId) -> bool { + fn coroutine_is_async_gen(self, _coroutine_def_id: Self::CoroutineId) -> bool { // FIXME(next-solver) false } @@ -1870,19 +1831,19 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { Binder::bind_with_vars(inner, bound_vars) } - fn opaque_types_defined_by(self, defining_anchor: Self::LocalDefId) -> Self::LocalDefIds { + fn opaque_types_defined_by(self, _defining_anchor: Self::LocalDefId) -> Self::LocalDefIds { // 
FIXME(next-solver) SolverDefIds::new_from_iter(self, []) } - fn alias_has_const_conditions(self, def_id: Self::DefId) -> bool { + fn alias_has_const_conditions(self, _def_id: Self::DefId) -> bool { // FIXME(next-solver) false } fn explicit_implied_const_bounds( self, - def_id: Self::DefId, + _def_id: Self::DefId, ) -> EarlyBinder< Self, impl IntoIterator>>, @@ -1899,14 +1860,14 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { self.db().function_signature(id).flags.contains(FnFlags::CONST) } - fn impl_is_const(self, def_id: Self::ImplId) -> bool { + fn impl_is_const(self, _def_id: Self::ImplId) -> bool { false } fn opt_alias_variances( self, - kind: impl Into, - def_id: Self::DefId, + _kind: impl Into, + _def_id: Self::DefId, ) -> Option { None } @@ -1933,7 +1894,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { fn coroutine_hidden_types( self, - def_id: Self::CoroutineId, + _def_id: Self::CoroutineId, ) -> EarlyBinder>> { // FIXME(next-solver) @@ -1952,14 +1913,14 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { self.db().trait_signature(trait_.0).flags.contains(TraitFlags::UNSAFE) } - fn impl_self_is_guaranteed_unsized(self, def_id: Self::ImplId) -> bool { + fn impl_self_is_guaranteed_unsized(self, _def_id: Self::ImplId) -> bool { false } fn impl_specializes( self, - specializing_impl_def_id: Self::ImplId, - parent_impl_def_id: Self::ImplId, + _specializing_impl_def_id: Self::ImplId, + _parent_impl_def_id: Self::ImplId, ) -> bool { false } @@ -1970,7 +1931,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { fn opaque_types_and_coroutines_defined_by( self, - defining_anchor: Self::LocalDefId, + _defining_anchor: Self::LocalDefId, ) -> Self::LocalDefIds { Default::default() } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ir_print.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ir_print.rs index 69afcf5dde90b..dab0fe9e4a903 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ir_print.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ir_print.rs @@ -5,8 +5,6 @@ use std::any::type_name_of_val; use rustc_type_ir::inherent::SliceLike; use rustc_type_ir::{self as ty, ir_print::IrPrint}; -use crate::db::HirDatabase; - use super::SolverDefId; use super::interner::DbInterner; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/mapping.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/mapping.rs index adbc6094a221c..671f06f1b88a6 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/mapping.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/mapping.rs @@ -1,35 +1,25 @@ //! Things useful for mapping to/from Chalk and next-trait-solver types. 
-use base_db::Crate; use chalk_ir::{ - CanonicalVarKind, CanonicalVarKinds, FnPointer, InferenceVar, Substitution, TyVariableKind, - WellFormed, cast::Cast, fold::Shift, interner::HasInterner, -}; -use hir_def::{ - CallableDefId, ConstParamId, FunctionId, GeneralConstId, LifetimeParamId, TypeAliasId, - TypeOrConstParamId, TypeParamId, signatures::TraitFlags, + InferenceVar, Substitution, TyVariableKind, WellFormed, cast::Cast, fold::Shift, + interner::HasInterner, }; +use hir_def::{CallableDefId, ConstParamId, GeneralConstId, TypeParamId, signatures::TraitFlags}; use hir_def::{GenericDefId, GenericParamId}; -use intern::sym; use rustc_type_ir::{ - AliasTerm, BoundVar, DebruijnIndex, ExistentialProjection, ExistentialTraitRef, Interner as _, + AliasTerm, BoundVar, DebruijnIndex, ExistentialProjection, ExistentialTraitRef, OutlivesPredicate, ProjectionPredicate, TypeFoldable, TypeSuperFoldable, TypeVisitable, - TypeVisitableExt, UniverseIndex, elaborate, - inherent::{BoundVarLike, Clause as _, IntoKind, PlaceholderLike, SliceLike, Ty as _}, + UniverseIndex, elaborate, + inherent::{BoundVarLike, IntoKind, SliceLike, Ty as _}, shift_vars, solve::Goal, }; -use salsa::plumbing::FromId; -use salsa::{Id, plumbing::AsId}; use crate::next_solver::BoundConst; use crate::{ - ConstScalar, ImplTraitId, Interner, MemoryMap, - db::{ - HirDatabase, InternedClosureId, InternedCoroutineId, InternedLifetimeParamId, - InternedOpaqueTyId, InternedTypeOrConstParamId, - }, - from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, + ConstScalar, Interner, MemoryMap, + db::{InternedClosureId, InternedCoroutineId, InternedOpaqueTyId}, + from_assoc_type_id, from_chalk_trait_id, mapping::ToChalk, next_solver::{ Binder, ClauseKind, ConstBytes, TraitPredicate, UnevaluatedConst, @@ -42,11 +32,10 @@ use crate::{ }; use super::{ - BoundExistentialPredicate, BoundExistentialPredicates, BoundRegion, BoundRegionKind, BoundTy, - BoundTyKind, Canonical, CanonicalVars, Clause, Clauses, Const, Ctor, EarlyParamRegion, - ErrorGuaranteed, ExistentialPredicate, GenericArg, GenericArgs, ParamConst, ParamEnv, ParamTy, - Placeholder, PlaceholderConst, PlaceholderRegion, PlaceholderTy, Predicate, PredicateKind, - Region, SolverDefId, SubtypePredicate, Term, TraitRef, Ty, Tys, ValueConst, VariancesOf, + BoundExistentialPredicates, BoundRegion, BoundRegionKind, BoundTy, BoundTyKind, Canonical, + CanonicalVars, Clause, Clauses, Const, EarlyParamRegion, ErrorGuaranteed, ExistentialPredicate, + GenericArg, GenericArgs, ParamConst, ParamEnv, ParamTy, Predicate, PredicateKind, Region, + SolverDefId, SubtypePredicate, Term, TraitRef, Ty, Tys, ValueConst, }; // FIXME: This should urgently go (as soon as we finish the migration off Chalk, that is). 
@@ -167,7 +156,7 @@ where } impl NextSolverToChalk<'_, chalk_ir::Mutability> for rustc_ast_ir::Mutability { - fn to_chalk(self, interner: DbInterner<'_>) -> chalk_ir::Mutability { + fn to_chalk(self, _interner: DbInterner<'_>) -> chalk_ir::Mutability { match self { rustc_ast_ir::Mutability::Not => chalk_ir::Mutability::Not, rustc_ast_ir::Mutability::Mut => chalk_ir::Mutability::Mut, @@ -176,7 +165,7 @@ impl NextSolverToChalk<'_, chalk_ir::Mutability> for rustc_ast_ir::Mutability { } impl NextSolverToChalk<'_, chalk_ir::Safety> for crate::next_solver::abi::Safety { - fn to_chalk(self, interner: DbInterner<'_>) -> chalk_ir::Safety { + fn to_chalk(self, _interner: DbInterner<'_>) -> chalk_ir::Safety { match self { crate::next_solver::abi::Safety::Unsafe => chalk_ir::Safety::Unsafe, crate::next_solver::abi::Safety::Safe => chalk_ir::Safety::Safe, @@ -349,8 +338,6 @@ impl<'db> ChalkToNextSolver<'db, Ty<'db>> for chalk_ir::Ty { let id = from_assoc_type_id(projection.associated_ty_id); let def_id = SolverDefId::TypeAliasId(id); - let generics = interner.generics_of(def_id); - let parent_len = generics.parent_count; let substs = projection.substitution.iter(Interner).skip(1); let args = GenericArgs::new_from_iter( @@ -363,7 +350,7 @@ impl<'db> ChalkToNextSolver<'db, Ty<'db>> for chalk_ir::Ty { ); (def_id, args) } - chalk_ir::AliasTy::Opaque(opaque_ty) => { + chalk_ir::AliasTy::Opaque(_opaque_ty) => { panic!("Invalid ExistentialPredicate (opaques can't be named)."); } }; @@ -379,10 +366,10 @@ impl<'db> ChalkToNextSolver<'db, Ty<'db>> for chalk_ir::Ty { ); ExistentialPredicate::Projection(projection) } - chalk_ir::WhereClause::LifetimeOutlives(lifetime_outlives) => { + chalk_ir::WhereClause::LifetimeOutlives(_lifetime_outlives) => { return None; } - chalk_ir::WhereClause::TypeOutlives(type_outlives) => return None, + chalk_ir::WhereClause::TypeOutlives(_type_outlives) => return None, }; Some(Binder::bind_with_vars(clause, bound_vars)) @@ -621,7 +608,7 @@ impl<'db> NextSolverToChalk<'db, chalk_ir::VariableKinds> for BoundVar } impl<'db> ChalkToNextSolver<'db, BoundVarKind> for chalk_ir::VariableKind { - fn to_nextsolver(&self, interner: DbInterner<'db>) -> BoundVarKind { + fn to_nextsolver(&self, _interner: DbInterner<'db>) -> BoundVarKind { match self { chalk_ir::VariableKind::Ty(_ty_variable_kind) => BoundVarKind::Ty(BoundTyKind::Anon), chalk_ir::VariableKind::Lifetime => BoundVarKind::Region(BoundRegionKind::Anon), @@ -631,7 +618,7 @@ impl<'db> ChalkToNextSolver<'db, BoundVarKind> for chalk_ir::VariableKind NextSolverToChalk<'db, chalk_ir::VariableKind> for BoundVarKind { - fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::VariableKind { + fn to_chalk(self, _interner: DbInterner<'db>) -> chalk_ir::VariableKind { match self { BoundVarKind::Ty(_) => chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General), BoundVarKind::Region(_) => chalk_ir::VariableKind::Lifetime, @@ -676,7 +663,7 @@ impl<'db> ChalkToNextSolver<'db, crate::lower_nextsolver::ImplTraitIdx<'db>> { fn to_nextsolver( &self, - interner: DbInterner<'db>, + _interner: DbInterner<'db>, ) -> crate::lower_nextsolver::ImplTraitIdx<'db> { crate::lower_nextsolver::ImplTraitIdx::from_raw(self.into_raw()) } @@ -739,7 +726,7 @@ impl<'db> NextSolverToChalk<'db, chalk_ir::UniverseIndex> for rustc_type_ir::Uni impl<'db> ChalkToNextSolver<'db, rustc_type_ir::InferTy> for (chalk_ir::InferenceVar, chalk_ir::TyVariableKind) { - fn to_nextsolver(&self, interner: DbInterner<'db>) -> rustc_type_ir::InferTy { + fn to_nextsolver(&self, _interner: 
DbInterner<'db>) -> rustc_type_ir::InferTy { match self.1 { chalk_ir::TyVariableKind::General => { rustc_type_ir::InferTy::TyVar(rustc_type_ir::TyVid::from_u32(self.0.index())) @@ -755,7 +742,7 @@ impl<'db> ChalkToNextSolver<'db, rustc_type_ir::InferTy> } impl<'db> ChalkToNextSolver<'db, rustc_ast_ir::Mutability> for chalk_ir::Mutability { - fn to_nextsolver(&self, interner: DbInterner<'db>) -> rustc_ast_ir::Mutability { + fn to_nextsolver(&self, _interner: DbInterner<'db>) -> rustc_ast_ir::Mutability { match self { chalk_ir::Mutability::Mut => rustc_ast_ir::Mutability::Mut, chalk_ir::Mutability::Not => rustc_ast_ir::Mutability::Not, @@ -808,7 +795,7 @@ impl<'db, T: HasInterner + ChalkToNextSolver<'db, U>, U> chalk_ir::VariableKind::Lifetime => { rustc_type_ir::CanonicalVarKind::Region(UniverseIndex::ROOT) } - chalk_ir::VariableKind::Const(ty) => { + chalk_ir::VariableKind::Const(_ty) => { rustc_type_ir::CanonicalVarKind::Const(UniverseIndex::ROOT) } }), @@ -863,25 +850,25 @@ impl<'db, T: NextSolverToChalk<'db, U>, U: HasInterner> impl<'db> ChalkToNextSolver<'db, Predicate<'db>> for chalk_ir::Goal { fn to_nextsolver(&self, interner: DbInterner<'db>) -> Predicate<'db> { match self.data(Interner) { - chalk_ir::GoalData::Quantified(quantifier_kind, binders) => { + chalk_ir::GoalData::Quantified(_quantifier_kind, binders) => { if !binders.binders.is_empty(Interner) { panic!("Should not be constructed."); } let (val, _) = binders.clone().into_value_and_skipped_binders(); val.shifted_out(Interner).unwrap().to_nextsolver(interner) } - chalk_ir::GoalData::Implies(program_clauses, goal) => { + chalk_ir::GoalData::Implies(_program_clauses, _goal) => { panic!("Should not be constructed.") } - chalk_ir::GoalData::All(goals) => panic!("Should not be constructed."), - chalk_ir::GoalData::Not(goal) => panic!("Should not be constructed."), + chalk_ir::GoalData::All(_goals) => panic!("Should not be constructed."), + chalk_ir::GoalData::Not(_goal) => panic!("Should not be constructed."), chalk_ir::GoalData::EqGoal(eq_goal) => { let arg_to_term = |g: &chalk_ir::GenericArg| match g.data(Interner) { chalk_ir::GenericArgData::Ty(ty) => Term::Ty(ty.to_nextsolver(interner)), chalk_ir::GenericArgData::Const(const_) => { Term::Const(const_.to_nextsolver(interner)) } - chalk_ir::GenericArgData::Lifetime(lifetime) => unreachable!(), + chalk_ir::GenericArgData::Lifetime(_lifetime) => unreachable!(), }; let pred_kind = PredicateKind::AliasRelate( arg_to_term(&eq_goal.a), @@ -1112,16 +1099,16 @@ impl<'db> ChalkToNextSolver<'db, PredicateKind<'db>> for chalk_ir::DomainGoal panic!("Should not be constructed."), - chalk_ir::DomainGoal::IsUpstream(ty) => panic!("Should not be constructed."), - chalk_ir::DomainGoal::IsFullyVisible(ty) => panic!("Should not be constructed."), - chalk_ir::DomainGoal::LocalImplAllowed(trait_ref) => { + chalk_ir::DomainGoal::IsLocal(_ty) => panic!("Should not be constructed."), + chalk_ir::DomainGoal::IsUpstream(_ty) => panic!("Should not be constructed."), + chalk_ir::DomainGoal::IsFullyVisible(_ty) => panic!("Should not be constructed."), + chalk_ir::DomainGoal::LocalImplAllowed(_trait_ref) => { panic!("Should not be constructed.") } chalk_ir::DomainGoal::Compatible => panic!("Should not be constructed."), - chalk_ir::DomainGoal::DownstreamType(ty) => panic!("Should not be constructed."), + chalk_ir::DomainGoal::DownstreamType(_ty) => panic!("Should not be constructed."), chalk_ir::DomainGoal::Reveal => panic!("Should not be constructed."), - chalk_ir::DomainGoal::ObjectSafe(trait_id) => 
panic!("Should not be constructed."), + chalk_ir::DomainGoal::ObjectSafe(_trait_id) => panic!("Should not be constructed."), } } } @@ -1176,7 +1163,7 @@ impl<'db> NextSolverToChalk<'db, chalk_ir::GoalData> for PredicateKind rustc_type_ir::PredicateKind::AliasRelate( alias_term, target_term, - alias_relation_direction, + _alias_relation_direction, ) => { let term_to_generic_arg = |term: Term<'db>| match term { Term::Ty(ty) => chalk_ir::GenericArg::new( @@ -1462,7 +1449,7 @@ pub fn convert_ty_for_result<'db>(interner: DbInterner<'db>, ty: Ty<'db>) -> cra }, // For `Placeholder`, `Bound` and `Param`, see the comment on the reverse conversion. - rustc_type_ir::TyKind::Placeholder(placeholder) => { + rustc_type_ir::TyKind::Placeholder(_placeholder) => { unimplemented!( "A `rustc_type_ir::TyKind::Placeholder` doesn't have a direct \ correspondence in Chalk, as it represents a universally instantiated `Bound`.\n\ @@ -1511,10 +1498,10 @@ pub fn convert_ty_for_result<'db>(interner: DbInterner<'db>, ty: Ty<'db>) -> cra let binders = chalk_ir::VariableKinds::from_iter( Interner, p.bound_vars().iter().map(|b| match b { - BoundVarKind::Ty(kind) => { + BoundVarKind::Ty(_kind) => { chalk_ir::VariableKind::Ty(TyVariableKind::General) } - BoundVarKind::Region(kind) => chalk_ir::VariableKind::Lifetime, + BoundVarKind::Region(_kind) => chalk_ir::VariableKind::Lifetime, BoundVarKind::Const => { chalk_ir::VariableKind::Const(crate::TyKind::Error.intern(Interner)) } @@ -1644,7 +1631,7 @@ pub fn convert_const_for_result<'db>( rustc_type_ir::ConstKind::Infer(rustc_type_ir::InferConst::Var(var)) => { chalk_ir::ConstValue::InferenceVar(chalk_ir::InferenceVar::from(var.as_u32())) } - rustc_type_ir::ConstKind::Infer(rustc_type_ir::InferConst::Fresh(fresh)) => { + rustc_type_ir::ConstKind::Infer(rustc_type_ir::InferConst::Fresh(_fresh)) => { panic!("Vars should not be freshened.") } rustc_type_ir::ConstKind::Param(param) => { @@ -1657,7 +1644,7 @@ pub fn convert_const_for_result<'db>( var.var.index(), )) } - rustc_type_ir::ConstKind::Placeholder(placeholder_const) => { + rustc_type_ir::ConstKind::Placeholder(_placeholder_const) => { unimplemented!( "A `rustc_type_ir::ConstKind::Placeholder` doesn't have a direct \ correspondence in Chalk, as it represents a universally instantiated `Bound`.\n\ @@ -1717,7 +1704,7 @@ pub fn convert_region_for_result<'db>( bound.var.as_usize(), )) } - rustc_type_ir::RegionKind::RePlaceholder(placeholder) => unimplemented!( + rustc_type_ir::RegionKind::RePlaceholder(_placeholder) => unimplemented!( "A `rustc_type_ir::RegionKind::RePlaceholder` doesn't have a direct \ correspondence in Chalk, as it represents a universally instantiated `Bound`.\n\ It therefore feels safer to leave it panicking, but if you hit this panic \ diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/normalize.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/normalize.rs index 2f241f8fecbe7..bd678b3e78ff3 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/normalize.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/normalize.rs @@ -5,7 +5,6 @@ use rustc_type_ir::{ inherent::{IntoKind, Term as _}, }; -use crate::next_solver::SolverDefId; use crate::next_solver::{ Binder, Const, ConstKind, DbInterner, Goal, ParamEnv, Predicate, PredicateKind, Term, Ty, TyKind, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/obligation_ctxt.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/obligation_ctxt.rs index e85574a8826ff..ae92aea855276 100644 --- 
a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/obligation_ctxt.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/obligation_ctxt.rs @@ -1,14 +1,15 @@ use hir_def::TraitId; -use rustc_type_ir::relate::Relate; use rustc_type_ir::{TypeFoldable, Upcast, Variance}; -use crate::next_solver::fulfill::{FulfillmentCtxt, NextSolverError}; -use crate::next_solver::infer::at::ToTrace; -use crate::next_solver::infer::traits::{ - Obligation, ObligationCause, PredicateObligation, PredicateObligations, +use crate::next_solver::{ + Const, DbInterner, ParamEnv, Term, TraitRef, Ty, TypeError, + fulfill::{FulfillmentCtxt, NextSolverError}, + infer::{ + InferCtxt, InferOk, + at::ToTrace, + traits::{Obligation, ObligationCause, PredicateObligation, PredicateObligations}, + }, }; -use crate::next_solver::infer::{DefineOpaqueTypes, InferCtxt, InferOk, TypeTrace}; -use crate::next_solver::{Const, DbInterner, ParamEnv, Term, TraitRef, Ty, TypeError}; /// Used if you want to have pleasant experience when dealing /// with obligations outside of hir or mir typeck. @@ -69,21 +70,7 @@ impl<'a, 'db> ObligationCtxt<'a, 'db> { ) -> Result<(), TypeError<'db>> { self.infcx .at(cause, param_env) - .eq(DefineOpaqueTypes::Yes, expected, actual) - .map(|infer_ok| self.register_infer_ok_obligations(infer_ok)) - } - - pub fn eq_trace>>( - &mut self, - cause: &ObligationCause, - param_env: ParamEnv<'db>, - trace: TypeTrace<'db>, - expected: T, - actual: T, - ) -> Result<(), TypeError<'db>> { - self.infcx - .at(cause, param_env) - .eq_trace(DefineOpaqueTypes::Yes, trace, expected, actual) + .eq(expected, actual) .map(|infer_ok| self.register_infer_ok_obligations(infer_ok)) } @@ -97,7 +84,7 @@ impl<'a, 'db> ObligationCtxt<'a, 'db> { ) -> Result<(), TypeError<'db>> { self.infcx .at(cause, param_env) - .sub(DefineOpaqueTypes::Yes, expected, actual) + .sub(expected, actual) .map(|infer_ok| self.register_infer_ok_obligations(infer_ok)) } @@ -111,7 +98,7 @@ impl<'a, 'db> ObligationCtxt<'a, 'db> { ) -> Result<(), TypeError<'db>> { self.infcx .at(cause, param_env) - .relate(DefineOpaqueTypes::Yes, expected, variance, actual) + .relate(expected, variance, actual) .map(|infer_ok| self.register_infer_ok_obligations(infer_ok)) } @@ -125,7 +112,7 @@ impl<'a, 'db> ObligationCtxt<'a, 'db> { ) -> Result<(), TypeError<'db>> { self.infcx .at(cause, param_env) - .sup(DefineOpaqueTypes::Yes, expected, actual) + .sup(expected, actual) .map(|infer_ok| self.register_infer_ok_obligations(infer_ok)) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/opaques.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/opaques.rs index 0aee779ed04f0..8714c95f27d8d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/opaques.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/opaques.rs @@ -1,11 +1,10 @@ //! Things related to opaques in the next-trait-solver. 
-use intern::Interned; use rustc_ast_ir::try_visit; use crate::next_solver::SolverDefId; -use super::{CanonicalVarKind, DbInterner, interned_vec_nolifetime_salsa}; +use super::{DbInterner, interned_vec_nolifetime_salsa}; pub type OpaqueTypeKey<'db> = rustc_type_ir::OpaqueTypeKey>; pub type PredefinedOpaquesData<'db> = rustc_type_ir::solve::PredefinedOpaquesData>; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs index 6a0a07705a8c0..70b6f20ede04d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs @@ -2,19 +2,16 @@ use std::cmp::Ordering; -use intern::Interned; use macros::{TypeFoldable, TypeVisitable}; -use rustc_ast_ir::try_visit; use rustc_type_ir::{ self as ty, CollectAndApply, DebruijnIndex, EarlyBinder, FlagComputation, Flags, PredicatePolarity, TypeFlags, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, - TypeVisitable, Upcast, UpcastFrom, VisitorResult, WithCachedTypeInfo, + TypeVisitable, Upcast, UpcastFrom, WithCachedTypeInfo, elaborate::Elaboratable, error::{ExpectedFound, TypeError}, inherent::{IntoKind, SliceLike}, - relate::Relate, }; -use smallvec::{SmallVec, smallvec}; +use smallvec::SmallVec; use crate::next_solver::TraitIdWrapper; @@ -56,11 +53,11 @@ fn stable_cmp_existential_predicate<'db>( // FIXME: this is actual unstable - see impl in predicate.rs in `rustc_middle` match (a, b) { (ExistentialPredicate::Trait(_), ExistentialPredicate::Trait(_)) => Ordering::Equal, - (ExistentialPredicate::Projection(a), ExistentialPredicate::Projection(b)) => { + (ExistentialPredicate::Projection(_a), ExistentialPredicate::Projection(_b)) => { // Should sort by def path hash Ordering::Equal } - (ExistentialPredicate::AutoTrait(a), ExistentialPredicate::AutoTrait(b)) => { + (ExistentialPredicate::AutoTrait(_a), ExistentialPredicate::AutoTrait(_b)) => { // Should sort by def path hash Ordering::Equal } @@ -283,8 +280,6 @@ impl<'db> std::hash::Hash for InternedClausesWrapper<'db> { } } -type InternedClauses<'db> = Interned>; - #[salsa::interned(constructor = new_)] pub struct Clauses<'db> { #[returns(ref)] diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/region.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/region.rs index 5e7eb7532bb09..a3cfa65eb3734 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/region.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/region.rs @@ -1,10 +1,9 @@ //! Things related to regions. use hir_def::LifetimeParamId; -use intern::{Interned, Symbol}; +use intern::Symbol; use rustc_type_ir::{ BoundVar, DebruijnIndex, Flags, INNERMOST, RegionVid, TypeFlags, TypeFoldable, TypeVisitable, - VisitorResult, inherent::{IntoKind, PlaceholderLike, SliceLike}, relate::Relate, }; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/solver.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/solver.rs index 2457447ee39ba..487d164f8691c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/solver.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/solver.rs @@ -1,29 +1,22 @@ //! Defining `SolverContext` for next-trait-solver. 
-use hir_def::{AssocItemId, GeneralConstId, TypeAliasId}; +use hir_def::{AssocItemId, GeneralConstId}; use rustc_next_trait_solver::delegate::SolverDelegate; use rustc_type_ir::GenericArgKind; use rustc_type_ir::lang_items::SolverTraitLangItem; use rustc_type_ir::{ - InferCtxtLike, Interner, PredicatePolarity, TypeFlags, TypeVisitableExt, UniverseIndex, - inherent::{IntoKind, SliceLike, Span as _, Term as _, Ty as _}, + InferCtxtLike, Interner, PredicatePolarity, TypeFlags, TypeVisitableExt, + inherent::{IntoKind, Term as _, Ty as _}, solve::{Certainty, NoSolution}, }; -use crate::next_solver::mapping::NextSolverToChalk; use crate::next_solver::{CanonicalVarKind, ImplIdWrapper}; -use crate::{ - TraitRefExt, - db::HirDatabase, - next_solver::{ - ClauseKind, CoercePredicate, PredicateKind, SubtypePredicate, mapping::ChalkToNextSolver, - util::sizedness_fast_path, - }, +use crate::next_solver::{ + ClauseKind, CoercePredicate, PredicateKind, SubtypePredicate, util::sizedness_fast_path, }; use super::{ - Canonical, CanonicalVarValues, Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, - ParamEnv, Predicate, SolverDefId, Span, Ty, UnevaluatedConst, + DbInterner, ErrorGuaranteed, GenericArg, SolverDefId, Span, infer::{DbInternerInferExt, InferCtxt, canonical::instantiate::CanonicalExt}, }; @@ -66,7 +59,7 @@ impl<'db> SolverDelegate for SolverContext<'db> { (SolverContext(infcx), value, vars) } - fn fresh_var_for_kind_with_span(&self, arg: GenericArg<'db>, span: Span) -> GenericArg<'db> { + fn fresh_var_for_kind_with_span(&self, arg: GenericArg<'db>, _span: Span) -> GenericArg<'db> { match arg.kind() { GenericArgKind::Lifetime(_) => self.next_region_var().into(), GenericArgKind::Type(_) => self.next_ty_var().into(), @@ -76,15 +69,15 @@ impl<'db> SolverDelegate for SolverContext<'db> { fn leak_check( &self, - max_input_universe: rustc_type_ir::UniverseIndex, + _max_input_universe: rustc_type_ir::UniverseIndex, ) -> Result<(), NoSolution> { Ok(()) } fn well_formed_goals( &self, - param_env: ::ParamEnv, - arg: ::Term, + _param_env: ::ParamEnv, + _arg: ::Term, ) -> Option< Vec< rustc_type_ir::solve::Goal< @@ -123,7 +116,7 @@ impl<'db> SolverDelegate for SolverContext<'db> { fn instantiate_canonical_var( &self, kind: CanonicalVarKind<'db>, - span: ::Span, + _span: ::Span, var_values: &[GenericArg<'db>], universe_map: impl Fn(rustc_type_ir::UniverseIndex) -> rustc_type_ir::UniverseIndex, ) -> GenericArg<'db> { @@ -132,11 +125,11 @@ impl<'db> SolverDelegate for SolverContext<'db> { fn add_item_bounds_for_hidden_type( &self, - def_id: ::DefId, - args: ::GenericArgs, - param_env: ::ParamEnv, - hidden_ty: ::Ty, - goals: &mut Vec< + _def_id: ::DefId, + _args: ::GenericArgs, + _param_env: ::ParamEnv, + _hidden_ty: ::Ty, + _goals: &mut Vec< rustc_type_ir::solve::Goal< Self::Interner, ::Predicate, @@ -148,21 +141,10 @@ impl<'db> SolverDelegate for SolverContext<'db> { fn fetch_eligible_assoc_item( &self, - goal_trait_ref: rustc_type_ir::TraitRef, + _goal_trait_ref: rustc_type_ir::TraitRef, trait_assoc_def_id: SolverDefId, impl_id: ImplIdWrapper, ) -> Result, ErrorGuaranteed> { - let trait_ = self - .0 - .interner - .db() - .impl_trait(impl_id.0) - // ImplIds for impls where the trait ref can't be resolved should never reach solver - .expect("invalid impl passed to next-solver") - .skip_binder() - .def_id - .0; - let trait_data = trait_.trait_items(self.0.interner.db()); let impl_items = impl_id.0.impl_items(self.0.interner.db()); let id = match trait_assoc_def_id { 
SolverDefId::TypeAliasId(trait_assoc_id) => { @@ -208,16 +190,16 @@ impl<'db> SolverDelegate for SolverContext<'db> { fn is_transmutable( &self, - dst: ::Ty, - src: ::Ty, - assume: ::Const, + _dst: ::Ty, + _src: ::Ty, + _assume: ::Const, ) -> Result { unimplemented!() } fn evaluate_const( &self, - param_env: ::ParamEnv, + _param_env: ::ParamEnv, uv: rustc_type_ir::UnevaluatedConst, ) -> Option<::Const> { let c = match uv.def { @@ -236,7 +218,7 @@ impl<'db> SolverDelegate for SolverContext<'db> { Self::Interner, ::Predicate, >, - span: ::Span, + _span: ::Span, ) -> Option { if let Some(trait_pred) = goal.predicate.as_trait_clause() { if self.shallow_resolve(trait_pred.self_ty().skip_binder()).is_ty_var() @@ -279,8 +261,8 @@ impl<'db> SolverDelegate for SolverContext<'db> { let pred = goal.predicate.kind(); match pred.no_bound_vars()? { - PredicateKind::Clause(ClauseKind::RegionOutlives(outlives)) => Some(Certainty::Yes), - PredicateKind::Clause(ClauseKind::TypeOutlives(outlives)) => Some(Certainty::Yes), + PredicateKind::Clause(ClauseKind::RegionOutlives(_outlives)) => Some(Certainty::Yes), + PredicateKind::Clause(ClauseKind::TypeOutlives(_outlives)) => Some(Certainty::Yes), PredicateKind::Subtype(SubtypePredicate { a, b, .. }) | PredicateKind::Coerce(CoercePredicate { a, b }) => { if self.shallow_resolve(a).is_ty_var() && self.shallow_resolve(b).is_ty_var() { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs index 44b85abba0ef3..8932f519785c0 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs @@ -1,34 +1,30 @@ //! Things related to tys in the next-trait-solver. -use std::iter; use std::ops::ControlFlow; use hir_def::{ - AdtId, DefWithBodyId, GenericDefId, HasModule, TypeOrConstParamId, TypeParamId, + AdtId, HasModule, TypeParamId, hir::generics::{TypeOrConstParamData, TypeParamProvenance}, lang_item::LangItem, }; use hir_def::{TraitId, type_ref::Rawness}; -use intern::{Interned, Symbol, sym}; use rustc_abi::{Float, Integer, Size}; use rustc_ast_ir::{Mutability, try_visit, visit::VisitorResult}; use rustc_type_ir::{ - AliasTyKind, BoundVar, ClosureKind, CollectAndApply, FlagComputation, Flags, FloatTy, FloatVid, - InferTy, IntTy, IntVid, Interner, TyVid, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, - TypeVisitable, TypeVisitableExt, TypeVisitor, UintTy, Upcast, WithCachedTypeInfo, + AliasTyKind, BoundVar, ClosureKind, FlagComputation, Flags, FloatTy, FloatVid, InferTy, IntTy, + IntVid, Interner, TyVid, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, + TypeVisitableExt, TypeVisitor, UintTy, Upcast, WithCachedTypeInfo, inherent::{ - Abi, AdtDef as _, BoundExistentialPredicates, BoundVarLike, Const as _, GenericArgs as _, + AdtDef as _, BoundExistentialPredicates, BoundVarLike, Const as _, GenericArgs as _, IntoKind, ParamLike, PlaceholderLike, Safety as _, SliceLike, Ty as _, }, relate::Relate, solve::SizedTraitKind, walk::TypeWalker, }; -use salsa::plumbing::{AsId, FromId}; -use smallvec::SmallVec; use crate::{ - FnAbi, ImplTraitId, + ImplTraitId, db::HirDatabase, interner::InternedWrapperNoDebug, next_solver::{ @@ -83,7 +79,7 @@ impl<'db> Ty<'db> { Ty::new(interner, TyKind::Adt(AdtDef::new(adt_id, interner), args)) } - pub fn new_param(interner: DbInterner<'db>, id: TypeParamId, index: u32, name: Symbol) -> Self { + pub fn new_param(interner: DbInterner<'db>, id: TypeParamId, index: u32) -> 
Self { Ty::new(interner, TyKind::Param(ParamTy { id, index })) } @@ -404,7 +400,7 @@ impl<'db> Ty<'db> { Some(interner.fn_sig(callable).instantiate(interner, args)) } TyKind::FnPtr(sig, hdr) => Some(sig.with(hdr)), - TyKind::Closure(closure_id, closure_args) => closure_args + TyKind::Closure(_, closure_args) => closure_args .split_closure_args_untupled() .closure_sig_as_fn_ptr_ty .callable_sig(interner), @@ -1222,7 +1218,7 @@ pub struct ParamTy { impl ParamTy { pub fn to_ty<'db>(self, interner: DbInterner<'db>) -> Ty<'db> { - Ty::new_param(interner, self.id, self.index, sym::MISSING_NAME.clone()) + Ty::new_param(interner, self.id, self.index) } } @@ -1269,11 +1265,11 @@ impl<'db> TypeVisitable> for ErrorGuaranteed { impl<'db> TypeFoldable> for ErrorGuaranteed { fn try_fold_with>>( self, - folder: &mut F, + _folder: &mut F, ) -> Result { Ok(self) } - fn fold_with>>(self, folder: &mut F) -> Self { + fn fold_with>>(self, _folder: &mut F) -> Self { self } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/util.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/util.rs index ae240a942f576..97f536305805e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/util.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/util.rs @@ -5,20 +5,19 @@ use std::ops::{self, ControlFlow}; use base_db::Crate; use hir_def::lang_item::LangItem; -use hir_def::{BlockId, HasModule, ItemContainerId, Lookup}; +use hir_def::{BlockId, HasModule}; use intern::sym; use la_arena::Idx; use rustc_abi::{Float, HasDataLayout, Integer, IntegerType, Primitive, ReprOptions}; use rustc_type_ir::data_structures::IndexMap; use rustc_type_ir::inherent::{ - AdtDef, Const as _, GenericArg as _, GenericArgs as _, ParamEnv as _, Region as _, SliceLike, - Ty as _, + AdtDef, GenericArg as _, GenericArgs as _, ParamEnv as _, SliceLike, Ty as _, }; use rustc_type_ir::lang_items::SolverTraitLangItem; use rustc_type_ir::solve::SizedTraitKind; use rustc_type_ir::{ BoundVar, Canonical, DebruijnIndex, GenericArgKind, INNERMOST, Interner, PredicatePolarity, - TypeFlags, TypeVisitable, TypeVisitableExt, + TypeVisitableExt, }; use rustc_type_ir::{ ConstKind, CoroutineArgs, FloatTy, IntTy, RegionKind, TypeFolder, TypeSuperFoldable, @@ -29,17 +28,14 @@ use rustc_type_ir::{InferCtxtLike, TypeFoldable}; use crate::lower_nextsolver::{LifetimeElisionKind, TyLoweringContext}; use crate::next_solver::infer::InferCtxt; use crate::next_solver::{ - BoundConst, CanonicalVarKind, FxIndexMap, ParamEnv, Placeholder, PlaceholderConst, - PlaceholderRegion, TypingMode, + BoundConst, FxIndexMap, ParamEnv, Placeholder, PlaceholderConst, PlaceholderRegion, }; use crate::{ db::HirDatabase, - from_foreign_def_id, method_resolution::{TraitImpls, TyFingerprint}, }; use super::fold::{BoundVarReplacer, FnMutDelegate}; -use super::generics::generics; use super::{ AliasTerm, AliasTy, Binder, BoundRegion, BoundTy, BoundTyKind, BoundVarKind, BoundVarKinds, CanonicalVars, Clause, ClauseKind, Clauses, Const, DbInterner, EarlyBinder, GenericArg, @@ -530,7 +526,7 @@ pub(crate) fn mini_canonicalize<'db, T: TypeFoldable>>( max_universe: UniverseIndex::from_u32(1), variables: CanonicalVars::new_from_iter( context.cx(), - vars.iter().enumerate().map(|(idx, (k, v))| match (*k).kind() { + vars.iter().enumerate().map(|(idx, (k, _v))| match (*k).kind() { GenericArgKind::Type(ty) => match ty.kind() { TyKind::Int(..) | TyKind::Uint(..) => rustc_type_ir::CanonicalVarKind::Int, TyKind::Float(..) 
=> rustc_type_ir::CanonicalVarKind::Float, @@ -617,7 +613,7 @@ impl<'db> TypeFolder> for MiniCanonicalizer<'_, 'db> { } r } - RegionKind::ReVar(vid) => { + RegionKind::ReVar(_vid) => { let len = self.vars.len(); let var = *self.vars.entry(r.into()).or_insert(len); Region::new( @@ -646,7 +642,7 @@ impl<'db> TypeFolder> for MiniCanonicalizer<'_, 'db> { } c } - ConstKind::Infer(infer) => { + ConstKind::Infer(_infer) => { let len = self.vars.len(); let var = *self.vars.entry(c.into()).or_insert(len); Const::new( @@ -666,14 +662,8 @@ pub fn explicit_item_bounds<'db>( let db = interner.db(); match def_id { SolverDefId::TypeAliasId(type_alias) => { - let trait_ = match type_alias.lookup(db).container { - ItemContainerId::TraitId(t) => t, - _ => panic!("associated type not in trait"), - }; - // Lower bounds -- we could/should maybe move this to a separate query in `lower` let type_alias_data = db.type_alias_signature(type_alias); - let generic_params = generics(db, type_alias.into()); let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db); let mut ctx = TyLoweringContext::new( db, @@ -805,7 +795,7 @@ pub fn explicit_item_bounds<'db>( GenericArgs::new_from_iter(interner, [item_ty.into()]), ), term: match out.kind() { - GenericArgKind::Lifetime(lt) => panic!(), + GenericArgKind::Lifetime(_lt) => panic!(), GenericArgKind::Type(ty) => Term::Ty(ty), GenericArgKind::Const(const_) => Term::Const(const_), }, @@ -993,26 +983,6 @@ impl<'db> TypeFolder> for PlaceholderReplacer<'_, 'db> { } } -pub(crate) fn needs_normalization<'db, T: TypeVisitable>>( - infcx: &InferCtxt<'db>, - value: &T, -) -> bool { - let mut flags = TypeFlags::HAS_ALIAS; - - // Opaques are treated as rigid outside of `TypingMode::PostAnalysis`, - // so we can ignore those. - match infcx.typing_mode() { - // FIXME(#132279): We likely want to reveal opaques during post borrowck analysis - TypingMode::Coherence - | TypingMode::Analysis { .. } - | TypingMode::Borrowck { .. } - | TypingMode::PostBorrowckAnalysis { .. } => flags.remove(TypeFlags::HAS_TY_OPAQUE), - TypingMode::PostAnalysis => {} - } - - value.has_type_flags(flags) -} - pub fn sizedness_fast_path<'db>( tcx: DbInterner<'db>, predicate: Predicate<'db>, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/primitive.rs b/src/tools/rust-analyzer/crates/hir-ty/src/primitive.rs index d2901f7fc53d2..9ffb112fe6170 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/primitive.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/primitive.rs @@ -1,7 +1,7 @@ //! A few helper functions for dealing with primitives. 
-pub use chalk_ir::{FloatTy, IntTy, UintTy}; pub use hir_def::builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint}; +pub use rustc_type_ir::{FloatTy, IntTy, UintTy}; pub fn int_ty_to_string(ty: IntTy) -> &'static str { match ty { @@ -33,68 +33,3 @@ pub fn float_ty_to_string(ty: FloatTy) -> &'static str { FloatTy::F128 => "f128", } } - -pub fn int_ty_to_string_ns(ty: rustc_type_ir::IntTy) -> &'static str { - use rustc_type_ir::IntTy; - match ty { - IntTy::Isize => "isize", - IntTy::I8 => "i8", - IntTy::I16 => "i16", - IntTy::I32 => "i32", - IntTy::I64 => "i64", - IntTy::I128 => "i128", - } -} - -pub fn uint_ty_to_string_ns(ty: rustc_type_ir::UintTy) -> &'static str { - use rustc_type_ir::UintTy; - match ty { - UintTy::Usize => "usize", - UintTy::U8 => "u8", - UintTy::U16 => "u16", - UintTy::U32 => "u32", - UintTy::U64 => "u64", - UintTy::U128 => "u128", - } -} - -pub fn float_ty_to_string_ns(ty: rustc_type_ir::FloatTy) -> &'static str { - use rustc_type_ir::FloatTy; - match ty { - FloatTy::F16 => "f16", - FloatTy::F32 => "f32", - FloatTy::F64 => "f64", - FloatTy::F128 => "f128", - } -} - -pub(super) fn int_ty_from_builtin(t: BuiltinInt) -> IntTy { - match t { - BuiltinInt::Isize => IntTy::Isize, - BuiltinInt::I8 => IntTy::I8, - BuiltinInt::I16 => IntTy::I16, - BuiltinInt::I32 => IntTy::I32, - BuiltinInt::I64 => IntTy::I64, - BuiltinInt::I128 => IntTy::I128, - } -} - -pub(super) fn uint_ty_from_builtin(t: BuiltinUint) -> UintTy { - match t { - BuiltinUint::Usize => UintTy::Usize, - BuiltinUint::U8 => UintTy::U8, - BuiltinUint::U16 => UintTy::U16, - BuiltinUint::U32 => UintTy::U32, - BuiltinUint::U64 => UintTy::U64, - BuiltinUint::U128 => UintTy::U128, - } -} - -pub(super) fn float_ty_from_builtin(t: BuiltinFloat) -> FloatTy { - match t { - BuiltinFloat::F16 => FloatTy::F16, - BuiltinFloat::F32 => FloatTy::F32, - BuiltinFloat::F64 => FloatTy::F64, - BuiltinFloat::F128 => FloatTy::F128, - } -} diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index 36c8c3051cf52..78b4533a94b0e 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -1803,7 +1803,7 @@ impl Adt { let env = db.trait_environment(self.into()); let interner = DbInterner::new_with(db, Some(env.krate), env.block); let adt_id = AdtId::from(self); - let args = GenericArgs::for_item_with_defaults(interner, adt_id.into(), |_, _, id, _| { + let args = GenericArgs::for_item_with_defaults(interner, adt_id.into(), |_, id, _| { GenericArg::error_from_id(interner, id) }); db.layout_of_adt(adt_id, args, env) @@ -4184,8 +4184,7 @@ impl TypeParam { let resolver = self.id.parent().resolver(db); let interner = DbInterner::new_with(db, None, None); let index = hir_ty::param_idx(db, self.id.into()).unwrap(); - let name = self.name(db).symbol().clone(); - let ty = Ty::new_param(interner, self.id, index as u32, name); + let ty = Ty::new_param(interner, self.id, index as u32); Type::new_with_resolver_inner(db, &resolver, ty) } @@ -6438,7 +6437,7 @@ fn generic_args_from_tys<'db>( args: impl IntoIterator>, ) -> GenericArgs<'db> { let mut args = args.into_iter(); - GenericArgs::for_item(interner, def_id, |_, _, id, _| { + GenericArgs::for_item(interner, def_id, |_, id, _| { if matches!(id, GenericParamId::TypeParamId(_)) && let Some(arg) = args.next() { diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index eecca02440919..62ce3daab75df 100644 --- 
a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -1657,14 +1657,11 @@ impl<'db> SemanticsImpl<'db> { ) -> Option { let interner = DbInterner::new_with(self.db, None, None); let mut subst = subst.into_iter(); - let substs = hir_ty::next_solver::GenericArgs::for_item( - interner, - trait_.id.into(), - |_, _, id, _| { + let substs = + hir_ty::next_solver::GenericArgs::for_item(interner, trait_.id.into(), |_, id, _| { assert!(matches!(id, hir_def::GenericParamId::TypeParamId(_)), "expected a type"); subst.next().expect("too few subst").ty.into() - }, - ); + }); assert!(subst.next().is_none(), "too many subst"); Some(self.db.lookup_impl_method(env.env, func.into(), substs).0.into()) } From 2c48c398f77c4a490fc3a6c2a59d53d6cca4ed25 Mon Sep 17 00:00:00 2001 From: A4-Tacks Date: Mon, 20 Oct 2025 15:04:51 +0800 Subject: [PATCH 37/76] Fix missing RestPat for convert_named_struct_to_tuple_struct Example --- ```rust struct Inner; struct A$0 { inner: Inner } fn foo(A { .. }: A) {} ``` **Before this PR**: ```rust struct Inner; struct A(Inner); fn foo(A(): A) {} ``` **After this PR**: ```rust struct Inner; struct A(Inner); fn foo(A(..): A) {} ``` --- .../convert_named_struct_to_tuple_struct.rs | 34 +++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs index 8d27574eb2ca5..0847719d6922b 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs @@ -202,6 +202,9 @@ fn process_struct_name_reference( .record_pat_field_list()? .fields() .filter_map(|pat| pat.pat()) + .chain(record_struct_pat.record_pat_field_list()? + .rest_pat() + .map(Into::into)) ) .to_string() ); @@ -346,6 +349,37 @@ impl A { ); } + #[test] + fn convert_struct_and_rest_pat() { + check_assist( + convert_named_struct_to_tuple_struct, + r#" +struct Inner; +struct A$0 { inner: Inner } +fn foo(A { .. }: A) {} +"#, + r#" +struct Inner; +struct A(Inner); +fn foo(A(..): A) {} +"#, + ); + + check_assist( + convert_named_struct_to_tuple_struct, + r#" +struct Inner; +struct A$0 { inner: Inner, extra: Inner } +fn foo(A { inner, .. }: A) {} +"#, + r#" +struct Inner; +struct A(Inner, Inner); +fn foo(A(inner, ..): A) {} +"#, + ); + } + #[test] fn convert_simple_struct_cursor_on_visibility_keyword() { check_assist( From 9b0d532687e1f8039be5311d0893f2a77e4d5b61 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Fri, 17 Oct 2025 08:45:08 +0300 Subject: [PATCH 38/76] Rip Chalk out of the codebase! 
--- src/tools/rust-analyzer/Cargo.lock | 24 - src/tools/rust-analyzer/Cargo.toml | 4 - .../rust-analyzer/crates/hir-ty/Cargo.toml | 2 - .../crates/hir-ty/src/builder.rs | 211 -- .../crates/hir-ty/src/chalk_db.rs | 9 - .../crates/hir-ty/src/chalk_ext.rs | 46 - .../crates/hir-ty/src/consteval_chalk.rs | 108 - .../rust-analyzer/crates/hir-ty/src/db.rs | 233 +- .../diagnostics/match_check/pat_analysis.rs | 2 +- .../crates/hir-ty/src/display.rs | 22 +- .../rust-analyzer/crates/hir-ty/src/drop.rs | 4 +- .../crates/hir-ty/src/dyn_compatibility.rs | 52 +- .../crates/hir-ty/src/generics.rs | 47 - .../rust-analyzer/crates/hir-ty/src/infer.rs | 14 +- .../crates/hir-ty/src/infer/cast.rs | 2 +- .../crates/hir-ty/src/infer/diagnostics.rs | 4 +- .../crates/hir-ty/src/infer/expr.rs | 10 +- .../crates/hir-ty/src/infer/mutability.rs | 2 +- .../crates/hir-ty/src/infer/pat.rs | 6 +- .../crates/hir-ty/src/infer/path.rs | 4 +- .../crates/hir-ty/src/infer/unify.rs | 2 +- .../crates/hir-ty/src/inhabitedness.rs | 2 +- .../crates/hir-ty/src/interner.rs | 403 --- .../rust-analyzer/crates/hir-ty/src/layout.rs | 4 +- .../rust-analyzer/crates/hir-ty/src/lib.rs | 408 +-- .../rust-analyzer/crates/hir-ty/src/lower.rs | 2183 +++++++++++------ .../crates/hir-ty/src/lower/path.rs | 769 +++--- .../crates/hir-ty/src/lower_nextsolver.rs | 2138 ---------------- .../hir-ty/src/lower_nextsolver/path.rs | 1327 ---------- .../crates/hir-ty/src/mapping.rs | 169 -- .../crates/hir-ty/src/method_resolution.rs | 15 +- .../rust-analyzer/crates/hir-ty/src/mir.rs | 2 +- .../crates/hir-ty/src/mir/eval.rs | 16 +- .../crates/hir-ty/src/mir/eval/shim.rs | 2 +- .../crates/hir-ty/src/mir/eval/shim/simd.rs | 2 +- .../crates/hir-ty/src/next_solver.rs | 2 +- .../crates/hir-ty/src/next_solver/consts.rs | 3 +- .../hir-ty/src/next_solver/generic_arg.rs | 4 +- .../crates/hir-ty/src/next_solver/interner.rs | 19 +- .../crates/hir-ty/src/next_solver/mapping.rs | 1733 +------------ .../hir-ty/src/next_solver/predicate.rs | 5 +- .../crates/hir-ty/src/next_solver/ty.rs | 8 +- .../crates/hir-ty/src/next_solver/util.rs | 197 +- .../crates/hir-ty/src/tests/incremental.rs | 28 +- .../rust-analyzer/crates/hir-ty/src/tls.rs | 155 -- .../rust-analyzer/crates/hir-ty/src/traits.rs | 199 +- .../rust-analyzer/crates/hir-ty/src/utils.rs | 137 +- .../crates/hir-ty/src/variance.rs | 2 +- .../rust-analyzer/crates/hir/src/display.rs | 2 +- src/tools/rust-analyzer/crates/hir/src/lib.rs | 18 +- .../crates/hir/src/source_analyzer.rs | 10 +- .../crates/ide/src/hover/tests.rs | 2 +- 52 files changed, 2265 insertions(+), 8507 deletions(-) delete mode 100644 src/tools/rust-analyzer/crates/hir-ty/src/builder.rs delete mode 100644 src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs delete mode 100644 src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs delete mode 100644 src/tools/rust-analyzer/crates/hir-ty/src/consteval_chalk.rs delete mode 100644 src/tools/rust-analyzer/crates/hir-ty/src/interner.rs delete mode 100644 src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver.rs delete mode 100644 src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver/path.rs delete mode 100644 src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs delete mode 100644 src/tools/rust-analyzer/crates/hir-ty/src/tls.rs diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index 539f8cf1b9330..5eb71eb9d4f01 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -258,28 +258,6 @@ version = "0.2.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" -[[package]] -name = "chalk-derive" -version = "0.104.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ea9b1e80910f66ae87c772247591432032ef3f6a67367ff17f8343db05beafa" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "synstructure", -] - -[[package]] -name = "chalk-ir" -version = "0.104.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7047a516de16226cd17344d41a319d0ea1064bf9e60bd612ab341ab4a34bbfa8" -dependencies = [ - "bitflags 2.9.4", - "chalk-derive", -] - [[package]] name = "clap" version = "4.5.48" @@ -777,8 +755,6 @@ dependencies = [ "arrayvec", "base-db", "bitflags 2.9.4", - "chalk-derive", - "chalk-ir", "cov-mark", "either", "ena", diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml index f94fd37e52a76..8a108974681a1 100644 --- a/src/tools/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/Cargo.toml @@ -37,8 +37,6 @@ debug = 2 [patch.'crates-io'] # rowan = { path = "../rowan" } -# chalk-ir = { path = "../chalk/chalk-ir" } -# chalk-derive = { path = "../chalk/chalk-derive" } # line-index = { path = "lib/line-index" } # la-arena = { path = "lib/la-arena" } # lsp-server = { path = "lib/lsp-server" } @@ -110,8 +108,6 @@ arrayvec = "0.7.6" bitflags = "2.9.1" cargo_metadata = "0.21.0" camino = "1.1.10" -chalk-ir = "0.104.0" -chalk-derive = "0.104.0" crossbeam-channel = "0.5.15" dissimilar = "1.0.10" dot = "0.1.4" diff --git a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml index ec6563315407a..378a0f0382c36 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml @@ -24,8 +24,6 @@ oorandom = "11.1.5" tracing.workspace = true rustc-hash.workspace = true scoped-tls = "1.0.1" -chalk-ir.workspace = true -chalk-derive.workspace = true la-arena.workspace = true triomphe.workspace = true typed-arena = "2.0.2" diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs deleted file mode 100644 index 4cd0af28f33f8..0000000000000 --- a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs +++ /dev/null @@ -1,211 +0,0 @@ -//! `TyBuilder`, a helper for building instances of `Ty` and related types. - -use chalk_ir::{ - DebruijnIndex, - cast::{Cast, Caster}, -}; -use hir_def::{GenericDefId, GenericParamId, TraitId}; -use smallvec::SmallVec; - -use crate::{ - BoundVar, GenericArg, GenericArgData, Interner, Substitution, TraitRef, Ty, TyKind, - consteval::unknown_const_as_generic, - db::HirDatabase, - error_lifetime, - generics::generics, - infer::unify::InferenceTable, - next_solver::{ - DbInterner, EarlyBinder, - mapping::{ChalkToNextSolver, NextSolverToChalk}, - }, - to_chalk_trait_id, -}; - -#[derive(Debug, Clone, PartialEq, Eq)] -pub(crate) enum ParamKind { - Type, - Lifetime, - Const(Ty), -} - -/// This is a builder for `Ty` or anything that needs a `Substitution`. -pub(crate) struct TyBuilder { - /// The `data` field is used to keep track of what we're building (e.g. an - /// ADT, a `TraitRef`, ...). 
- data: D, - vec: SmallVec<[GenericArg; 2]>, - param_kinds: SmallVec<[ParamKind; 2]>, - parent_subst: Substitution, -} - -impl TyBuilder { - fn with_data(self, data: B) -> TyBuilder { - TyBuilder { - data, - vec: self.vec, - param_kinds: self.param_kinds, - parent_subst: self.parent_subst, - } - } -} - -impl TyBuilder { - fn new( - data: D, - param_kinds: SmallVec<[ParamKind; 2]>, - parent_subst: Option, - ) -> Self { - let parent_subst = parent_subst.unwrap_or_else(|| Substitution::empty(Interner)); - Self { data, vec: SmallVec::with_capacity(param_kinds.len()), param_kinds, parent_subst } - } - - fn build_internal(self) -> (D, Substitution) { - assert_eq!( - self.vec.len(), - self.param_kinds.len(), - "{} args received, {} expected ({:?})", - self.vec.len(), - self.param_kinds.len(), - &self.param_kinds - ); - for (a, e) in self.vec.iter().zip(self.param_kinds.iter()) { - self.assert_match_kind(a, e); - } - let subst = Substitution::from_iter( - Interner, - self.parent_subst.iter(Interner).cloned().chain(self.vec), - ); - (self.data, subst) - } - - pub(crate) fn remaining(&self) -> usize { - self.param_kinds.len() - self.vec.len() - } - - pub(crate) fn fill_with_bound_vars( - self, - debruijn: DebruijnIndex, - starting_from: usize, - ) -> Self { - // self.fill is inlined to make borrow checker happy - let mut this = self; - let other = &this.param_kinds[this.vec.len()..]; - let filler = (starting_from..).zip(other).map(|(idx, kind)| match kind { - ParamKind::Type => BoundVar::new(debruijn, idx).to_ty(Interner).cast(Interner), - ParamKind::Const(ty) => { - BoundVar::new(debruijn, idx).to_const(Interner, ty.clone()).cast(Interner) - } - ParamKind::Lifetime => { - BoundVar::new(debruijn, idx).to_lifetime(Interner).cast(Interner) - } - }); - this.vec.extend(filler.take(this.remaining()).casted(Interner)); - assert_eq!(this.remaining(), 0); - this - } - - #[tracing::instrument(skip_all)] - pub(crate) fn fill_with_inference_vars(self, table: &mut InferenceTable<'_>) -> Self { - self.fill(|x| { - match x { - ParamKind::Type => crate::next_solver::GenericArg::Ty(table.next_ty_var()), - ParamKind::Const(_) => table.next_const_var().into(), - ParamKind::Lifetime => table.next_region_var().into(), - } - .to_chalk(table.interner()) - }) - } - - pub(crate) fn fill(mut self, filler: impl FnMut(&ParamKind) -> GenericArg) -> Self { - self.vec.extend(self.param_kinds[self.vec.len()..].iter().map(filler)); - assert_eq!(self.remaining(), 0); - self - } - - fn assert_match_kind(&self, a: &chalk_ir::GenericArg, e: &ParamKind) { - match (a.data(Interner), e) { - (GenericArgData::Ty(_), ParamKind::Type) - | (GenericArgData::Const(_), ParamKind::Const(_)) - | (GenericArgData::Lifetime(_), ParamKind::Lifetime) => (), - _ => panic!("Mismatched kinds: {a:?}, {:?}, {:?}", self.vec, self.param_kinds), - } - } -} - -impl TyBuilder<()> { - pub(crate) fn usize() -> Ty { - TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize)).intern(Interner) - } - - pub(crate) fn unknown_subst( - db: &dyn HirDatabase, - def: impl Into, - ) -> Substitution { - let interner = DbInterner::conjure(); - let params = generics(db, def.into()); - Substitution::from_iter( - Interner, - params.iter_id().map(|id| match id { - GenericParamId::TypeParamId(_) => TyKind::Error.intern(Interner).cast(Interner), - GenericParamId::ConstParamId(id) => { - unknown_const_as_generic(db.const_param_ty_ns(id)) - .to_chalk(interner) - .cast(Interner) - } - GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner), - }), - ) - } - - 
#[tracing::instrument(skip_all)] - pub(crate) fn subst_for_def( - db: &dyn HirDatabase, - def: impl Into, - parent_subst: Option, - ) -> TyBuilder<()> { - let generics = generics(db, def.into()); - assert!(generics.parent_generics().is_some() == parent_subst.is_some()); - let params = generics - .iter_self() - .map(|(id, _data)| match id { - GenericParamId::TypeParamId(_) => ParamKind::Type, - GenericParamId::ConstParamId(id) => ParamKind::Const(db.const_param_ty(id)), - GenericParamId::LifetimeParamId(_) => ParamKind::Lifetime, - }) - .collect(); - TyBuilder::new((), params, parent_subst) - } - - pub(crate) fn build(self) -> Substitution { - let ((), subst) = self.build_internal(); - subst - } -} - -impl TyBuilder { - pub(crate) fn trait_ref(db: &dyn HirDatabase, def: TraitId) -> TyBuilder { - TyBuilder::subst_for_def(db, def, None).with_data(def) - } - - pub(crate) fn build(self) -> TraitRef { - let (trait_id, substitution) = self.build_internal(); - TraitRef { trait_id: to_chalk_trait_id(trait_id), substitution } - } -} - -impl<'db, T: rustc_type_ir::TypeFoldable>> TyBuilder> { - pub(crate) fn build(self, interner: DbInterner<'db>) -> T { - let (b, subst) = self.build_internal(); - let args: crate::next_solver::GenericArgs<'db> = subst.to_nextsolver(interner); - b.instantiate(interner, args) - } -} - -impl<'db> TyBuilder>> { - pub(crate) fn impl_self_ty( - db: &'db dyn HirDatabase, - def: hir_def::ImplId, - ) -> TyBuilder>> { - TyBuilder::subst_for_def(db, def, None).with_data(db.impl_self_ty(def)) - } -} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs deleted file mode 100644 index a6b859b37210f..0000000000000 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs +++ /dev/null @@ -1,9 +0,0 @@ -//! The implementation of `RustIrDatabase` for Chalk, which provides information -//! about the code that Chalk needs. - -use crate::Interner; - -pub(crate) type AssocTypeId = chalk_ir::AssocTypeId; -pub(crate) type TraitId = chalk_ir::TraitId; -pub(crate) type AdtId = chalk_ir::AdtId; -pub(crate) type ImplId = chalk_ir::ImplId; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs deleted file mode 100644 index 4ea563d46e6e7..0000000000000 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs +++ /dev/null @@ -1,46 +0,0 @@ -//! Various extensions traits for Chalk types. - -use hir_def::{ItemContainerId, Lookup, TraitId}; - -use crate::{ - Interner, ProjectionTy, Substitution, TraitRef, Ty, db::HirDatabase, from_assoc_type_id, - from_chalk_trait_id, generics::generics, to_chalk_trait_id, -}; - -pub(crate) trait ProjectionTyExt { - fn trait_ref(&self, db: &dyn HirDatabase) -> TraitRef; - fn trait_(&self, db: &dyn HirDatabase) -> TraitId; - fn self_type_parameter(&self, db: &dyn HirDatabase) -> Ty; -} - -impl ProjectionTyExt for ProjectionTy { - fn trait_ref(&self, db: &dyn HirDatabase) -> TraitRef { - // FIXME: something like `Split` trait from chalk-solve might be nice. 
- let generics = generics(db, from_assoc_type_id(self.associated_ty_id).into()); - let parent_len = generics.parent_generics().map_or(0, |g| g.len_self()); - let substitution = - Substitution::from_iter(Interner, self.substitution.iter(Interner).take(parent_len)); - TraitRef { trait_id: to_chalk_trait_id(self.trait_(db)), substitution } - } - - fn trait_(&self, db: &dyn HirDatabase) -> TraitId { - match from_assoc_type_id(self.associated_ty_id).lookup(db).container { - ItemContainerId::TraitId(it) => it, - _ => panic!("projection ty without parent trait"), - } - } - - fn self_type_parameter(&self, db: &dyn HirDatabase) -> Ty { - self.trait_ref(db).self_type_parameter(Interner) - } -} - -pub(crate) trait TraitRefExt { - fn hir_trait_id(&self) -> TraitId; -} - -impl TraitRefExt for TraitRef { - fn hir_trait_id(&self) -> TraitId { - from_chalk_trait_id(self.trait_id) - } -} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval_chalk.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval_chalk.rs deleted file mode 100644 index 07b783ea92925..0000000000000 --- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval_chalk.rs +++ /dev/null @@ -1,108 +0,0 @@ -//! Constant evaluation details - -use base_db::Crate; -use chalk_ir::{BoundVar, DebruijnIndex, cast::Cast}; -use hir_def::{ - expr_store::{HygieneId, path::Path}, - resolver::{Resolver, ValueNs}, - type_ref::LiteralConstRef, -}; -use stdx::never; - -use crate::{ - Const, ConstData, ConstScalar, ConstValue, GenericArg, Interner, MemoryMap, Substitution, - TraitEnvironment, Ty, - db::HirDatabase, - generics::Generics, - lower::ParamLoweringMode, - next_solver::{DbInterner, mapping::ChalkToNextSolver}, - to_placeholder_idx, -}; - -pub(crate) fn path_to_const<'g>( - db: &dyn HirDatabase, - resolver: &Resolver<'_>, - path: &Path, - mode: ParamLoweringMode, - args: impl FnOnce() -> &'g Generics, - debruijn: DebruijnIndex, - expected_ty: Ty, -) -> Option { - match resolver.resolve_path_in_value_ns_fully(db, path, HygieneId::ROOT) { - Some(ValueNs::GenericParam(p)) => { - let ty = db.const_param_ty(p); - let args = args(); - let value = match mode { - ParamLoweringMode::Placeholder => { - let idx = args.type_or_const_param_idx(p.into()).unwrap(); - ConstValue::Placeholder(to_placeholder_idx(db, p.into(), idx as u32)) - } - ParamLoweringMode::Variable => match args.type_or_const_param_idx(p.into()) { - Some(it) => ConstValue::BoundVar(BoundVar::new(debruijn, it)), - None => { - never!( - "Generic list doesn't contain this param: {:?}, {:?}, {:?}", - args, - path, - p - ); - return None; - } - }, - }; - Some(ConstData { ty, value }.intern(Interner)) - } - Some(ValueNs::ConstId(c)) => Some(intern_const_scalar( - ConstScalar::UnevaluatedConst(c.into(), Substitution::empty(Interner)), - expected_ty, - )), - // FIXME: With feature(adt_const_params), we also need to consider other things here, e.g. struct constructors. 
- _ => None, - } -} - -pub(crate) fn unknown_const(ty: Ty) -> Const { - ConstData { - ty, - value: ConstValue::Concrete(chalk_ir::ConcreteConst { interned: ConstScalar::Unknown }), - } - .intern(Interner) -} - -pub(crate) fn unknown_const_as_generic(ty: Ty) -> GenericArg { - unknown_const(ty).cast(Interner) -} - -/// Interns a constant scalar with the given type -pub(crate) fn intern_const_scalar(value: ConstScalar, ty: Ty) -> Const { - ConstData { ty, value: ConstValue::Concrete(chalk_ir::ConcreteConst { interned: value }) } - .intern(Interner) -} - -/// Interns a constant scalar with the given type -pub(crate) fn intern_const_ref( - db: &dyn HirDatabase, - value: &LiteralConstRef, - ty: Ty, - krate: Crate, -) -> Const { - let interner = DbInterner::new_with(db, Some(krate), None); - let layout = || db.layout_of_ty(ty.to_nextsolver(interner), TraitEnvironment::empty(krate)); - let bytes = match value { - LiteralConstRef::Int(i) => { - // FIXME: We should handle failure of layout better. - let size = layout().map(|it| it.size.bytes_usize()).unwrap_or(16); - ConstScalar::Bytes(i.to_le_bytes()[0..size].into(), MemoryMap::default()) - } - LiteralConstRef::UInt(i) => { - let size = layout().map(|it| it.size.bytes_usize()).unwrap_or(16); - ConstScalar::Bytes(i.to_le_bytes()[0..size].into(), MemoryMap::default()) - } - LiteralConstRef::Bool(b) => ConstScalar::Bytes(Box::new([*b as u8]), MemoryMap::default()), - LiteralConstRef::Char(c) => { - ConstScalar::Bytes((*c as u32).to_le_bytes().into(), MemoryMap::default()) - } - LiteralConstRef::Unknown => ConstScalar::Unknown, - }; - intern_const_scalar(bytes, ty) -} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs index a4c19eea162e2..c79ff98578980 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs @@ -1,8 +1,7 @@ //! The home of `HirDatabase`, which is the Salsa database containing all the //! type inference-related queries. 
-use base_db::Crate; -use base_db::target::TargetLoadError; +use base_db::{Crate, target::TargetLoadError}; use hir_def::{ AdtId, BlockId, CallableDefId, ConstParamId, DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId, GenericDefId, ImplId, LifetimeParamId, LocalFieldId, StaticId, TraitId, @@ -16,13 +15,14 @@ use smallvec::SmallVec; use triomphe::Arc; use crate::{ - Binders, ImplTraitId, ImplTraits, InferenceResult, TraitEnvironment, Ty, TyDefId, ValueTyDefId, + ImplTraitId, InferenceResult, TraitEnvironment, TyDefId, ValueTyDefId, consteval::ConstEvalError, dyn_compatibility::DynCompatibilityViolation, layout::{Layout, LayoutError}, - lower::{Diagnostics, GenericDefaults, GenericPredicates}, + lower::{Diagnostics, GenericDefaults, GenericPredicates, ImplTraits}, method_resolution::{InherentImpls, TraitImpls, TyFingerprint}, mir::{BorrowckResult, MirBody, MirLowerError}, + next_solver::{Const, EarlyBinder, GenericArgs, PolyFnSig, TraitRef, Ty, VariancesOf}, }; #[query_group::query_group] @@ -51,7 +51,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { fn monomorphized_mir_body<'db>( &'db self, def: DefWithBodyId, - subst: crate::next_solver::GenericArgs<'db>, + subst: GenericArgs<'db>, env: Arc>, ) -> Result>, MirLowerError<'db>>; @@ -59,7 +59,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { fn monomorphized_mir_body_for_closure<'db>( &'db self, def: InternedClosureId, - subst: crate::next_solver::GenericArgs<'db>, + subst: GenericArgs<'db>, env: Arc>, ) -> Result>, MirLowerError<'db>>; @@ -75,16 +75,13 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { fn const_eval<'db>( &'db self, def: GeneralConstId, - subst: crate::next_solver::GenericArgs<'db>, + subst: GenericArgs<'db>, trait_env: Option>>, - ) -> Result, ConstEvalError<'db>>; + ) -> Result, ConstEvalError<'db>>; #[salsa::invoke(crate::consteval::const_eval_static_query)] #[salsa::cycle(cycle_result = crate::consteval::const_eval_static_cycle_result)] - fn const_eval_static<'db>( - &'db self, - def: StaticId, - ) -> Result, ConstEvalError<'db>>; + fn const_eval_static<'db>(&'db self, def: StaticId) -> Result, ConstEvalError<'db>>; #[salsa::invoke(crate::consteval::const_eval_discriminant_variant)] #[salsa::cycle(cycle_result = crate::consteval::const_eval_discriminant_cycle_result)] @@ -99,8 +96,8 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { &'db self, env: Arc>, func: FunctionId, - fn_subst: crate::next_solver::GenericArgs<'db>, - ) -> (FunctionId, crate::next_solver::GenericArgs<'db>); + fn_subst: GenericArgs<'db>, + ) -> (FunctionId, GenericArgs<'db>); // endregion:mir @@ -109,7 +106,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { fn layout_of_adt<'db>( &'db self, def: AdtId, - args: crate::next_solver::GenericArgs<'db>, + args: GenericArgs<'db>, trait_env: Arc>, ) -> Result, LayoutError>; @@ -117,7 +114,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { #[salsa::cycle(cycle_result = crate::layout::layout_of_ty_cycle_result)] fn layout_of_ty<'db>( &'db self, - ty: crate::next_solver::Ty<'db>, + ty: Ty<'db>, env: Arc>, ) -> Result, LayoutError>; @@ -127,149 +124,130 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { #[salsa::invoke(crate::dyn_compatibility::dyn_compatibility_of_trait_query)] fn dyn_compatibility_of_trait(&self, trait_: TraitId) -> Option; - #[salsa::invoke(crate::lower_nextsolver::ty_query)] + #[salsa::invoke(crate::lower::ty_query)] #[salsa::transparent] - fn ty<'db>( - &'db self, - def: TyDefId, - ) -> crate::next_solver::EarlyBinder<'db, 
crate::next_solver::Ty<'db>>; + fn ty<'db>(&'db self, def: TyDefId) -> EarlyBinder<'db, Ty<'db>>; - #[salsa::invoke(crate::lower_nextsolver::type_for_type_alias_with_diagnostics_query)] - #[salsa::cycle(cycle_result = crate::lower_nextsolver::type_for_type_alias_with_diagnostics_cycle_result)] + #[salsa::invoke(crate::lower::type_for_type_alias_with_diagnostics_query)] + #[salsa::cycle(cycle_result = crate::lower::type_for_type_alias_with_diagnostics_cycle_result)] fn type_for_type_alias_with_diagnostics<'db>( &'db self, def: TypeAliasId, - ) -> (crate::next_solver::EarlyBinder<'db, crate::next_solver::Ty<'db>>, Diagnostics); + ) -> (EarlyBinder<'db, Ty<'db>>, Diagnostics); /// Returns the type of the value of the given constant, or `None` if the `ValueTyDefId` is /// a `StructId` or `EnumVariantId` with a record constructor. - #[salsa::invoke(crate::lower_nextsolver::value_ty_query)] - fn value_ty<'db>( - &'db self, - def: ValueTyDefId, - ) -> Option>>; + #[salsa::invoke(crate::lower::value_ty_query)] + fn value_ty<'db>(&'db self, def: ValueTyDefId) -> Option>>; - #[salsa::invoke(crate::lower_nextsolver::impl_self_ty_with_diagnostics_query)] - #[salsa::cycle(cycle_result = crate::lower_nextsolver::impl_self_ty_with_diagnostics_cycle_result)] + #[salsa::invoke(crate::lower::impl_self_ty_with_diagnostics_query)] + #[salsa::cycle(cycle_result = crate::lower::impl_self_ty_with_diagnostics_cycle_result)] fn impl_self_ty_with_diagnostics<'db>( &'db self, def: ImplId, - ) -> (crate::next_solver::EarlyBinder<'db, crate::next_solver::Ty<'db>>, Diagnostics); + ) -> (EarlyBinder<'db, Ty<'db>>, Diagnostics); - #[salsa::invoke(crate::lower_nextsolver::impl_self_ty_query)] + #[salsa::invoke(crate::lower::impl_self_ty_query)] #[salsa::transparent] - fn impl_self_ty<'db>( - &'db self, - def: ImplId, - ) -> crate::next_solver::EarlyBinder<'db, crate::next_solver::Ty<'db>>; + fn impl_self_ty<'db>(&'db self, def: ImplId) -> EarlyBinder<'db, Ty<'db>>; // FIXME: Make this a non-interned query. - #[salsa::invoke_interned(crate::lower_nextsolver::const_param_ty_with_diagnostics_query)] - #[salsa::cycle(cycle_result = crate::lower_nextsolver::const_param_ty_with_diagnostics_cycle_result)] - fn const_param_ty_with_diagnostics<'db>( - &'db self, - def: ConstParamId, - ) -> (crate::next_solver::Ty<'db>, Diagnostics); + #[salsa::invoke_interned(crate::lower::const_param_ty_with_diagnostics_query)] + #[salsa::cycle(cycle_result = crate::lower::const_param_ty_with_diagnostics_cycle_result)] + fn const_param_ty_with_diagnostics<'db>(&'db self, def: ConstParamId) + -> (Ty<'db>, Diagnostics); - // FIXME: Make this a non-interned query. 
- #[salsa::invoke_interned(crate::lower::const_param_ty_query)] - #[salsa::cycle(cycle_result = crate::lower::const_param_ty_cycle_result)] - fn const_param_ty(&self, def: ConstParamId) -> Ty; + #[salsa::invoke(crate::lower::const_param_ty_query)] + #[salsa::transparent] + fn const_param_ty_ns<'db>(&'db self, def: ConstParamId) -> Ty<'db>; - #[salsa::invoke(crate::lower_nextsolver::impl_trait_with_diagnostics_query)] + #[salsa::invoke(crate::lower::impl_trait_with_diagnostics_query)] fn impl_trait_with_diagnostics<'db>( &'db self, def: ImplId, - ) -> Option<( - crate::next_solver::EarlyBinder<'db, crate::next_solver::TraitRef<'db>>, - Diagnostics, - )>; + ) -> Option<(EarlyBinder<'db, TraitRef<'db>>, Diagnostics)>; - #[salsa::invoke(crate::lower_nextsolver::impl_trait_query)] + #[salsa::invoke(crate::lower::impl_trait_query)] #[salsa::transparent] - fn impl_trait<'db>( - &'db self, - def: ImplId, - ) -> Option>>; + fn impl_trait<'db>(&'db self, def: ImplId) -> Option>>; - #[salsa::invoke(crate::lower_nextsolver::field_types_with_diagnostics_query)] + #[salsa::invoke(crate::lower::field_types_with_diagnostics_query)] fn field_types_with_diagnostics<'db>( &'db self, var: VariantId, - ) -> ( - Arc< - ArenaMap< - LocalFieldId, - crate::next_solver::EarlyBinder<'db, crate::next_solver::Ty<'db>>, - >, - >, - Diagnostics, - ); + ) -> (Arc>>>, Diagnostics); #[salsa::invoke(crate::lower::field_types_query)] #[salsa::transparent] - fn field_types(&self, var: VariantId) -> Arc>>; + fn field_types<'db>( + &'db self, + var: VariantId, + ) -> Arc>>>; - #[salsa::invoke(crate::lower_nextsolver::callable_item_signature_query)] + #[salsa::invoke(crate::lower::callable_item_signature_query)] fn callable_item_signature<'db>( &'db self, def: CallableDefId, - ) -> crate::next_solver::EarlyBinder<'db, crate::next_solver::PolyFnSig<'db>>; + ) -> EarlyBinder<'db, PolyFnSig<'db>>; #[salsa::invoke(crate::lower::return_type_impl_traits)] - fn return_type_impl_traits(&self, def: FunctionId) -> Option>>; + fn return_type_impl_traits<'db>( + &'db self, + def: FunctionId, + ) -> Option>>>; #[salsa::invoke(crate::lower::type_alias_impl_traits)] - fn type_alias_impl_traits(&self, def: TypeAliasId) -> Option>>; - - #[salsa::invoke(crate::lower::generic_predicates_for_param_query)] - #[salsa::cycle(cycle_result = crate::lower::generic_predicates_for_param_cycle_result)] - fn generic_predicates_for_param( - &self, - def: GenericDefId, - param_id: TypeOrConstParamId, - assoc_name: Option, - ) -> GenericPredicates; - - #[salsa::invoke(crate::lower::generic_predicates_query)] - fn generic_predicates(&self, def: GenericDefId) -> GenericPredicates; + fn type_alias_impl_traits<'db>( + &'db self, + def: TypeAliasId, + ) -> Option>>>; - #[salsa::invoke( - crate::lower_nextsolver::generic_predicates_without_parent_with_diagnostics_query - )] + #[salsa::invoke(crate::lower::generic_predicates_without_parent_with_diagnostics_query)] fn generic_predicates_without_parent_with_diagnostics<'db>( &'db self, def: GenericDefId, - ) -> (crate::lower_nextsolver::GenericPredicates<'db>, Diagnostics); + ) -> (GenericPredicates<'db>, Diagnostics); - #[salsa::invoke(crate::lower_nextsolver::generic_predicates_without_parent_query)] + #[salsa::invoke(crate::lower::generic_predicates_without_parent_query)] #[salsa::transparent] fn generic_predicates_without_parent<'db>( &'db self, def: GenericDefId, - ) -> crate::lower_nextsolver::GenericPredicates<'db>; + ) -> GenericPredicates<'db>; + + 
#[salsa::invoke(crate::lower::generic_predicates_for_param_query)] + #[salsa::cycle(cycle_result = crate::lower::generic_predicates_for_param_cycle_result)] + fn generic_predicates_for_param<'db>( + &'db self, + def: GenericDefId, + param_id: TypeOrConstParamId, + assoc_name: Option, + ) -> GenericPredicates<'db>; + + #[salsa::invoke(crate::lower::generic_predicates_query)] + fn generic_predicates<'db>(&'db self, def: GenericDefId) -> GenericPredicates<'db>; - #[salsa::invoke(crate::lower_nextsolver::trait_environment_for_body_query)] + #[salsa::invoke(crate::lower::trait_environment_for_body_query)] #[salsa::transparent] fn trait_environment_for_body<'db>(&'db self, def: DefWithBodyId) -> Arc>; - #[salsa::invoke(crate::lower_nextsolver::trait_environment_query)] + #[salsa::invoke(crate::lower::trait_environment_query)] fn trait_environment<'db>(&'db self, def: GenericDefId) -> Arc>; #[salsa::invoke(crate::lower::generic_defaults_with_diagnostics_query)] #[salsa::cycle(cycle_result = crate::lower::generic_defaults_with_diagnostics_cycle_result)] - fn generic_defaults_with_diagnostics( - &self, + fn generic_defaults_with_diagnostics<'db>( + &'db self, def: GenericDefId, - ) -> (GenericDefaults, Diagnostics); + ) -> (GenericDefaults<'db>, Diagnostics); /// This returns an empty list if no parameter has default. /// /// The binders of the returned defaults are only up to (not including) this parameter. #[salsa::invoke(crate::lower::generic_defaults_query)] #[salsa::transparent] - fn generic_defaults(&self, def: GenericDefId) -> GenericDefaults; + fn generic_defaults<'db>(&'db self, def: GenericDefId) -> GenericDefaults<'db>; #[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)] fn inherent_impls_in_crate(&self, krate: Crate) -> Arc; @@ -297,7 +275,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { #[salsa::invoke(TraitImpls::trait_impls_in_deps_query)] fn trait_impls_in_deps(&self, krate: Crate) -> Arc<[Arc]>; - // Interned IDs for Chalk integration + // Interned IDs for solver integration #[salsa::interned] fn intern_impl_trait_id(&self, id: ImplTraitId) -> InternedOpaqueTyId; @@ -313,66 +291,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { // cycle_initial = crate::variance::variances_of_cycle_initial, cycle_result = crate::variance::variances_of_cycle_initial, )] - fn variances_of(&self, def: GenericDefId) -> crate::next_solver::VariancesOf<'_>; - - // next trait solver - - #[salsa::invoke(crate::lower_nextsolver::const_param_ty_query)] - #[salsa::transparent] - fn const_param_ty_ns<'db>(&'db self, def: ConstParamId) -> crate::next_solver::Ty<'db>; - - #[salsa::invoke(crate::lower_nextsolver::field_types_query)] - #[salsa::transparent] - fn field_types_ns<'db>( - &'db self, - var: VariantId, - ) -> Arc< - ArenaMap>>, - >; - - #[salsa::invoke(crate::lower_nextsolver::return_type_impl_traits)] - fn return_type_impl_traits_ns<'db>( - &'db self, - def: FunctionId, - ) -> Option>>>; - - #[salsa::invoke(crate::lower_nextsolver::type_alias_impl_traits)] - fn type_alias_impl_traits_ns<'db>( - &'db self, - def: TypeAliasId, - ) -> Option>>>; - - #[salsa::invoke(crate::lower_nextsolver::generic_predicates_for_param_query)] - #[salsa::cycle(cycle_result = crate::lower_nextsolver::generic_predicates_for_param_cycle_result)] - fn generic_predicates_for_param_ns<'db>( - &'db self, - def: GenericDefId, - param_id: TypeOrConstParamId, - assoc_name: Option, - ) -> crate::lower_nextsolver::GenericPredicates<'db>; - - 
#[salsa::invoke(crate::lower_nextsolver::generic_predicates_query)] - fn generic_predicates_ns<'db>( - &'db self, - def: GenericDefId, - ) -> crate::lower_nextsolver::GenericPredicates<'db>; - - #[salsa::invoke(crate::lower_nextsolver::generic_defaults_with_diagnostics_query)] - #[salsa::cycle(cycle_result = crate::lower_nextsolver::generic_defaults_with_diagnostics_cycle_result)] - fn generic_defaults_ns_with_diagnostics<'db>( - &'db self, - def: GenericDefId, - ) -> (crate::lower_nextsolver::GenericDefaults<'db>, Diagnostics); - - /// This returns an empty list if no parameter has default. - /// - /// The binders of the returned defaults are only up to (not including) this parameter. - #[salsa::invoke(crate::lower_nextsolver::generic_defaults_query)] - #[salsa::transparent] - fn generic_defaults_ns<'db>( - &'db self, - def: GenericDefId, - ) -> crate::lower_nextsolver::GenericDefaults<'db>; + fn variances_of(&self, def: GenericDefId) -> VariancesOf<'_>; } #[test] diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs index f0efadeafcea7..fb942e336e659 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs @@ -150,7 +150,7 @@ impl<'a, 'db> MatchCheckCtx<'a, 'db> { ) -> impl Iterator)> { let (_, substs) = ty.as_adt().unwrap(); - let field_tys = self.db.field_types_ns(variant); + let field_tys = self.db.field_types(variant); let fields_len = variant.fields(self.db).fields().len() as u32; (0..fields_len).map(|idx| LocalFieldId::from_raw(idx.into())).map(move |fid| { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs index 2c6cbdd03f13c..2b92408f0f6b8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs @@ -626,7 +626,7 @@ fn write_projection<'db>( // FIXME: We shouldn't use `param.id`, it should be removed. We should know the // `GenericDefId` from the formatted type (store it inside the `HirFormatter`). 
let bounds = - f.db.generic_predicates_ns(param.id.parent()) + f.db.generic_predicates(param.id.parent()) .instantiate_identity() .into_iter() .flatten() @@ -902,7 +902,7 @@ fn render_const_scalar_inner<'db>( hir_def::AdtId::StructId(s) => { let data = f.db.struct_signature(s); write!(f, "{}", data.name.display(f.db, f.edition()))?; - let field_types = f.db.field_types_ns(s.into()); + let field_types = f.db.field_types(s.into()); render_variant_after_name( s.fields(f.db), f, @@ -934,7 +934,7 @@ fn render_const_scalar_inner<'db>( .1 .display(f.db, f.edition()) )?; - let field_types = f.db.field_types_ns(var_id.into()); + let field_types = f.db.field_types(var_id.into()); render_variant_after_name( var_id.fields(f.db), f, @@ -1121,7 +1121,7 @@ impl<'db> HirDisplay<'db> for Ty<'db> { let impl_trait_id = db.lookup_intern_impl_trait_id(opaque_ty_id); if let ImplTraitId::ReturnTypeImplTrait(func, idx) = impl_trait_id { let datas = db - .return_type_impl_traits_ns(func) + .return_type_impl_traits(func) .expect("impl trait id without data"); let data = (*datas).as_ref().map_bound(|rpit| { &rpit.impl_traits[idx.to_nextsolver(interner)].predicates @@ -1353,9 +1353,8 @@ impl<'db> HirDisplay<'db> for Ty<'db> { let impl_trait_id = db.lookup_intern_impl_trait_id(opaque_ty_id); match impl_trait_id { ImplTraitId::ReturnTypeImplTrait(func, idx) => { - let datas = db - .return_type_impl_traits_ns(func) - .expect("impl trait id without data"); + let datas = + db.return_type_impl_traits(func).expect("impl trait id without data"); let data = (*datas).as_ref().map_bound(|rpit| { &rpit.impl_traits[idx.to_nextsolver(interner)].predicates }); @@ -1373,9 +1372,8 @@ impl<'db> HirDisplay<'db> for Ty<'db> { // FIXME: it would maybe be good to distinguish this from the alias type (when debug printing), and to show the substitution } ImplTraitId::TypeAliasImplTrait(alias, idx) => { - let datas = db - .type_alias_impl_traits_ns(alias) - .expect("impl trait id without data"); + let datas = + db.type_alias_impl_traits(alias).expect("impl trait id without data"); let data = (*datas).as_ref().map_bound(|rpit| { &rpit.impl_traits[idx.to_nextsolver(interner)].predicates }); @@ -1501,7 +1499,7 @@ impl<'db> HirDisplay<'db> for Ty<'db> { } TypeParamProvenance::ArgumentImplTrait => { let bounds = db - .generic_predicates_ns(param.id.parent()) + .generic_predicates(param.id.parent()) .instantiate_identity() .into_iter() .flatten() @@ -1621,7 +1619,7 @@ fn generic_args_sans_defaults<'ga, 'db>( parameters: &'ga [GenericArg<'db>], ) -> &'ga [GenericArg<'db>] { if f.display_kind.is_source_code() || f.omit_verbose_types() { - match generic_def.map(|generic_def_id| f.db.generic_defaults_ns(generic_def_id)) { + match generic_def.map(|generic_def_id| f.db.generic_defaults(generic_def_id)) { None => parameters, Some(default_parameters) => { let should_show = |arg: GenericArg<'db>, i: usize| match default_parameters.get(i) { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs b/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs index aaf274799c63f..b09d1fb196c4c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs @@ -85,7 +85,7 @@ fn has_drop_glue_impl<'db>( { return DropGlue::None; } - db.field_types_ns(id.into()) + db.field_types(id.into()) .iter() .map(|(_, field_ty)| { has_drop_glue_impl( @@ -105,7 +105,7 @@ fn has_drop_glue_impl<'db>( .variants .iter() .map(|&(variant, _, _)| { - db.field_types_ns(variant.into()) + db.field_types(variant.into()) .iter() 
.map(|(_, field_ty)| { has_drop_glue_impl( diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs index e35a798703294..437141e41db92 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs @@ -18,10 +18,10 @@ use smallvec::SmallVec; use crate::{ ImplTraitId, db::{HirDatabase, InternedOpaqueTyId}, - lower_nextsolver::associated_ty_item_bounds, + lower::associated_ty_item_bounds, next_solver::{ - Clause, Clauses, DbInterner, GenericArgs, ParamEnv, SolverDefId, TraitPredicate, TraitRef, - TypingMode, infer::DbInternerInferExt, mk_param, + Binder, Clause, Clauses, DbInterner, EarlyBinder, GenericArgs, Goal, ParamEnv, ParamTy, + SolverDefId, TraitPredicate, TraitRef, Ty, TypingMode, infer::DbInternerInferExt, mk_param, }, traits::next_trait_solve_in_ctxt, }; @@ -136,7 +136,7 @@ pub fn generics_require_sized_self(db: &dyn HirDatabase, def: GenericDefId) -> b }; let interner = DbInterner::new_with(db, Some(krate), None); - let predicates = db.generic_predicates_ns(def); + let predicates = db.generic_predicates(def); // FIXME: We should use `explicit_predicates_of` here, which hasn't been implemented to // rust-analyzer yet // https://github.com/rust-lang/rust/blob/ddaf12390d3ffb7d5ba74491a48f3cd528e5d777/compiler/rustc_hir_analysis/src/collect/predicates_of.rs#L490 @@ -162,7 +162,7 @@ pub fn generics_require_sized_self(db: &dyn HirDatabase, def: GenericDefId) -> b // but we don't have good way to render such locations. // So, just return single boolean value for existence of such `Self` reference fn predicates_reference_self(db: &dyn HirDatabase, trait_: TraitId) -> bool { - db.generic_predicates_ns(trait_.into()) + db.generic_predicates(trait_.into()) .iter() .any(|pred| predicate_references_self(db, trait_, pred, AllowSelfProjection::No)) } @@ -378,7 +378,7 @@ where }) = pred && let trait_data = db.trait_signature(pred_trait_ref.def_id.0) && trait_data.flags.contains(TraitFlags::AUTO) - && let rustc_type_ir::TyKind::Param(crate::next_solver::ParamTy { index: 0, .. }) = + && let rustc_type_ir::TyKind::Param(ParamTy { index: 0, .. }) = pred_trait_ref.self_ty().kind() { continue; @@ -397,10 +397,7 @@ fn receiver_is_dispatchable<'db>( db: &dyn HirDatabase, trait_: TraitId, func: FunctionId, - sig: &crate::next_solver::EarlyBinder< - 'db, - crate::next_solver::Binder<'db, rustc_type_ir::FnSig>>, - >, + sig: &EarlyBinder<'db, Binder<'db, rustc_type_ir::FnSig>>>, ) -> bool { let sig = sig.instantiate_identity(); @@ -409,10 +406,8 @@ fn receiver_is_dispatchable<'db>( parent: trait_.into(), local_id: LocalTypeOrConstParamId::from_raw(la_arena::RawIdx::from_u32(0)), }); - let self_param_ty = crate::next_solver::Ty::new( - interner, - rustc_type_ir::TyKind::Param(crate::next_solver::ParamTy { index: 0, id: self_param_id }), - ); + let self_param_ty = + Ty::new(interner, rustc_type_ir::TyKind::Param(ParamTy { index: 0, id: self_param_id })); // `self: Self` can't be dispatched on, but this is already considered dyn-compatible // See rustc's comment on https://github.com/rust-lang/rust/blob/3f121b9461cce02a703a0e7e450568849dfaa074/compiler/rustc_trait_selection/src/traits/object_safety.rs#L433-L437 @@ -440,12 +435,12 @@ fn receiver_is_dispatchable<'db>( // Type `U` // FIXME: That seems problematic to fake a generic param like that? 
- let unsized_self_ty = crate::next_solver::Ty::new_param(interner, self_param_id, u32::MAX); + let unsized_self_ty = Ty::new_param(interner, self_param_id, u32::MAX); // `Receiver[Self => U]` let unsized_receiver_ty = receiver_for_self_ty(interner, func, receiver_ty, unsized_self_ty); let param_env = { - let generic_predicates = &*db.generic_predicates_ns(func.into()); + let generic_predicates = &*db.generic_predicates(func.into()); // Self: Unsize let unsize_predicate = @@ -475,7 +470,7 @@ fn receiver_is_dispatchable<'db>( // Receiver: DispatchFromDyn U]> let predicate = TraitRef::new(interner, dispatch_from_dyn_did.into(), [receiver_ty, unsized_receiver_ty]); - let goal = crate::next_solver::Goal::new(interner, param_env, predicate); + let goal = Goal::new(interner, param_env, predicate); let infcx = interner.infer_ctxt().build(TypingMode::non_body_analysis()); // the receiver is dispatchable iff the obligation holds @@ -486,26 +481,19 @@ fn receiver_is_dispatchable<'db>( fn receiver_for_self_ty<'db>( interner: DbInterner<'db>, func: FunctionId, - receiver_ty: crate::next_solver::Ty<'db>, - self_ty: crate::next_solver::Ty<'db>, -) -> crate::next_solver::Ty<'db> { - let args = crate::next_solver::GenericArgs::for_item( - interner, - SolverDefId::FunctionId(func), - |index, kind, _| { - if index == 0 { self_ty.into() } else { mk_param(interner, index, kind) } - }, - ); + receiver_ty: Ty<'db>, + self_ty: Ty<'db>, +) -> Ty<'db> { + let args = GenericArgs::for_item(interner, SolverDefId::FunctionId(func), |index, kind, _| { + if index == 0 { self_ty.into() } else { mk_param(interner, index, kind) } + }); - crate::next_solver::EarlyBinder::bind(receiver_ty).instantiate(interner, args) + EarlyBinder::bind(receiver_ty).instantiate(interner, args) } fn contains_illegal_impl_trait_in_trait<'db>( db: &'db dyn HirDatabase, - sig: &crate::next_solver::EarlyBinder< - 'db, - crate::next_solver::Binder<'db, rustc_type_ir::FnSig>>, - >, + sig: &EarlyBinder<'db, Binder<'db, rustc_type_ir::FnSig>>>, ) -> Option { struct OpaqueTypeCollector(FxHashSet); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs index 3ca5f0dcb2476..26e03aa01a1d2 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs @@ -9,7 +9,6 @@ //! where parent follows the same scheme. use std::ops; -use chalk_ir::{BoundVar, DebruijnIndex, cast::Cast as _}; use hir_def::{ ConstParamId, GenericDefId, GenericParamId, ItemContainerId, LifetimeParamId, Lookup, TypeOrConstParamId, TypeParamId, @@ -23,8 +22,6 @@ use hir_def::{ use itertools::chain; use triomphe::Arc; -use crate::{Interner, Substitution, db::HirDatabase, lt_to_placeholder_idx, to_placeholder_idx}; - pub fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics { let parent_generics = parent_generic_def(db, def).map(|def| Box::new(generics(db, def))); let (params, store) = db.generic_params_and_store(def); @@ -230,50 +227,6 @@ impl Generics { pub(crate) fn parent_generics(&self) -> Option<&Generics> { self.parent_generics.as_deref() } - - pub(crate) fn parent_or_self(&self) -> &Generics { - self.parent_generics.as_deref().unwrap_or(self) - } - - /// Returns a Substitution that replaces each parameter by a bound variable. 
- pub(crate) fn bound_vars_subst( - &self, - db: &dyn HirDatabase, - debruijn: DebruijnIndex, - ) -> Substitution { - Substitution::from_iter( - Interner, - self.iter_id().enumerate().map(|(idx, id)| match id { - GenericParamId::ConstParamId(id) => BoundVar::new(debruijn, idx) - .to_const(Interner, db.const_param_ty(id)) - .cast(Interner), - GenericParamId::TypeParamId(_) => { - BoundVar::new(debruijn, idx).to_ty(Interner).cast(Interner) - } - GenericParamId::LifetimeParamId(_) => { - BoundVar::new(debruijn, idx).to_lifetime(Interner).cast(Interner) - } - }), - ) - } - - /// Returns a Substitution that replaces each parameter by itself (i.e. `Ty::Param`). - pub(crate) fn placeholder_subst(&self, db: &dyn HirDatabase) -> Substitution { - Substitution::from_iter( - Interner, - self.iter_id().enumerate().map(|(index, id)| match id { - GenericParamId::TypeParamId(id) => { - to_placeholder_idx(db, id.into(), index as u32).to_ty(Interner).cast(Interner) - } - GenericParamId::ConstParamId(id) => to_placeholder_idx(db, id.into(), index as u32) - .to_const(Interner, db.const_param_ty(id)) - .cast(Interner), - GenericParamId::LifetimeParamId(id) => { - lt_to_placeholder_idx(db, id, index as u32).to_lifetime(Interner).cast(Interner) - } - }), - ) - } } pub(crate) fn trait_self_param_idx(db: &dyn DefDatabase, def: GenericDefId) -> Option { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index b2dd90a3d0df0..9891f3f248bd8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -63,8 +63,9 @@ use crate::{ expr::ExprIsRead, unify::InferenceTable, }, - lower::diagnostics::TyLoweringDiagnostic, - lower_nextsolver::{ImplTraitIdx, ImplTraitLoweringMode, LifetimeElisionKind}, + lower::{ + ImplTraitIdx, ImplTraitLoweringMode, LifetimeElisionKind, diagnostics::TyLoweringDiagnostic, + }, mir::MirSpan, next_solver::{ AliasTy, Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, Region, Ty, TyKind, @@ -1159,7 +1160,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { }, ); let return_ty = self.insert_type_vars(return_ty); - if let Some(rpits) = self.db.return_type_impl_traits_ns(func) { + if let Some(rpits) = self.db.return_type_impl_traits(func) { let mut mode = ImplTraitReplacingMode::ReturnPosition(FxHashSet::default()); let result = self.insert_inference_vars_for_impl_trait(return_ty, &mut mode); if let ImplTraitReplacingMode::ReturnPosition(taits) = mode { @@ -1234,7 +1235,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { } return ty; } - (self.db.return_type_impl_traits_ns(def), idx) + (self.db.return_type_impl_traits(def), idx) } ImplTraitId::TypeAliasImplTrait(def, idx) => { if let ImplTraitReplacingMode::ReturnPosition(taits) = mode { @@ -1243,7 +1244,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { taits.insert(ty); return ty; } - (self.db.type_alias_impl_traits_ns(def), idx) + (self.db.type_alias_impl_traits(def), idx) } _ => unreachable!(), }; @@ -1604,8 +1605,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { match ty.kind() { TyKind::Adt(adt_def, substs) => match adt_def.def_id().0 { AdtId::StructId(struct_id) => { - match self.db.field_types_ns(struct_id.into()).values().next_back().copied() - { + match self.db.field_types(struct_id.into()).values().next_back().copied() { Some(field) => { ty = field.instantiate(self.interner(), substs); } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs 
b/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs index 990281a7c8965..c128977d7b085 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs @@ -394,7 +394,7 @@ fn pointer_kind<'db>( let struct_data = id.fields(ctx.db); if let Some((last_field, _)) = struct_data.fields().iter().last() { let last_field_ty = - ctx.db.field_types_ns(id.into())[last_field].instantiate(ctx.interner(), subst); + ctx.db.field_types(id.into())[last_field].instantiate(ctx.interner(), subst); pointer_kind(last_field_ty, ctx) } else { Ok(Some(PointerKind::Thin)) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/diagnostics.rs index 39e70c262a24a..844eb02ab0d43 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/diagnostics.rs @@ -15,8 +15,8 @@ use la_arena::{Idx, RawIdx}; use crate::{ InferenceDiagnostic, InferenceTyDiagnosticSource, TyLoweringDiagnostic, db::HirDatabase, - lower_nextsolver::path::{PathDiagnosticCallback, PathLoweringContext}, - lower_nextsolver::{LifetimeElisionKind, TyLoweringContext}, + lower::path::{PathDiagnosticCallback, PathLoweringContext}, + lower::{LifetimeElisionKind, TyLoweringContext}, }; // Unfortunately, this struct needs to use interior mutability (but we encapsulate it) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs index e1964608a3f06..efb7244ff6375 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs @@ -37,7 +37,7 @@ use crate::{ pat::contains_explicit_ref_binding, }, lang_items::lang_items_for_bin_op, - lower_nextsolver::{ + lower::{ LifetimeElisionKind, lower_mutability, path::{GenericArgsLowerer, TypeLikeConst, substs_from_args_and_bindings}, }, @@ -564,7 +564,7 @@ impl<'db> InferenceContext<'_, 'db> { match def_id { _ if fields.is_empty() => {} Some(def) => { - let field_types = self.db.field_types_ns(def); + let field_types = self.db.field_types(def); let variant_data = def.fields(self.db); let visibilities = self.db.field_visibilities(def); for field in fields.iter() { @@ -1622,7 +1622,7 @@ impl<'db> InferenceContext<'_, 'db> { } return None; } - let ty = self.db.field_types_ns(field_id.parent)[field_id.local_id] + let ty = self.db.field_types(field_id.parent)[field_id.local_id] .instantiate(interner, parameters); Some((Either::Left(field_id), ty)) }); @@ -1637,7 +1637,7 @@ impl<'db> InferenceContext<'_, 'db> { None => { let (field_id, subst) = private_field?; let adjustments = autoderef.adjust_steps(); - let ty = self.db.field_types_ns(field_id.parent)[field_id.local_id] + let ty = self.db.field_types(field_id.parent)[field_id.local_id] .instantiate(self.interner(), subst); let ty = self.process_remote_user_written_ty(ty); @@ -2320,7 +2320,7 @@ impl<'db> InferenceContext<'_, 'db> { let callable_ty = self.table.try_structurally_resolve_type(callable_ty); if let TyKind::FnDef(fn_def, parameters) = callable_ty.kind() { let generic_predicates = - self.db.generic_predicates_ns(GenericDefId::from_callable(self.db, fn_def.0)); + self.db.generic_predicates(GenericDefId::from_callable(self.db, fn_def.0)); if let Some(predicates) = generic_predicates.instantiate(self.interner(), parameters) { let interner = self.interner(); let param_env = self.table.trait_env.env; diff --git 
a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs index 9edbc9dda0f10..71a9c94bf5e57 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs @@ -18,7 +18,7 @@ use crate::next_solver::{GenericArgs, TraitRef}; use crate::{ Adjust, Adjustment, AutoBorrow, OverloadedDeref, infer::{Expectation, InferenceContext, expr::ExprIsRead}, - lower_nextsolver::lower_mutability, + lower::lower_mutability, next_solver::TyKind, }; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs index 61255d31d2810..8019844b5df36 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs @@ -19,7 +19,7 @@ use crate::{ AllowTwoPhase, BindingMode, Expectation, InferenceContext, TypeMismatch, coerce::CoerceNever, expr::ExprIsRead, }, - lower_nextsolver::lower_mutability, + lower::lower_mutability, next_solver::{GenericArgs, Ty, TyKind}, }; @@ -59,7 +59,7 @@ impl<'db> InferenceContext<'_, 'db> { match def { _ if subs.is_empty() => {} Some(def) => { - let field_types = self.db.field_types_ns(def); + let field_types = self.db.field_types(def); let variant_data = def.fields(self.db); let visibilities = self.db.field_visibilities(def); @@ -128,7 +128,7 @@ impl<'db> InferenceContext<'_, 'db> { match def { _ if subs.len() == 0 => {} Some(def) => { - let field_types = self.db.field_types_ns(def); + let field_types = self.db.field_types(def); let variant_data = def.fields(self.db); let visibilities = self.db.field_visibilities(def); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs index 84d17db6c663a..2dae7cb04ffa1 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs @@ -13,7 +13,7 @@ use crate::{ InferenceDiagnostic, ValueTyDefId, generics::generics, infer::diagnostics::InferenceTyLoweringContext as TyLoweringContext, - lower_nextsolver::LifetimeElisionKind, + lower::LifetimeElisionKind, method_resolution::{self, VisibleFromModule}, next_solver::{ GenericArg, GenericArgs, TraitRef, Ty, @@ -221,7 +221,7 @@ impl<'db> InferenceContext<'_, 'db> { def: GenericDefId, subst: GenericArgs<'db>, ) { - let predicates = self.db.generic_predicates_ns(def); + let predicates = self.db.generic_predicates(def); let interner = self.interner(); let param_env = self.table.trait_env.env; if let Some(predicates) = predicates.instantiate(self.interner(), subst) { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs index beb26f7d68908..a18cdda559d0f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs @@ -800,7 +800,7 @@ impl<'db> InferenceTable<'db> { while let Some((AdtId::StructId(id), subst)) = ty.as_adt() { let struct_data = id.fields(self.db); if let Some((last_field, _)) = struct_data.fields().iter().next_back() { - let last_field_ty = self.db.field_types_ns(id.into())[last_field] + let last_field_ty = self.db.field_types(id.into())[last_field] .instantiate(self.interner(), subst); if structs.contains(&ty) { // A struct recursively contains itself as a tail field somewhere. 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs b/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs index 7ebc2df6f75dd..8aed2608d6cdb 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs @@ -156,7 +156,7 @@ impl<'a, 'db> UninhabitedFrom<'a, 'db> { } let is_enum = matches!(variant, VariantId::EnumVariantId(..)); - let field_tys = self.db().field_types_ns(variant); + let field_tys = self.db().field_types(variant); let field_vis = if is_enum { None } else { Some(self.db().field_visibilities(variant)) }; for (fid, _) in fields.iter() { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs deleted file mode 100644 index 57ef5523b4332..0000000000000 --- a/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs +++ /dev/null @@ -1,403 +0,0 @@ -//! Implementation of the Chalk `Interner` trait, which allows customizing the -//! representation of the various objects Chalk deals with (types, goals etc.). - -use crate::{ - AliasTy, CanonicalVarKind, CanonicalVarKinds, ClosureId, Const, ConstData, ConstScalar, FnAbi, - FnDefId, GenericArg, GenericArgData, Goal, GoalData, InEnvironment, Lifetime, LifetimeData, - OpaqueTy, OpaqueTyId, ProgramClause, ProjectionTy, QuantifiedWhereClause, - QuantifiedWhereClauses, Substitution, Ty, TyKind, VariableKind, chalk_db, tls, -}; -use chalk_ir::{ProgramClauseImplication, SeparatorTraitRef, Variance}; -use hir_def::TypeAliasId; -use intern::{Interned, impl_internable}; -use smallvec::SmallVec; -use std::fmt; -use triomphe::Arc; - -type TyData = chalk_ir::TyData; -type VariableKinds = chalk_ir::VariableKinds; -type Goals = chalk_ir::Goals; -type ProgramClauseData = chalk_ir::ProgramClauseData; -type Constraint = chalk_ir::Constraint; -type Constraints = chalk_ir::Constraints; -type ProgramClauses = chalk_ir::ProgramClauses; - -#[derive(Debug, Copy, Clone, Hash, PartialOrd, Ord, PartialEq, Eq)] -pub struct Interner; - -#[derive(PartialEq, Eq, Hash, PartialOrd, Ord, Clone)] -pub struct InternedWrapper(pub(crate) T); - -impl fmt::Debug for InternedWrapper { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Debug::fmt(&self.0, f) - } -} - -#[derive(PartialEq, Eq, Hash, PartialOrd, Ord, Clone)] -pub struct InternedWrapperNoDebug(pub(crate) T); - -impl std::ops::Deref for InternedWrapper { - type Target = T; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl_internable!( - InternedWrapper>, - InternedWrapper>, - InternedWrapper, - InternedWrapper, - InternedWrapper, - InternedWrapper, - InternedWrapper>, - InternedWrapper>, - InternedWrapper>, - InternedWrapper>, -); - -impl chalk_ir::interner::Interner for Interner { - type InternedType = Interned>; - type InternedLifetime = Interned>; - type InternedConst = Interned>; - type InternedConcreteConst = ConstScalar; - type InternedGenericArg = GenericArgData; - // We could do the following, but that saves "only" 20mb on self while increasing inference - // time by ~2.5% - // type InternedGoal = Interned>; - type InternedGoal = Arc; - type InternedGoals = Vec; - type InternedSubstitution = Interned>>; - type InternedProgramClauses = Interned>>; - type InternedProgramClause = ProgramClauseData; - type InternedQuantifiedWhereClauses = Interned>>; - type InternedVariableKinds = Interned>>; - type InternedCanonicalVarKinds = Interned>>; - type InternedConstraints = Vec>; - type InternedVariances = SmallVec<[Variance; 
16]>; - type DefId = salsa::Id; - type InternedAdtId = hir_def::AdtId; - type Identifier = TypeAliasId; - type FnAbi = FnAbi; - - fn debug_adt_id( - type_kind_id: chalk_db::AdtId, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - tls::with_current_program(|prog| Some(prog?.debug_struct_id(type_kind_id, fmt))) - } - - fn debug_trait_id( - type_kind_id: chalk_db::TraitId, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - tls::with_current_program(|prog| Some(prog?.debug_trait_id(type_kind_id, fmt))) - } - - fn debug_assoc_type_id( - id: chalk_db::AssocTypeId, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - tls::with_current_program(|prog| Some(prog?.debug_assoc_type_id(id, fmt))) - } - - fn debug_opaque_ty_id( - opaque_ty_id: OpaqueTyId, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - Some(write!(fmt, "OpaqueTy#{:?}", opaque_ty_id.0)) - } - - fn debug_fn_def_id(fn_def_id: FnDefId, fmt: &mut fmt::Formatter<'_>) -> Option { - tls::with_current_program(|prog| Some(prog?.debug_fn_def_id(fn_def_id, fmt))) - } - - fn debug_closure_id( - _fn_def_id: ClosureId, - _fmt: &mut fmt::Formatter<'_>, - ) -> Option { - None - } - - fn debug_alias(alias: &AliasTy, fmt: &mut fmt::Formatter<'_>) -> Option { - use std::fmt::Debug; - match alias { - AliasTy::Projection(projection_ty) => Interner::debug_projection_ty(projection_ty, fmt), - AliasTy::Opaque(opaque_ty) => Some(opaque_ty.fmt(fmt)), - } - } - - fn debug_projection_ty( - proj: &ProjectionTy, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - tls::with_current_program(|prog| Some(prog?.debug_projection_ty(proj, fmt))) - .or_else(|| Some(fmt.write_str("ProjectionTy"))) - } - - fn debug_opaque_ty(opaque_ty: &OpaqueTy, fmt: &mut fmt::Formatter<'_>) -> Option { - Some(write!(fmt, "{:?}", opaque_ty.opaque_ty_id)) - } - - fn debug_ty(ty: &Ty, fmt: &mut fmt::Formatter<'_>) -> Option { - Some(write!(fmt, "{:?}", ty.data(Interner))) - } - - fn debug_lifetime(lifetime: &Lifetime, fmt: &mut fmt::Formatter<'_>) -> Option { - Some(write!(fmt, "{:?}", lifetime.data(Interner))) - } - - fn debug_const(constant: &Const, fmt: &mut fmt::Formatter<'_>) -> Option { - Some(write!(fmt, "{:?}", constant.data(Interner))) - } - - fn debug_generic_arg( - parameter: &GenericArg, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - Some(write!(fmt, "{:?}", parameter.data(Interner).inner_debug())) - } - - fn debug_variable_kinds( - variable_kinds: &VariableKinds, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - Some(write!(fmt, "{:?}", variable_kinds.as_slice(Interner))) - } - - fn debug_variable_kinds_with_angles( - variable_kinds: &VariableKinds, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - Some(write!(fmt, "{:?}", variable_kinds.inner_debug(Interner))) - } - - fn debug_canonical_var_kinds( - canonical_var_kinds: &CanonicalVarKinds, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - Some(write!(fmt, "{:?}", canonical_var_kinds.as_slice(Interner))) - } - fn debug_goal(goal: &Goal, fmt: &mut fmt::Formatter<'_>) -> Option { - let goal_data = goal.data(Interner); - Some(write!(fmt, "{goal_data:?}")) - } - fn debug_goals(goals: &Goals, fmt: &mut fmt::Formatter<'_>) -> Option { - Some(write!(fmt, "{:?}", goals.debug(Interner))) - } - fn debug_program_clause_implication( - pci: &ProgramClauseImplication, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - Some(write!(fmt, "{:?}", pci.debug(Interner))) - } - fn debug_program_clause( - clause: &ProgramClause, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - Some(write!(fmt, "{:?}", clause.data(Interner))) - } - fn 
debug_program_clauses( - clauses: &ProgramClauses, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - Some(write!(fmt, "{:?}", clauses.as_slice(Interner))) - } - fn debug_substitution( - substitution: &Substitution, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - Some(write!(fmt, "{:?}", substitution.debug(Interner))) - } - fn debug_separator_trait_ref( - separator_trait_ref: &SeparatorTraitRef<'_, Interner>, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - Some(write!(fmt, "{:?}", separator_trait_ref.debug(Interner))) - } - - fn debug_quantified_where_clauses( - clauses: &QuantifiedWhereClauses, - fmt: &mut fmt::Formatter<'_>, - ) -> Option { - Some(write!(fmt, "{:?}", clauses.as_slice(Interner))) - } - - fn debug_constraints( - _clauses: &Constraints, - _fmt: &mut fmt::Formatter<'_>, - ) -> Option { - None - } - - fn intern_ty(self, kind: TyKind) -> Self::InternedType { - let flags = kind.compute_flags(self); - Interned::new(InternedWrapper(TyData { kind, flags })) - } - - fn ty_data(self, ty: &Self::InternedType) -> &TyData { - &ty.0 - } - - fn intern_lifetime(self, lifetime: LifetimeData) -> Self::InternedLifetime { - Interned::new(InternedWrapper(lifetime)) - } - - fn lifetime_data(self, lifetime: &Self::InternedLifetime) -> &LifetimeData { - &lifetime.0 - } - - fn intern_const(self, constant: ConstData) -> Self::InternedConst { - Interned::new(InternedWrapper(constant)) - } - - fn const_data(self, constant: &Self::InternedConst) -> &ConstData { - &constant.0 - } - - fn const_eq( - self, - _ty: &Self::InternedType, - c1: &Self::InternedConcreteConst, - c2: &Self::InternedConcreteConst, - ) -> bool { - !matches!(c1, ConstScalar::Bytes(..)) || !matches!(c2, ConstScalar::Bytes(..)) || (c1 == c2) - } - - fn intern_generic_arg(self, parameter: GenericArgData) -> Self::InternedGenericArg { - parameter - } - - fn generic_arg_data(self, parameter: &Self::InternedGenericArg) -> &GenericArgData { - parameter - } - - fn intern_goal(self, goal: GoalData) -> Self::InternedGoal { - Arc::new(goal) - } - - fn goal_data(self, goal: &Self::InternedGoal) -> &GoalData { - goal - } - - fn intern_goals( - self, - data: impl IntoIterator>, - ) -> Result { - // let hash = - // std::hash::BuildHasher::hash_one(&BuildHasherDefault::::default(), &goal); - // Interned::new(InternedWrapper(PreHashedWrapper(goal, hash))) - data.into_iter().collect() - } - - fn goals_data(self, goals: &Self::InternedGoals) -> &[Goal] { - goals - } - - fn intern_substitution( - self, - data: impl IntoIterator>, - ) -> Result { - Ok(Interned::new(InternedWrapper(data.into_iter().collect::>()?))) - } - - fn substitution_data(self, substitution: &Self::InternedSubstitution) -> &[GenericArg] { - &substitution.as_ref().0 - } - - fn intern_program_clause(self, data: ProgramClauseData) -> Self::InternedProgramClause { - data - } - - fn program_clause_data(self, clause: &Self::InternedProgramClause) -> &ProgramClauseData { - clause - } - - fn intern_program_clauses( - self, - data: impl IntoIterator>, - ) -> Result { - Ok(Interned::new(InternedWrapper(data.into_iter().collect::>()?))) - } - - fn program_clauses_data(self, clauses: &Self::InternedProgramClauses) -> &[ProgramClause] { - clauses - } - - fn intern_quantified_where_clauses( - self, - data: impl IntoIterator>, - ) -> Result { - Ok(Interned::new(InternedWrapper(data.into_iter().collect::>()?))) - } - - fn quantified_where_clauses_data( - self, - clauses: &Self::InternedQuantifiedWhereClauses, - ) -> &[QuantifiedWhereClause] { - clauses - } - - fn intern_generic_arg_kinds( - 
self, - data: impl IntoIterator>, - ) -> Result { - Ok(Interned::new(InternedWrapper(data.into_iter().collect::>()?))) - } - - fn variable_kinds_data(self, parameter_kinds: &Self::InternedVariableKinds) -> &[VariableKind] { - ¶meter_kinds.as_ref().0 - } - - fn intern_canonical_var_kinds( - self, - data: impl IntoIterator>, - ) -> Result { - Ok(Interned::new(InternedWrapper(data.into_iter().collect::>()?))) - } - - fn canonical_var_kinds_data( - self, - canonical_var_kinds: &Self::InternedCanonicalVarKinds, - ) -> &[CanonicalVarKind] { - canonical_var_kinds - } - fn intern_constraints( - self, - data: impl IntoIterator, E>>, - ) -> Result { - data.into_iter().collect() - } - fn constraints_data( - self, - constraints: &Self::InternedConstraints, - ) -> &[InEnvironment] { - constraints - } - - fn intern_variances( - self, - data: impl IntoIterator>, - ) -> Result { - data.into_iter().collect::>() - } - - fn variances_data(self, variances: &Self::InternedVariances) -> &[Variance] { - variances - } -} - -impl chalk_ir::interner::HasInterner for Interner { - type Interner = Self; -} - -#[macro_export] -macro_rules! has_interner { - ($t:ty) => { - impl HasInterner for $t { - type Interner = $crate::Interner; - } - }; -} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs index a857602fa08a8..fc0b9d30b3333 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs @@ -138,7 +138,7 @@ fn layout_of_simd_ty<'db>( // * #[repr(simd)] struct S([T; 4]) // // where T is a primitive scalar (integer/float/pointer). - let fields = db.field_types_ns(id.into()); + let fields = db.field_types(id.into()); let mut fields = fields.iter(); let Some(TyKind::Array(e_ty, e_len)) = fields .next() @@ -401,7 +401,7 @@ fn field_ty<'a>( fd: LocalFieldId, args: &GenericArgs<'a>, ) -> Ty<'a> { - db.field_types_ns(def)[fd].instantiate(DbInterner::new_with(db, None, None), args) + db.field_types(def)[fd].instantiate(DbInterner::new_with(db, None, None), args) } fn scalar_unit(dl: &TargetDataLayout, value: Primitive) -> Scalar { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs index b698fd9a14541..536c81ab03b2c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs @@ -23,23 +23,15 @@ extern crate ra_ap_rustc_next_trait_solver as rustc_next_trait_solver; extern crate self as hir_ty; -mod builder; -mod chalk_db; -mod chalk_ext; mod infer; mod inhabitedness; -mod interner; mod lower; -mod lower_nextsolver; -mod mapping; pub mod next_solver; mod target_feature; -mod tls; mod utils; pub mod autoderef; pub mod consteval; -mod consteval_chalk; pub mod db; pub mod diagnostics; pub mod display; @@ -61,16 +53,11 @@ mod variance; use std::hash::Hash; -use chalk_ir::{ - VariableKinds, - fold::{Shift, TypeFoldable}, - interner::HasInterner, -}; -use hir_def::{CallableDefId, GeneralConstId, TypeOrConstParamId, hir::ExprId, type_ref::Rawness}; +use hir_def::{CallableDefId, TypeOrConstParamId, hir::ExprId, type_ref::Rawness}; use hir_expand::name::Name; use indexmap::{IndexMap, map::Entry}; use intern::{Symbol, sym}; -use la_arena::{Arena, Idx}; +use la_arena::Idx; use mir::{MirEvalError, VTableMap}; use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet}; use rustc_type_ir::{ @@ -82,13 +69,14 @@ use traits::FnTrait; use triomphe::Arc; use crate::{ - builder::TyBuilder, - chalk_ext::*, 
db::HirDatabase, display::{DisplayTarget, HirDisplay}, - generics::Generics, infer::unify::InferenceTable, - next_solver::DbInterner, + next_solver::{ + AliasTy, Binder, BoundConst, BoundRegion, BoundRegionKind, BoundTy, BoundTyKind, Canonical, + CanonicalVarKind, CanonicalVars, Const, ConstKind, DbInterner, FnSig, PolyFnSig, Predicate, + Region, RegionKind, TraitRef, Ty, TyKind, Tys, abi, + }, }; pub use autoderef::autoderef; @@ -99,15 +87,9 @@ pub use infer::{ closure::analysis::{CaptureKind, CapturedItem}, could_coerce, could_unify, could_unify_deeply, }; -pub use interner::Interner; -pub use lower::{ImplTraitLoweringMode, ParamLoweringMode, TyDefId, ValueTyDefId, diagnostics::*}; -pub use lower_nextsolver::{ - LifetimeElisionKind, TyLoweringContext, associated_type_shorthand_candidates, -}; -pub use mapping::{ - ToChalk, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx, - lt_from_placeholder_idx, lt_to_placeholder_idx, to_assoc_type_id, to_chalk_trait_id, - to_foreign_def_id, to_placeholder_idx, to_placeholder_idx_no_index, +pub use lower::{ + LifetimeElisionKind, TyDefId, TyLoweringContext, ValueTyDefId, + associated_type_shorthand_candidates, diagnostics::*, }; pub use method_resolution::check_orphan_rules; pub use next_solver::interner::{attach_db, attach_db_allow_change, with_attached_db}; @@ -118,76 +100,6 @@ pub use utils::{ is_fn_unsafe_to_call, target_feature_is_safe_in_target, }; -use chalk_ir::{BoundVar, DebruijnIndex, Safety, Scalar}; - -pub(crate) type ForeignDefId = chalk_ir::ForeignDefId; -pub(crate) type AssocTypeId = chalk_ir::AssocTypeId; -pub(crate) type FnDefId = chalk_ir::FnDefId; -pub(crate) type ClosureId = chalk_ir::ClosureId; -pub(crate) type OpaqueTyId = chalk_ir::OpaqueTyId; -pub(crate) type PlaceholderIndex = chalk_ir::PlaceholderIndex; - -pub(crate) type CanonicalVarKinds = chalk_ir::CanonicalVarKinds; - -pub(crate) type VariableKind = chalk_ir::VariableKind; -/// Represents generic parameters and an item bound by them. When the item has parent, the binders -/// also contain the generic parameters for its parent. See chalk's documentation for details. -/// -/// One thing to keep in mind when working with `Binders` (and `Substitution`s, which represent -/// generic arguments) in rust-analyzer is that the ordering within *is* significant - the generic -/// parameters/arguments for an item MUST come before those for its parent. This is to facilitate -/// the integration with chalk-solve, which mildly puts constraints as such. See #13335 for its -/// motivation in detail. -pub(crate) type Binders = chalk_ir::Binders; -/// Interned list of generic arguments for an item. When an item has parent, the `Substitution` for -/// it contains generic arguments for both its parent and itself. See chalk's documentation for -/// details. -/// -/// See `Binders` for the constraint on the ordering. 
-pub(crate) type Substitution = chalk_ir::Substitution; -pub(crate) type GenericArg = chalk_ir::GenericArg; -pub(crate) type GenericArgData = chalk_ir::GenericArgData; - -pub(crate) type Ty = chalk_ir::Ty; -pub type TyKind = chalk_ir::TyKind; -pub(crate) type DynTy = chalk_ir::DynTy; -pub(crate) type FnPointer = chalk_ir::FnPointer; -pub(crate) use chalk_ir::FnSubst; // a re-export so we don't lose the tuple constructor - -pub type AliasTy = chalk_ir::AliasTy; - -pub(crate) type ProjectionTy = chalk_ir::ProjectionTy; -pub(crate) type OpaqueTy = chalk_ir::OpaqueTy; - -pub(crate) type Lifetime = chalk_ir::Lifetime; -pub(crate) type LifetimeData = chalk_ir::LifetimeData; -pub(crate) type LifetimeOutlives = chalk_ir::LifetimeOutlives; - -pub(crate) type ConstValue = chalk_ir::ConstValue; - -pub(crate) type Const = chalk_ir::Const; -pub(crate) type ConstData = chalk_ir::ConstData; - -pub(crate) type TraitRef = chalk_ir::TraitRef; -pub(crate) type QuantifiedWhereClause = Binders; -pub(crate) type Canonical = chalk_ir::Canonical; - -pub(crate) type ChalkTraitId = chalk_ir::TraitId; -pub(crate) type QuantifiedWhereClauses = chalk_ir::QuantifiedWhereClauses; - -pub(crate) type FnSig = chalk_ir::FnSig; - -pub(crate) type InEnvironment = chalk_ir::InEnvironment; -pub type AliasEq = chalk_ir::AliasEq; -pub type WhereClause = chalk_ir::WhereClause; - -pub(crate) type DomainGoal = chalk_ir::DomainGoal; -pub(crate) type Goal = chalk_ir::Goal; - -pub(crate) type CanonicalVarKind = chalk_ir::CanonicalVarKind; -pub(crate) type GoalData = chalk_ir::GoalData; -pub(crate) type ProgramClause = chalk_ir::ProgramClause; - /// A constant can have reference to other things. Memory map job is holding /// the necessary bits of memory of the const eval session to keep the constant /// meaningful. 
@@ -221,7 +133,7 @@ impl ComplexMemoryMap<'_> { } impl<'db> MemoryMap<'db> { - pub fn vtable_ty(&self, id: usize) -> Result, MirEvalError<'db>> { + pub fn vtable_ty(&self, id: usize) -> Result, MirEvalError<'db>> { match self { MemoryMap::Empty | MemoryMap::Simple(_) => Err(MirEvalError::InvalidVTableId(id)), MemoryMap::Complex(cm) => cm.vtable.ty(id), @@ -271,118 +183,11 @@ impl<'db> MemoryMap<'db> { } } -// FIXME(next-solver): add a lifetime to this -/// A concrete constant value -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum ConstScalar { - Bytes(Box<[u8]>, MemoryMap<'static>), - // FIXME: this is a hack to get around chalk not being able to represent unevaluatable - // constants - UnevaluatedConst(GeneralConstId, Substitution), - /// Case of an unknown value that rustc might know but we don't - // FIXME: this is a hack to get around chalk not being able to represent unevaluatable - // constants - // https://github.com/rust-lang/rust-analyzer/pull/8813#issuecomment-840679177 - // https://rust-lang.zulipchat.com/#narrow/stream/144729-wg-traits/topic/Handling.20non.20evaluatable.20constants'.20equality/near/238386348 - Unknown, -} - -impl Hash for ConstScalar { - fn hash(&self, state: &mut H) { - core::mem::discriminant(self).hash(state); - if let ConstScalar::Bytes(b, _) = self { - b.hash(state) - } - } -} - -/// A concrete constant value -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum ConstScalarNs<'db> { - Bytes(Box<[u8]>, MemoryMap<'db>), - // FIXME: this is a hack to get around chalk not being able to represent unevaluatable - // constants - UnevaluatedConst(GeneralConstId, Substitution), - /// Case of an unknown value that rustc might know but we don't - // FIXME: this is a hack to get around chalk not being able to represent unevaluatable - // constants - // https://github.com/rust-lang/rust-analyzer/pull/8813#issuecomment-840679177 - // https://rust-lang.zulipchat.com/#narrow/stream/144729-wg-traits/topic/Handling.20non.20evaluatable.20constants'.20equality/near/238386348 - Unknown, -} - -impl Hash for ConstScalarNs<'_> { - fn hash(&self, state: &mut H) { - core::mem::discriminant(self).hash(state); - if let ConstScalarNs::Bytes(b, _) = self { - b.hash(state) - } - } -} - /// Return an index of a parameter in the generic type parameter list by it's id. 
pub fn param_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option { generics::generics(db, id.parent).type_or_const_param_idx(id) } -pub(crate) fn wrap_empty_binders(value: T) -> Binders -where - T: TypeFoldable + HasInterner, -{ - Binders::empty(Interner, value.shifted_in_from(Interner, DebruijnIndex::ONE)) -} - -pub(crate) fn make_single_type_binders>( - value: T, -) -> Binders { - Binders::new( - chalk_ir::VariableKinds::from_iter( - Interner, - std::iter::once(chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)), - ), - value, - ) -} - -pub(crate) fn make_binders>( - db: &dyn HirDatabase, - generics: &Generics, - value: T, -) -> Binders { - Binders::new(variable_kinds_from_iter(db, generics.iter_id()), value) -} - -pub(crate) fn variable_kinds_from_iter( - db: &dyn HirDatabase, - iter: impl Iterator, -) -> VariableKinds { - VariableKinds::from_iter( - Interner, - iter.map(|x| match x { - hir_def::GenericParamId::ConstParamId(id) => { - chalk_ir::VariableKind::Const(db.const_param_ty(id)) - } - hir_def::GenericParamId::TypeParamId(_) => { - chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General) - } - hir_def::GenericParamId::LifetimeParamId(_) => chalk_ir::VariableKind::Lifetime, - }), - ) -} - -// FIXME: get rid of this, just replace it by FnPointer -/// A function signature as seen by type inference: Several parameter types and -/// one return type. -#[derive(Clone, PartialEq, Eq, Debug)] -pub(crate) struct CallableSig { - params_and_return: Arc<[Ty]>, - is_varargs: bool, - safety: Safety, - abi: FnAbi, -} - -has_interner!(CallableSig); - #[derive(Debug, Copy, Clone, Eq)] pub enum FnAbi { Aapcs, @@ -534,81 +339,21 @@ pub enum ImplTraitId { } #[derive(PartialEq, Eq, Debug, Hash)] -pub struct ImplTraits { - pub(crate) impl_traits: Arena, -} - -has_interner!(ImplTraits); - -#[derive(PartialEq, Eq, Debug, Hash)] -pub struct ImplTrait { - pub(crate) bounds: Binders>, -} +pub struct ImplTrait {} pub type ImplTraitIdx = Idx; -pub fn static_lifetime() -> Lifetime { - LifetimeData::Static.intern(Interner) -} - -pub fn error_lifetime() -> Lifetime { - LifetimeData::Error.intern(Interner) -} - -pub(crate) fn fold_free_vars + TypeFoldable>( - t: T, - for_ty: impl FnMut(BoundVar, DebruijnIndex) -> Ty, - for_const: impl FnMut(Ty, BoundVar, DebruijnIndex) -> Const, -) -> T { - use chalk_ir::fold::TypeFolder; - - #[derive(chalk_derive::FallibleTypeFolder)] - #[has_interner(Interner)] - struct FreeVarFolder< - F1: FnMut(BoundVar, DebruijnIndex) -> Ty, - F2: FnMut(Ty, BoundVar, DebruijnIndex) -> Const, - >(F1, F2); - impl Ty, F2: FnMut(Ty, BoundVar, DebruijnIndex) -> Const> - TypeFolder for FreeVarFolder - { - fn as_dyn(&mut self) -> &mut dyn TypeFolder { - self - } - - fn interner(&self) -> Interner { - Interner - } - - fn fold_free_var_ty(&mut self, bound_var: BoundVar, outer_binder: DebruijnIndex) -> Ty { - self.0(bound_var, outer_binder) - } - - fn fold_free_var_const( - &mut self, - ty: Ty, - bound_var: BoundVar, - outer_binder: DebruijnIndex, - ) -> Const { - self.1(ty, bound_var, outer_binder) - } - } - t.fold_with(&mut FreeVarFolder(for_ty, for_const), DebruijnIndex::INNERMOST) -} - /// 'Canonicalizes' the `t` by replacing any errors with new variables. Also /// ensures there are no unbound variables or inference variables anywhere in /// the `t`. 
-pub fn replace_errors_with_variables<'db, T>( - interner: DbInterner<'db>, - t: &T, -) -> crate::next_solver::Canonical<'db, T> +pub fn replace_errors_with_variables<'db, T>(interner: DbInterner<'db>, t: &T) -> Canonical<'db, T> where T: rustc_type_ir::TypeFoldable> + Clone, { use rustc_type_ir::{FallibleTypeFolder, TypeSuperFoldable}; struct ErrorReplacer<'db> { interner: DbInterner<'db>, - vars: Vec>, + vars: Vec>, binder: rustc_type_ir::DebruijnIndex, } impl<'db> FallibleTypeFolder> for ErrorReplacer<'db> { @@ -621,10 +366,7 @@ where self.interner } - fn try_fold_binder( - &mut self, - t: crate::next_solver::Binder<'db, T>, - ) -> Result, Self::Error> + fn try_fold_binder(&mut self, t: Binder<'db, T>) -> Result, Self::Error> where T: rustc_type_ir::TypeFoldable>, { @@ -634,10 +376,7 @@ where result } - fn try_fold_ty( - &mut self, - t: crate::next_solver::Ty<'db>, - ) -> Result, Self::Error> { + fn try_fold_ty(&mut self, t: Ty<'db>) -> Result, Self::Error> { if !t.has_type_flags( rustc_type_ir::TypeFlags::HAS_ERROR | rustc_type_ir::TypeFlags::HAS_TY_INFER @@ -650,39 +389,28 @@ where #[cfg(debug_assertions)] let error = || Err(()); #[cfg(not(debug_assertions))] - let error = || { - Ok(crate::next_solver::Ty::new_error( - self.interner, - crate::next_solver::ErrorGuaranteed, - )) - }; + let error = || Ok(Ty::new_error(self.interner, crate::next_solver::ErrorGuaranteed)); match t.kind() { - crate::next_solver::TyKind::Error(_) => { + TyKind::Error(_) => { let var = rustc_type_ir::BoundVar::from_usize(self.vars.len()); - self.vars.push(crate::next_solver::CanonicalVarKind::Ty { + self.vars.push(CanonicalVarKind::Ty { ui: rustc_type_ir::UniverseIndex::ZERO, sub_root: var, }); - Ok(crate::next_solver::Ty::new_bound( + Ok(Ty::new_bound( self.interner, self.binder, - crate::next_solver::BoundTy { - var, - kind: crate::next_solver::BoundTyKind::Anon, - }, + BoundTy { var, kind: BoundTyKind::Anon }, )) } - crate::next_solver::TyKind::Infer(_) => error(), - crate::next_solver::TyKind::Bound(index, _) if index > self.binder => error(), + TyKind::Infer(_) => error(), + TyKind::Bound(index, _) if index > self.binder => error(), _ => t.try_super_fold_with(self), } } - fn try_fold_const( - &mut self, - ct: crate::next_solver::Const<'db>, - ) -> Result, Self::Error> { + fn try_fold_const(&mut self, ct: Const<'db>) -> Result, Self::Error> { if !ct.has_type_flags( rustc_type_ir::TypeFlags::HAS_ERROR | rustc_type_ir::TypeFlags::HAS_TY_INFER @@ -695,52 +423,38 @@ where #[cfg(debug_assertions)] let error = || Err(()); #[cfg(not(debug_assertions))] - let error = || Ok(crate::next_solver::Const::error(self.interner)); + let error = || Ok(Const::error(self.interner)); match ct.kind() { - crate::next_solver::ConstKind::Error(_) => { + ConstKind::Error(_) => { let var = rustc_type_ir::BoundVar::from_usize(self.vars.len()); - self.vars.push(crate::next_solver::CanonicalVarKind::Const( - rustc_type_ir::UniverseIndex::ZERO, - )); - Ok(crate::next_solver::Const::new_bound( - self.interner, - self.binder, - crate::next_solver::BoundConst { var }, - )) + self.vars.push(CanonicalVarKind::Const(rustc_type_ir::UniverseIndex::ZERO)); + Ok(Const::new_bound(self.interner, self.binder, BoundConst { var })) } - crate::next_solver::ConstKind::Infer(_) => error(), - crate::next_solver::ConstKind::Bound(index, _) if index > self.binder => error(), + ConstKind::Infer(_) => error(), + ConstKind::Bound(index, _) if index > self.binder => error(), _ => ct.try_super_fold_with(self), } } - fn try_fold_region( - &mut self, - region: 
crate::next_solver::Region<'db>, - ) -> Result, Self::Error> { + fn try_fold_region(&mut self, region: Region<'db>) -> Result, Self::Error> { #[cfg(debug_assertions)] let error = || Err(()); #[cfg(not(debug_assertions))] - let error = || Ok(crate::next_solver::Region::error(self.interner)); + let error = || Ok(Region::error(self.interner)); match region.kind() { - crate::next_solver::RegionKind::ReError(_) => { + RegionKind::ReError(_) => { let var = rustc_type_ir::BoundVar::from_usize(self.vars.len()); - self.vars.push(crate::next_solver::CanonicalVarKind::Region( - rustc_type_ir::UniverseIndex::ZERO, - )); - Ok(crate::next_solver::Region::new_bound( + self.vars.push(CanonicalVarKind::Region(rustc_type_ir::UniverseIndex::ZERO)); + Ok(Region::new_bound( self.interner, self.binder, - crate::next_solver::BoundRegion { - var, - kind: crate::next_solver::BoundRegionKind::Anon, - }, + BoundRegion { var, kind: BoundRegionKind::Anon }, )) } - crate::next_solver::RegionKind::ReVar(_) => error(), - crate::next_solver::RegionKind::ReBound(index, _) if index > self.binder => error(), + RegionKind::ReVar(_) => error(), + RegionKind::ReBound(index, _) if index > self.binder => error(), _ => Ok(region), } } @@ -752,18 +466,18 @@ where Ok(t) => t, Err(_) => panic!("Encountered unbound or inference vars in {t:?}"), }; - crate::next_solver::Canonical { + Canonical { value, max_universe: rustc_type_ir::UniverseIndex::ZERO, - variables: crate::next_solver::CanonicalVars::new_from_iter(interner, error_replacer.vars), + variables: CanonicalVars::new_from_iter(interner, error_replacer.vars), } } pub fn callable_sig_from_fn_trait<'db>( - self_ty: crate::next_solver::Ty<'db>, + self_ty: Ty<'db>, trait_env: Arc>, db: &'db dyn HirDatabase, -) -> Option<(FnTrait, crate::next_solver::PolyFnSig<'db>)> { +) -> Option<(FnTrait, PolyFnSig<'db>)> { let krate = trait_env.krate; let fn_once_trait = FnTrait::FnOnce.get_id(db, krate)?; let output_assoc_type = fn_once_trait @@ -771,54 +485,46 @@ pub fn callable_sig_from_fn_trait<'db>( .associated_type_by_name(&Name::new_symbol_root(sym::Output))?; let mut table = InferenceTable::new(db, trait_env.clone()); - let b = TyBuilder::trait_ref(db, fn_once_trait); - if b.remaining() != 2 { - return None; - } // Register two obligations: // - Self: FnOnce // - >::Output == ?ret_ty let args_ty = table.next_ty_var(); let args = [self_ty, args_ty]; - let trait_ref = crate::next_solver::TraitRef::new(table.interner(), fn_once_trait.into(), args); - let projection = crate::next_solver::Ty::new_alias( + let trait_ref = TraitRef::new(table.interner(), fn_once_trait.into(), args); + let projection = Ty::new_alias( table.interner(), rustc_type_ir::AliasTyKind::Projection, - crate::next_solver::AliasTy::new(table.interner(), output_assoc_type.into(), args), + AliasTy::new(table.interner(), output_assoc_type.into(), args), ); - let pred = crate::next_solver::Predicate::upcast_from(trait_ref, table.interner()); + let pred = Predicate::upcast_from(trait_ref, table.interner()); if !table.try_obligation(pred).no_solution() { table.register_obligation(pred); let return_ty = table.normalize_alias_ty(projection); for fn_x in [FnTrait::Fn, FnTrait::FnMut, FnTrait::FnOnce] { let fn_x_trait = fn_x.get_id(db, krate)?; - let trait_ref = - crate::next_solver::TraitRef::new(table.interner(), fn_x_trait.into(), args); + let trait_ref = TraitRef::new(table.interner(), fn_x_trait.into(), args); if !table - .try_obligation(crate::next_solver::Predicate::upcast_from( - trait_ref, - table.interner(), - )) + 
.try_obligation(Predicate::upcast_from(trait_ref, table.interner())) .no_solution() { let ret_ty = table.resolve_completely(return_ty); let args_ty = table.resolve_completely(args_ty); - let crate::next_solver::TyKind::Tuple(params) = args_ty.kind() else { + let TyKind::Tuple(params) = args_ty.kind() else { return None; }; - let inputs_and_output = crate::next_solver::Tys::new_from_iter( + let inputs_and_output = Tys::new_from_iter( table.interner(), params.iter().chain(std::iter::once(ret_ty)), ); return Some(( fn_x, - crate::next_solver::Binder::dummy(crate::next_solver::FnSig { + Binder::dummy(FnSig { inputs_and_output, c_variadic: false, - safety: crate::next_solver::abi::Safety::Safe, + safety: abi::Safety::Safe, abi: FnAbi::RustCall, }), )); @@ -837,16 +543,16 @@ struct ParamCollector { impl<'db> rustc_type_ir::TypeVisitor> for ParamCollector { type Result = (); - fn visit_ty(&mut self, ty: crate::next_solver::Ty<'db>) -> Self::Result { - if let crate::next_solver::TyKind::Param(param) = ty.kind() { + fn visit_ty(&mut self, ty: Ty<'db>) -> Self::Result { + if let TyKind::Param(param) = ty.kind() { self.params.insert(param.id.into()); } ty.super_visit_with(self); } - fn visit_const(&mut self, konst: crate::next_solver::Const<'db>) -> Self::Result { - if let crate::next_solver::ConstKind::Param(param) = konst.kind() { + fn visit_const(&mut self, konst: Const<'db>) -> Self::Result { + if let ConstKind::Param(param) = konst.kind() { self.params.insert(param.id.into()); } @@ -865,7 +571,7 @@ where } pub fn known_const_to_ast<'db>( - konst: crate::next_solver::Const<'db>, + konst: Const<'db>, db: &'db dyn HirDatabase, display_target: DisplayTarget, ) -> Option { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs index b18d713c411ef..42f7290962bd4 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs @@ -11,82 +11,93 @@ pub(crate) mod path; use std::{ cell::OnceCell, iter, mem, - ops::{self, Not as _}, + ops::{self, Deref, Not as _}, }; use base_db::Crate; -use chalk_ir::{ - Mutability, Safety, TypeOutlives, - cast::Cast, - fold::{Shift, TypeFoldable}, - interner::HasInterner, -}; - use either::Either; use hir_def::{ - AdtId, AssocItemId, ConstId, ConstParamId, EnumId, EnumVariantId, FunctionId, GenericDefId, - GenericParamId, LocalFieldId, Lookup, StaticId, StructId, TypeAliasId, TypeOrConstParamId, + AdtId, AssocItemId, CallableDefId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, + FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LifetimeParamId, + LocalFieldId, Lookup, StaticId, StructId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, VariantId, builtin_type::BuiltinType, - expr_store::{ExpressionStore, path::Path}, - hir::generics::{GenericParamDataRef, TypeOrConstParamData, WherePredicate}, + expr_store::{ExpressionStore, HygieneId, path::Path}, + hir::generics::{ + GenericParamDataRef, TypeOrConstParamData, TypeParamProvenance, WherePredicate, + }, + item_tree::FieldsShape, lang_item::LangItem, - resolver::{HasResolver, LifetimeNs, Resolver, TypeNs}, - signatures::TraitFlags, + resolver::{HasResolver, LifetimeNs, Resolver, TypeNs, ValueNs}, + signatures::{FunctionSignature, TraitFlags, TypeAliasFlags}, type_ref::{ - ConstRef, LifetimeRefId, LiteralConstRef, PathId, TraitBoundModifier, TypeBound, TypeRef, - TypeRefId, + ConstRef, LifetimeRefId, LiteralConstRef, PathId, TraitBoundModifier, + TraitRef as 
HirTraitRef, TypeBound, TypeRef, TypeRefId, }, }; use hir_expand::name::Name; -use la_arena::{Arena, ArenaMap}; +use la_arena::{Arena, ArenaMap, Idx}; +use path::{PathDiagnosticCallback, PathLoweringContext}; +use rustc_ast_ir::Mutability; use rustc_hash::FxHashSet; +use rustc_pattern_analysis::Captures; +use rustc_type_ir::{ + AliasTyKind, ConstKind, DebruijnIndex, ExistentialPredicate, ExistentialProjection, + ExistentialTraitRef, FnSig, OutlivesPredicate, + TyKind::{self}, + TypeVisitableExt, + inherent::{GenericArg as _, GenericArgs as _, IntoKind as _, Region as _, SliceLike, Ty as _}, +}; +use salsa::plumbing::AsId; +use smallvec::{SmallVec, smallvec}; use stdx::{impl_from, never}; use triomphe::{Arc, ThinArc}; use crate::{ - AliasTy, Binders, BoundVar, Const, DebruijnIndex, DynTy, FnAbi, FnPointer, FnSig, FnSubst, - ImplTrait, ImplTraitId, ImplTraits, Interner, Lifetime, LifetimeData, LifetimeOutlives, - QuantifiedWhereClause, QuantifiedWhereClauses, Substitution, TraitRef, TraitRefExt, Ty, - TyBuilder, TyKind, WhereClause, all_super_traits, - consteval_chalk::{intern_const_ref, path_to_const, unknown_const, unknown_const_as_generic}, + FnAbi, ImplTraitId, TraitEnvironment, TyLoweringDiagnostic, TyLoweringDiagnosticKind, + consteval::intern_const_ref, db::HirDatabase, - error_lifetime, generics::{Generics, generics, trait_self_param_idx}, - lower::{ - diagnostics::*, - path::{PathDiagnosticCallback, PathLoweringContext}, - }, - make_binders, - mapping::{from_chalk_trait_id, lt_to_placeholder_idx}, next_solver::{ - DbInterner, - mapping::{ChalkToNextSolver, NextSolverToChalk}, + AliasTy, Binder, BoundExistentialPredicates, Clause, Clauses, Const, DbInterner, + EarlyBinder, EarlyParamRegion, ErrorGuaranteed, GenericArg, GenericArgs, ParamConst, + ParamEnv, PolyFnSig, Predicate, Region, SolverDefId, TraitPredicate, TraitRef, Ty, Tys, + UnevaluatedConst, abi::Safety, }, - static_lifetime, to_chalk_trait_id, to_placeholder_idx, - utils::all_super_trait_refs, - variable_kinds_from_iter, }; +pub(crate) struct PathDiagnosticCallbackData(pub(crate) TypeRefId); + +#[derive(PartialEq, Eq, Debug, Hash)] +pub struct ImplTraits<'db> { + pub(crate) impl_traits: Arena>, +} + +#[derive(PartialEq, Eq, Debug, Hash)] +pub struct ImplTrait<'db> { + pub(crate) predicates: Vec>, +} + +pub type ImplTraitIdx<'db> = Idx>; + #[derive(Debug, Default)] -struct ImplTraitLoweringState { +struct ImplTraitLoweringState<'db> { /// When turning `impl Trait` into opaque types, we have to collect the /// bounds at the same time to get the IDs correct (without becoming too /// complicated). mode: ImplTraitLoweringMode, // This is structured as a struct with fields and not as an enum because it helps with the borrow checker. - opaque_type_data: Arena, + opaque_type_data: Arena>, } -impl ImplTraitLoweringState { - fn new(mode: ImplTraitLoweringMode) -> ImplTraitLoweringState { + +impl<'db> ImplTraitLoweringState<'db> { + fn new(mode: ImplTraitLoweringMode) -> ImplTraitLoweringState<'db> { Self { mode, opaque_type_data: Arena::new() } } } -pub(crate) struct PathDiagnosticCallbackData(pub(crate) TypeRefId); - #[derive(Debug, Clone)] -pub(crate) enum LifetimeElisionKind { +pub enum LifetimeElisionKind<'db> { /// Create a new anonymous lifetime parameter and reference it. 
/// /// If `report_in_path`, report an error when encountering lifetime elision in a path: @@ -104,75 +115,109 @@ pub(crate) enum LifetimeElisionKind { AnonymousCreateParameter { report_in_path: bool }, /// Replace all anonymous lifetimes by provided lifetime. - Elided(Lifetime), + Elided(Region<'db>), /// Give a hard error when either `&` or `'_` is written. Used to /// rule out things like `where T: Foo<'_>`. Does not imply an /// error on default object bounds (e.g., `Box`). AnonymousReportError, + /// Resolves elided lifetimes to `'static` if there are no other lifetimes in scope, + /// otherwise give a warning that the previous behavior of introducing a new early-bound + /// lifetime is a bug and will be removed (if `only_lint` is enabled). + StaticIfNoLifetimeInScope { only_lint: bool }, + + /// Signal we cannot find which should be the anonymous lifetime. + ElisionFailure, + /// Infer all elided lifetimes. Infer, } -impl LifetimeElisionKind { +impl<'db> LifetimeElisionKind<'db> { + #[inline] + pub(crate) fn for_const( + interner: DbInterner<'db>, + const_parent: ItemContainerId, + ) -> LifetimeElisionKind<'db> { + match const_parent { + ItemContainerId::ExternBlockId(_) | ItemContainerId::ModuleId(_) => { + LifetimeElisionKind::Elided(Region::new_static(interner)) + } + ItemContainerId::ImplId(_) => { + LifetimeElisionKind::StaticIfNoLifetimeInScope { only_lint: true } + } + ItemContainerId::TraitId(_) => { + LifetimeElisionKind::StaticIfNoLifetimeInScope { only_lint: false } + } + } + } + #[inline] - pub(crate) fn for_fn_ret() -> LifetimeElisionKind { + pub(crate) fn for_fn_params(data: &FunctionSignature) -> LifetimeElisionKind<'db> { + LifetimeElisionKind::AnonymousCreateParameter { report_in_path: data.is_async() } + } + + #[inline] + pub(crate) fn for_fn_ret(interner: DbInterner<'db>) -> LifetimeElisionKind<'db> { // FIXME: We should use the elided lifetime here, or `ElisionFailure`. - LifetimeElisionKind::Elided(error_lifetime()) + LifetimeElisionKind::Elided(Region::error(interner)) } } #[derive(Debug)] -pub(crate) struct TyLoweringContext<'db> { +pub struct TyLoweringContext<'db, 'a> { pub db: &'db dyn HirDatabase, - resolver: &'db Resolver<'db>, - store: &'db ExpressionStore, + interner: DbInterner<'db>, + resolver: &'a Resolver<'db>, + store: &'a ExpressionStore, def: GenericDefId, generics: OnceCell, in_binders: DebruijnIndex, - /// Note: Conceptually, it's thinkable that we could be in a location where - /// some type params should be represented as placeholders, and others - /// should be converted to variables. I think in practice, this isn't - /// possible currently, so this should be fine for now. - pub type_param_mode: ParamLoweringMode, - impl_trait_mode: ImplTraitLoweringState, + impl_trait_mode: ImplTraitLoweringState<'db>, /// Tracks types with explicit `?Sized` bounds. - pub(crate) unsized_types: FxHashSet, + pub(crate) unsized_types: FxHashSet>, pub(crate) diagnostics: Vec, - lifetime_elision: LifetimeElisionKind, + lifetime_elision: LifetimeElisionKind<'db>, + /// When lowering the defaults for generic params, this contains the index of the currently lowered param. + /// We disallow referring to later params, or to ADT's `Self`. 
+ lowering_param_default: Option, } -impl<'db> TyLoweringContext<'db> { - pub(crate) fn new( +impl<'db, 'a> TyLoweringContext<'db, 'a> { + pub fn new( db: &'db dyn HirDatabase, - resolver: &'db Resolver<'db>, - store: &'db ExpressionStore, + resolver: &'a Resolver<'db>, + store: &'a ExpressionStore, def: GenericDefId, - lifetime_elision: LifetimeElisionKind, + lifetime_elision: LifetimeElisionKind<'db>, ) -> Self { let impl_trait_mode = ImplTraitLoweringState::new(ImplTraitLoweringMode::Disallowed); - let type_param_mode = ParamLoweringMode::Placeholder; - let in_binders = DebruijnIndex::INNERMOST; + let in_binders = DebruijnIndex::ZERO; Self { db, + interner: DbInterner::new_with(db, Some(resolver.krate()), None), resolver, def, generics: Default::default(), store, in_binders, impl_trait_mode, - type_param_mode, unsized_types: FxHashSet::default(), diagnostics: Vec::new(), lifetime_elision, + lowering_param_default: None, } } + pub(crate) fn set_lifetime_elision(&mut self, lifetime_elision: LifetimeElisionKind<'db>) { + self.lifetime_elision = lifetime_elision; + } + pub(crate) fn with_debruijn( &mut self, debruijn: DebruijnIndex, - f: impl FnOnce(&mut TyLoweringContext<'_>) -> T, + f: impl FnOnce(&mut TyLoweringContext<'db, '_>) -> T, ) -> T { let old_debruijn = mem::replace(&mut self.in_binders, debruijn); let result = f(self); @@ -183,28 +228,22 @@ impl<'db> TyLoweringContext<'db> { pub(crate) fn with_shifted_in( &mut self, debruijn: DebruijnIndex, - f: impl FnOnce(&mut TyLoweringContext<'_>) -> T, - ) -> T { - self.with_debruijn(self.in_binders.shifted_in_from(debruijn), f) - } - - fn with_lifetime_elision( - &mut self, - lifetime_elision: LifetimeElisionKind, - f: impl FnOnce(&mut TyLoweringContext<'_>) -> T, + f: impl FnOnce(&mut TyLoweringContext<'db, '_>) -> T, ) -> T { - let old_lifetime_elision = mem::replace(&mut self.lifetime_elision, lifetime_elision); - let result = f(self); - self.lifetime_elision = old_lifetime_elision; - result + self.with_debruijn(self.in_binders.shifted_in(debruijn.as_u32()), f) } pub(crate) fn with_impl_trait_mode(self, impl_trait_mode: ImplTraitLoweringMode) -> Self { Self { impl_trait_mode: ImplTraitLoweringState::new(impl_trait_mode), ..self } } - pub(crate) fn with_type_param_mode(self, type_param_mode: ParamLoweringMode) -> Self { - Self { type_param_mode, ..self } + pub(crate) fn impl_trait_mode(&mut self, impl_trait_mode: ImplTraitLoweringMode) -> &mut Self { + self.impl_trait_mode = ImplTraitLoweringState::new(impl_trait_mode); + self + } + + pub(crate) fn lowering_param_default(&mut self, index: u32) { + self.lowering_param_default = Some(index); } pub(crate) fn push_diagnostic(&mut self, type_ref: TypeRefId, kind: TyLoweringDiagnosticKind) { @@ -213,7 +252,7 @@ impl<'db> TyLoweringContext<'db> { } #[derive(Copy, Clone, Debug, PartialEq, Eq, Default)] -pub enum ImplTraitLoweringMode { +pub(crate) enum ImplTraitLoweringMode { /// `impl Trait` gets lowered into an opaque type that doesn't unify with /// anything except itself. This is used in places where values flow 'out', /// i.e. 
for arguments of the function we're currently checking, and return @@ -224,30 +263,17 @@ pub enum ImplTraitLoweringMode { Disallowed, } -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub enum ParamLoweringMode { - Placeholder, - Variable, -} - -impl<'db> TyLoweringContext<'db> { - pub(crate) fn lower_ty(&mut self, type_ref: TypeRefId) -> Ty { +impl<'db, 'a> TyLoweringContext<'db, 'a> { + pub fn lower_ty(&mut self, type_ref: TypeRefId) -> Ty<'db> { self.lower_ty_ext(type_ref).0 } - pub(crate) fn lower_const(&mut self, const_ref: &ConstRef, const_type: Ty) -> Const { + pub(crate) fn lower_const(&mut self, const_ref: ConstRef, const_type: Ty<'db>) -> Const<'db> { let const_ref = &self.store[const_ref.expr]; match const_ref { - hir_def::hir::Expr::Path(path) => path_to_const( - self.db, - self.resolver, - path, - self.type_param_mode, - || self.generics(), - self.in_binders, - const_type.clone(), - ) - .unwrap_or_else(|| unknown_const(const_type)), + hir_def::hir::Expr::Path(path) => { + self.path_to_const(path).unwrap_or_else(|| unknown_const(const_type)) + } hir_def::hir::Expr::Literal(literal) => intern_const_ref( self.db, &match *literal { @@ -290,32 +316,88 @@ impl<'db> TyLoweringContext<'db> { } } - pub(crate) fn lower_path_as_const(&mut self, path: &Path, const_type: Ty) -> Const { - path_to_const( - self.db, - self.resolver, - path, - self.type_param_mode, - || self.generics(), - self.in_binders, - const_type.clone(), - ) - .unwrap_or_else(|| unknown_const(const_type)) + pub(crate) fn path_to_const(&mut self, path: &Path) -> Option> { + match self.resolver.resolve_path_in_value_ns_fully(self.db, path, HygieneId::ROOT) { + Some(ValueNs::GenericParam(p)) => { + let args = self.generics(); + match args.type_or_const_param_idx(p.into()) { + Some(idx) => Some(self.const_param(p, idx as u32)), + None => { + never!( + "Generic list doesn't contain this param: {:?}, {:?}, {:?}", + args, + path, + p + ); + None + } + } + } + Some(ValueNs::ConstId(c)) => { + let args = GenericArgs::new_from_iter(self.interner, []); + Some(Const::new( + self.interner, + rustc_type_ir::ConstKind::Unevaluated(UnevaluatedConst::new( + SolverDefId::ConstId(c), + args, + )), + )) + } + _ => None, + } + } + + pub(crate) fn lower_path_as_const(&mut self, path: &Path, const_type: Ty<'db>) -> Const<'db> { + self.path_to_const(path).unwrap_or_else(|| unknown_const(const_type)) } fn generics(&self) -> &Generics { self.generics.get_or_init(|| generics(self.db, self.def)) } - pub(crate) fn lower_ty_ext(&mut self, type_ref_id: TypeRefId) -> (Ty, Option) { + fn param_index_is_disallowed(&self, index: u32) -> bool { + self.lowering_param_default + .is_some_and(|disallow_params_after| index >= disallow_params_after) + } + + fn type_param(&mut self, id: TypeParamId, index: u32) -> Ty<'db> { + if self.param_index_is_disallowed(index) { + // FIXME: Report an error. + Ty::new_error(self.interner, ErrorGuaranteed) + } else { + Ty::new_param(self.interner, id, index) + } + } + + fn const_param(&mut self, id: ConstParamId, index: u32) -> Const<'db> { + if self.param_index_is_disallowed(index) { + // FIXME: Report an error. + Const::error(self.interner) + } else { + Const::new_param(self.interner, ParamConst { id, index }) + } + } + + fn region_param(&mut self, id: LifetimeParamId, index: u32) -> Region<'db> { + if self.param_index_is_disallowed(index) { + // FIXME: Report an error. 
+ Region::error(self.interner) + } else { + Region::new_early_param(self.interner, EarlyParamRegion { id, index }) + } + } + + #[tracing::instrument(skip(self), ret)] + pub fn lower_ty_ext(&mut self, type_ref_id: TypeRefId) -> (Ty<'db>, Option) { + let interner = self.interner; let mut res = None; let type_ref = &self.store[type_ref_id]; + tracing::debug!(?type_ref); let ty = match type_ref { - TypeRef::Never => TyKind::Never.intern(Interner), + TypeRef::Never => Ty::new(interner, TyKind::Never), TypeRef::Tuple(inner) => { let inner_tys = inner.iter().map(|&tr| self.lower_ty(tr)); - TyKind::Tuple(inner_tys.len(), Substitution::from_iter(Interner, inner_tys)) - .intern(Interner) + Ty::new_tup_from_iter(interner, inner_tys) } TypeRef::Path(path) => { let (ty, res_) = @@ -325,81 +407,61 @@ impl<'db> TyLoweringContext<'db> { } &TypeRef::TypeParam(type_param_id) => { res = Some(TypeNs::GenericParam(type_param_id)); - match self.type_param_mode { - ParamLoweringMode::Placeholder => { - let generics = self.generics(); - let idx = generics.type_or_const_param_idx(type_param_id.into()).unwrap(); - TyKind::Placeholder(to_placeholder_idx( - self.db, - type_param_id.into(), - idx as u32, - )) - } - ParamLoweringMode::Variable => { - let idx = - self.generics().type_or_const_param_idx(type_param_id.into()).unwrap(); - TyKind::BoundVar(BoundVar::new(self.in_binders, idx)) - } - } - .intern(Interner) + + let generics = self.generics(); + let (idx, _data) = + generics.type_or_const_param(type_param_id.into()).expect("matching generics"); + self.type_param(type_param_id, idx as u32) } &TypeRef::RawPtr(inner, mutability) => { let inner_ty = self.lower_ty(inner); - TyKind::Raw(lower_to_chalk_mutability(mutability), inner_ty).intern(Interner) + Ty::new(interner, TyKind::RawPtr(inner_ty, lower_mutability(mutability))) } TypeRef::Array(array) => { let inner_ty = self.lower_ty(array.ty); - let const_len = self.lower_const(&array.len, TyBuilder::usize()); - TyKind::Array(inner_ty, const_len).intern(Interner) + let const_len = self.lower_const(array.len, Ty::new_usize(interner)); + Ty::new_array_with_const_len(interner, inner_ty, const_len) } &TypeRef::Slice(inner) => { let inner_ty = self.lower_ty(inner); - TyKind::Slice(inner_ty).intern(Interner) + Ty::new_slice(interner, inner_ty) } TypeRef::Reference(ref_) => { let inner_ty = self.lower_ty(ref_.ty); - // FIXME: It should infer the eldided lifetimes instead of stubbing with static + // FIXME: It should infer the eldided lifetimes instead of stubbing with error let lifetime = ref_ .lifetime - .as_ref() - .map_or_else(error_lifetime, |&lr| self.lower_lifetime(lr)); - TyKind::Ref(lower_to_chalk_mutability(ref_.mutability), lifetime, inner_ty) - .intern(Interner) + .map_or_else(|| Region::error(interner), |lr| self.lower_lifetime(lr)); + Ty::new_ref(interner, lifetime, inner_ty, lower_mutability(ref_.mutability)) } - TypeRef::Placeholder => TyKind::Error.intern(Interner), + TypeRef::Placeholder => Ty::new_error(interner, ErrorGuaranteed), TypeRef::Fn(fn_) => { - let substs = self.with_shifted_in(DebruijnIndex::ONE, |ctx| { - let (params, ret) = fn_.split_params_and_ret(); - let mut subst = Vec::with_capacity(fn_.params.len()); - ctx.with_lifetime_elision( - LifetimeElisionKind::AnonymousCreateParameter { report_in_path: false }, - |ctx| { - subst.extend(params.iter().map(|&(_, tr)| ctx.lower_ty(tr))); - }, - ); - ctx.with_lifetime_elision(LifetimeElisionKind::for_fn_ret(), |ctx| { - subst.push(ctx.lower_ty(ret)); - }); - Substitution::from_iter(Interner, subst) - 
}); - TyKind::Function(FnPointer { - num_binders: 0, // FIXME lower `for<'a> fn()` correctly - sig: FnSig { + let substs = self.with_shifted_in( + DebruijnIndex::from_u32(1), + |ctx: &mut TyLoweringContext<'_, '_>| { + Tys::new_from_iter( + interner, + fn_.params.iter().map(|&(_, tr)| ctx.lower_ty(tr)), + ) + }, + ); + Ty::new_fn_ptr( + interner, + Binder::dummy(FnSig { abi: fn_.abi.as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol), safety: if fn_.is_unsafe { Safety::Unsafe } else { Safety::Safe }, - variadic: fn_.is_varargs, - }, - substitution: FnSubst(substs), - }) - .intern(Interner) + c_variadic: fn_.is_varargs, + inputs_and_output: substs, + }), + ) } TypeRef::DynTrait(bounds) => self.lower_dyn_trait(bounds), TypeRef::ImplTrait(bounds) => { match self.impl_trait_mode.mode { ImplTraitLoweringMode::Opaque => { - let origin = match self.def { - GenericDefId::FunctionId(it) => Either::Left(it), - GenericDefId::TypeAliasId(it) => Either::Right(it), + let origin = match self.resolver.generic_def() { + Some(GenericDefId::FunctionId(it)) => Either::Left(it), + Some(GenericDefId::TypeAliasId(it)) => Either::Right(it), _ => panic!( "opaque impl trait lowering must be in function or type alias" ), @@ -408,9 +470,19 @@ impl<'db> TyLoweringContext<'db> { // this dance is to make sure the data is in the right // place even if we encounter more opaque types while // lowering the bounds - let idx = self.impl_trait_mode.opaque_type_data.alloc(ImplTrait { - bounds: crate::make_single_type_binders(Vec::default()), - }); + let idx = self + .impl_trait_mode + .opaque_type_data + .alloc(ImplTrait { predicates: Vec::default() }); + + // FIXME(next-solver): this from_raw/into_raw dance isn't nice, but it's minimal + let impl_trait_id = origin.either( + |f| ImplTraitId::ReturnTypeImplTrait(f, Idx::from_raw(idx.into_raw())), + |a| ImplTraitId::TypeAliasImplTrait(a, Idx::from_raw(idx.into_raw())), + ); + let opaque_ty_id: SolverDefId = + self.db.intern_impl_trait_id(impl_trait_id).into(); + // We don't want to lower the bounds inside the binders // we're currently in, because they don't end up inside // those binders. E.g. when we have `impl Trait TyLoweringContext<'db> { // parameter of the outer function, it's just one binder // away instead of two. 
let actual_opaque_type_data = self - .with_debruijn(DebruijnIndex::INNERMOST, |ctx| { - ctx.lower_impl_trait(bounds, self.resolver.krate()) + .with_debruijn(DebruijnIndex::ZERO, |ctx| { + ctx.lower_impl_trait(opaque_ty_id, bounds, self.resolver.krate()) }); self.impl_trait_mode.opaque_type_data[idx] = actual_opaque_type_data; - let impl_trait_id = origin.either( - |f| ImplTraitId::ReturnTypeImplTrait(f, idx), - |a| ImplTraitId::TypeAliasImplTrait(a, idx), - ); - let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into(); - let generics = generics(self.db, origin.either(|f| f.into(), |a| a.into())); - let parameters = generics.bound_vars_subst(self.db, self.in_binders); - TyKind::OpaqueType(opaque_ty_id, parameters).intern(Interner) + let args = GenericArgs::identity_for_item(self.interner, opaque_ty_id); + Ty::new_alias( + self.interner, + AliasTyKind::Opaque, + AliasTy::new_from_args(self.interner, opaque_ty_id, args), + ) } ImplTraitLoweringMode::Disallowed => { // FIXME: report error - TyKind::Error.intern(Interner) + Ty::new_error(self.interner, ErrorGuaranteed) } } } - TypeRef::Error => TyKind::Error.intern(Interner), + TypeRef::Error => Ty::new_error(self.interner, ErrorGuaranteed), }; (ty, res) } @@ -449,8 +519,8 @@ impl<'db> TyLoweringContext<'db> { /// This is only for `generic_predicates_for_param`, where we can't just /// lower the self types of the predicates since that could lead to cycles. /// So we just check here if the `type_ref` resolves to a generic param, and which. - fn lower_ty_only_param(&mut self, type_ref_id: TypeRefId) -> Option { - let type_ref = &self.store[type_ref_id]; + fn lower_ty_only_param(&self, type_ref: TypeRefId) -> Option { + let type_ref = &self.store[type_ref]; let path = match type_ref { TypeRef::Path(path) => path, &TypeRef::TypeParam(idx) => return Some(idx.into()), @@ -462,9 +532,8 @@ impl<'db> TyLoweringContext<'db> { if path.segments().len() > 1 { return None; } - let mut ctx = self.at_path(PathId::from_type_ref_unchecked(type_ref_id)); - let resolution = match ctx.resolve_path_in_type_ns() { - Some((it, None)) => it, + let resolution = match self.resolver.resolve_path_in_type_ns(self.db, path) { + Some((it, None, _)) => it, _ => return None, }; match resolution { @@ -474,7 +543,7 @@ impl<'db> TyLoweringContext<'db> { } #[inline] - fn on_path_diagnostic_callback<'a>(type_ref: TypeRefId) -> PathDiagnosticCallback<'a, 'db> { + fn on_path_diagnostic_callback<'b>(type_ref: TypeRefId) -> PathDiagnosticCallback<'b, 'db> { PathDiagnosticCallback { data: Either::Left(PathDiagnosticCallbackData(type_ref)), callback: |data, this, diag| { @@ -485,7 +554,7 @@ impl<'db> TyLoweringContext<'db> { } #[inline] - fn at_path(&mut self, path_id: PathId) -> PathLoweringContext<'_, 'db> { + fn at_path(&mut self, path_id: PathId) -> PathLoweringContext<'_, 'a, 'db> { PathLoweringContext::new( self, Self::on_path_diagnostic_callback(path_id.type_ref()), @@ -493,7 +562,7 @@ impl<'db> TyLoweringContext<'db> { ) } - pub(crate) fn lower_path(&mut self, path: &Path, path_id: PathId) -> (Ty, Option) { + pub(crate) fn lower_path(&mut self, path: &Path, path_id: PathId) -> (Ty<'db>, Option) { // Resolve the path (in type namespace) if let Some(type_ref) = path.type_anchor() { let (ty, res) = self.lower_ty_ext(type_ref); @@ -504,7 +573,7 @@ impl<'db> TyLoweringContext<'db> { let mut ctx = self.at_path(path_id); let (resolution, remaining_index) = match ctx.resolve_path_in_type_ns() { Some(it) => it, - None => return (TyKind::Error.intern(Interner), None), + None => 
return (Ty::new_error(self.interner, ErrorGuaranteed), None), }; if matches!(resolution, TypeNs::TraitId(_)) && remaining_index.is_none() { @@ -520,8 +589,8 @@ impl<'db> TyLoweringContext<'db> { fn lower_trait_ref_from_path( &mut self, path_id: PathId, - explicit_self_ty: Ty, - ) -> Option<(TraitRef, PathLoweringContext<'_, 'db>)> { + explicit_self_ty: Ty<'db>, + ) -> Option<(TraitRef<'db>, PathLoweringContext<'_, 'a, 'db>)> { let mut ctx = self.at_path(path_id); let resolved = match ctx.resolve_path_in_type_ns_fully()? { // FIXME(trait_alias): We need to handle trait alias here. @@ -531,26 +600,57 @@ impl<'db> TyLoweringContext<'db> { Some((ctx.lower_trait_ref_from_resolved_path(resolved, explicit_self_ty, false), ctx)) } - /// When lowering predicates from parents (impl, traits) for children defs (fns, consts, types), `generics` should - /// contain the `Generics` for the **child**, while `predicate_owner` should contain the `GenericDefId` of the - /// **parent**. This is important so we generate the correct bound var/placeholder. + fn lower_trait_ref( + &mut self, + trait_ref: &HirTraitRef, + explicit_self_ty: Ty<'db>, + ) -> Option> { + self.lower_trait_ref_from_path(trait_ref.path, explicit_self_ty).map(|it| it.0) + } + pub(crate) fn lower_where_predicate<'b>( &'b mut self, where_predicate: &'b WherePredicate, ignore_bindings: bool, - ) -> impl Iterator + use<'db, 'b> { + generics: &Generics, + predicate_filter: PredicateFilter, + ) -> impl Iterator> + use<'a, 'b, 'db> { match where_predicate { WherePredicate::ForLifetime { target, bound, .. } | WherePredicate::TypeBound { target, bound } => { + if let PredicateFilter::SelfTrait = predicate_filter { + let target_type = &self.store[*target]; + let self_type = 'is_self: { + if let TypeRef::Path(path) = target_type + && path.is_self_type() + { + break 'is_self true; + } + if let TypeRef::TypeParam(param) = target_type + && generics[param.local_id()].is_trait_self() + { + break 'is_self true; + } + false + }; + if !self_type { + return Either::Left(Either::Left(iter::empty())); + } + } let self_ty = self.lower_ty(*target); - Either::Left(self.lower_type_bound(bound, self_ty, ignore_bindings)) + Either::Left(Either::Right(self.lower_type_bound(bound, self_ty, ignore_bindings))) + } + &WherePredicate::Lifetime { bound, target } => { + Either::Right(iter::once(Clause(Predicate::new( + self.interner, + Binder::dummy(rustc_type_ir::PredicateKind::Clause( + rustc_type_ir::ClauseKind::RegionOutlives(OutlivesPredicate( + self.lower_lifetime(bound), + self.lower_lifetime(target), + )), + )), + )))) } - &WherePredicate::Lifetime { bound, target } => Either::Right(iter::once( - crate::wrap_empty_binders(WhereClause::LifetimeOutlives(LifetimeOutlives { - a: self.lower_lifetime(bound), - b: self.lower_lifetime(target), - })), - )), } .into_iter() } @@ -558,40 +658,40 @@ impl<'db> TyLoweringContext<'db> { pub(crate) fn lower_type_bound<'b>( &'b mut self, bound: &'b TypeBound, - self_ty: Ty, + self_ty: Ty<'db>, ignore_bindings: bool, - ) -> impl Iterator + use<'b, 'db> { + ) -> impl Iterator> + use<'b, 'a, 'db> { + let interner = self.interner; let mut assoc_bounds = None; let mut clause = None; match bound { &TypeBound::Path(path, TraitBoundModifier::None) | &TypeBound::ForLifetime(_, path) => { // FIXME Don't silently drop the hrtb lifetimes here - if let Some((trait_ref, mut ctx)) = - self.lower_trait_ref_from_path(path, self_ty.clone()) - { + if let Some((trait_ref, mut ctx)) = self.lower_trait_ref_from_path(path, self_ty) { // 
FIXME(sized-hierarchy): Remove this bound modifications once we have implemented // sized-hierarchy correctly. let meta_sized = LangItem::MetaSized .resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate()); let pointee_sized = LangItem::PointeeSized .resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate()); - let destruct = LangItem::Destruct - .resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate()); - let hir_trait_id = trait_ref.hir_trait_id(); - if meta_sized.is_some_and(|it| it == hir_trait_id) - || destruct.is_some_and(|it| it == hir_trait_id) - { + if meta_sized.is_some_and(|it| it == trait_ref.def_id.0) { // Ignore this bound - } else if pointee_sized.is_some_and(|it| it == hir_trait_id) { + } else if pointee_sized.is_some_and(|it| it == trait_ref.def_id.0) { // Regard this as `?Sized` bound ctx.ty_ctx().unsized_types.insert(self_ty); } else { if !ignore_bindings { - assoc_bounds = - ctx.assoc_type_bindings_from_type_bound(trait_ref.clone()); + assoc_bounds = ctx.assoc_type_bindings_from_type_bound(trait_ref); } - clause = - Some(crate::wrap_empty_binders(WhereClause::Implemented(trait_ref))); + clause = Some(Clause(Predicate::new( + interner, + Binder::dummy(rustc_type_ir::PredicateKind::Clause( + rustc_type_ir::ClauseKind::Trait(TraitPredicate { + trait_ref, + polarity: rustc_type_ir::PredicatePolarity::Positive, + }), + )), + ))); } } } @@ -601,95 +701,137 @@ impl<'db> TyLoweringContext<'db> { // `?Sized` has no of them. // If we got another trait here ignore the bound completely. let trait_id = self - .lower_trait_ref_from_path(path, self_ty.clone()) - .map(|(trait_ref, _)| trait_ref.hir_trait_id()); + .lower_trait_ref_from_path(path, self_ty) + .map(|(trait_ref, _)| trait_ref.def_id.0); if trait_id == sized_trait { self.unsized_types.insert(self_ty); } } &TypeBound::Lifetime(l) => { let lifetime = self.lower_lifetime(l); - clause = Some(crate::wrap_empty_binders(WhereClause::TypeOutlives(TypeOutlives { - ty: self_ty, - lifetime, - }))); + clause = Some(Clause(Predicate::new( + self.interner, + Binder::dummy(rustc_type_ir::PredicateKind::Clause( + rustc_type_ir::ClauseKind::TypeOutlives(OutlivesPredicate( + self_ty, lifetime, + )), + )), + ))); } TypeBound::Use(_) | TypeBound::Error => {} } clause.into_iter().chain(assoc_bounds.into_iter().flatten()) } - fn lower_dyn_trait(&mut self, bounds: &[TypeBound]) -> Ty { - let self_ty = TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner); + fn lower_dyn_trait(&mut self, bounds: &[TypeBound]) -> Ty<'db> { + let interner = self.interner; + // FIXME: we should never create non-existential predicates in the first place + // For now, use an error type so we don't run into dummy binder issues + let self_ty = Ty::new_error(interner, ErrorGuaranteed); // INVARIANT: The principal trait bound, if present, must come first. Others may be in any // order but should be in the same order for the same set but possibly different order of // bounds in the input. // INVARIANT: If this function returns `DynTy`, there should be at least one trait bound. // These invariants are utilized by `TyExt::dyn_trait()` and chalk. 
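
The comparator below enforces the invariant spelled out above: the principal (non-auto) trait bound sorts first, auto traits follow in a stable order, and projection (associated-type) bounds come last, with duplicate auto traits deduplicated afterwards. As an illustrative aside, not part of the patch, here is a minimal self-contained model of that ordering rule; `ExistPred` and `sort_dyn_bounds` are invented stand-ins, not the `rustc_type_ir` types used here:

```rust
// Toy model of the dyn-trait bound ordering; not the real types.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum ExistPred {
    Trait(u32),      // principal (non-auto) trait -- must come first
    AutoTrait(u32),  // auto traits, ordered by trait id
    Projection(u32), // associated-type bindings, ordered by assoc-type id
}

fn sort_dyn_bounds(bounds: &mut Vec<ExistPred>) {
    // Variant order gives "principal, auto traits, projections"; duplicates
    // of the same auto trait are permitted and removed afterwards.
    bounds.sort();
    bounds.dedup();
}

fn main() {
    let mut bounds = vec![
        ExistPred::AutoTrait(7),
        ExistPred::Projection(3),
        ExistPred::Trait(1),
        ExistPred::AutoTrait(7),
    ];
    sort_dyn_bounds(&mut bounds);
    assert_eq!(
        bounds,
        vec![ExistPred::Trait(1), ExistPred::AutoTrait(7), ExistPred::Projection(3)]
    );
}
```
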
let mut lifetime = None; - let bounds = self.with_shifted_in(DebruijnIndex::ONE, |ctx| { - let mut lowered_bounds = Vec::new(); + let bounds = self.with_shifted_in(DebruijnIndex::from_u32(1), |ctx| { + let mut lowered_bounds: Vec< + rustc_type_ir::Binder, ExistentialPredicate>>, + > = Vec::new(); for b in bounds { - ctx.lower_type_bound(b, self_ty.clone(), false).for_each(|b| { - let filter = match b.skip_binders() { - WhereClause::Implemented(_) | WhereClause::AliasEq(_) => true, - WhereClause::LifetimeOutlives(_) => false, - WhereClause::TypeOutlives(t) => { - lifetime = Some(t.lifetime.clone()); - false - } - }; - if filter { - lowered_bounds.push(b); + let db = ctx.db; + ctx.lower_type_bound(b, self_ty, false).for_each(|b| { + if let Some(bound) = b + .kind() + .map_bound(|c| match c { + rustc_type_ir::ClauseKind::Trait(t) => { + let id = t.def_id(); + let is_auto = + db.trait_signature(id.0).flags.contains(TraitFlags::AUTO); + if is_auto { + Some(ExistentialPredicate::AutoTrait(t.def_id())) + } else { + Some(ExistentialPredicate::Trait( + ExistentialTraitRef::new_from_args( + interner, + t.def_id(), + GenericArgs::new_from_iter( + interner, + t.trait_ref.args.iter().skip(1), + ), + ), + )) + } + } + rustc_type_ir::ClauseKind::Projection(p) => { + Some(ExistentialPredicate::Projection( + ExistentialProjection::new_from_args( + interner, + p.def_id(), + GenericArgs::new_from_iter( + interner, + p.projection_term.args.iter().skip(1), + ), + p.term, + ), + )) + } + rustc_type_ir::ClauseKind::TypeOutlives(outlives_predicate) => { + lifetime = Some(outlives_predicate.1); + None + } + rustc_type_ir::ClauseKind::RegionOutlives(_) + | rustc_type_ir::ClauseKind::ConstArgHasType(_, _) + | rustc_type_ir::ClauseKind::WellFormed(_) + | rustc_type_ir::ClauseKind::ConstEvaluatable(_) + | rustc_type_ir::ClauseKind::HostEffect(_) + | rustc_type_ir::ClauseKind::UnstableFeature(_) => unreachable!(), + }) + .transpose() + { + lowered_bounds.push(bound); } - }); + }) } let mut multiple_regular_traits = false; let mut multiple_same_projection = false; lowered_bounds.sort_unstable_by(|lhs, rhs| { use std::cmp::Ordering; - match (lhs.skip_binders(), rhs.skip_binders()) { - (WhereClause::Implemented(lhs), WhereClause::Implemented(rhs)) => { - let lhs_id = lhs.trait_id; - let lhs_is_auto = ctx - .db - .trait_signature(from_chalk_trait_id(lhs_id)) - .flags - .contains(TraitFlags::AUTO); - let rhs_id = rhs.trait_id; - let rhs_is_auto = ctx - .db - .trait_signature(from_chalk_trait_id(rhs_id)) - .flags - .contains(TraitFlags::AUTO); - - if !lhs_is_auto && !rhs_is_auto { - multiple_regular_traits = true; - } - // Note that the ordering here is important; this ensures the invariant - // mentioned above. - (lhs_is_auto, lhs_id).cmp(&(rhs_is_auto, rhs_id)) + match ((*lhs).skip_binder(), (*rhs).skip_binder()) { + (ExistentialPredicate::Trait(_), ExistentialPredicate::Trait(_)) => { + multiple_regular_traits = true; + // Order doesn't matter - we error + Ordering::Equal } - (WhereClause::Implemented(_), _) => Ordering::Less, - (_, WhereClause::Implemented(_)) => Ordering::Greater, - (WhereClause::AliasEq(lhs), WhereClause::AliasEq(rhs)) => { - match (&lhs.alias, &rhs.alias) { - (AliasTy::Projection(lhs_proj), AliasTy::Projection(rhs_proj)) => { - // We only compare the `associated_ty_id`s. We shouldn't have - // multiple bounds for an associated type in the correct Rust code, - // and if we do, we error out. 
- if lhs_proj.associated_ty_id == rhs_proj.associated_ty_id { - multiple_same_projection = true; - } - lhs_proj.associated_ty_id.cmp(&rhs_proj.associated_ty_id) - } - // We don't produce `AliasTy::Opaque`s yet. + ( + ExistentialPredicate::AutoTrait(lhs_id), + ExistentialPredicate::AutoTrait(rhs_id), + ) => lhs_id.0.cmp(&rhs_id.0), + (ExistentialPredicate::Trait(_), _) => Ordering::Less, + (_, ExistentialPredicate::Trait(_)) => Ordering::Greater, + (ExistentialPredicate::AutoTrait(_), _) => Ordering::Less, + (_, ExistentialPredicate::AutoTrait(_)) => Ordering::Greater, + ( + ExistentialPredicate::Projection(lhs), + ExistentialPredicate::Projection(rhs), + ) => { + let lhs_id = match lhs.def_id { + SolverDefId::TypeAliasId(id) => id, _ => unreachable!(), + }; + let rhs_id = match rhs.def_id { + SolverDefId::TypeAliasId(id) => id, + _ => unreachable!(), + }; + // We only compare the `associated_ty_id`s. We shouldn't have + // multiple bounds for an associated type in the correct Rust code, + // and if we do, we error out. + if lhs_id == rhs_id { + multiple_same_projection = true; } + lhs_id.as_id().index().cmp(&rhs_id.as_id().index()) } - // `WhereClause::{TypeOutlives, LifetimeOutlives}` have been filtered out - _ => unreachable!(), } }); @@ -697,260 +839,535 @@ impl<'db> TyLoweringContext<'db> { return None; } - lowered_bounds.first().and_then(|b| b.trait_id())?; + if !lowered_bounds.first().map_or(false, |b| { + matches!( + b.as_ref().skip_binder(), + ExistentialPredicate::Trait(_) | ExistentialPredicate::AutoTrait(_) + ) + }) { + return None; + } // As multiple occurrences of the same auto traits *are* permitted, we deduplicate the // bounds. We shouldn't have repeated elements besides auto traits at this point. lowered_bounds.dedup(); - Some(QuantifiedWhereClauses::from_iter(Interner, lowered_bounds)) + Some(BoundExistentialPredicates::new_from_iter(interner, lowered_bounds)) }); if let Some(bounds) = bounds { - let bounds = crate::make_single_type_binders(bounds); - TyKind::Dyn(DynTy { - bounds, - lifetime: match lifetime { - Some(it) => match it.bound_var(Interner) { - Some(bound_var) => bound_var - .shifted_out_to(DebruijnIndex::new(2)) - .map(|bound_var| LifetimeData::BoundVar(bound_var).intern(Interner)) - .unwrap_or(it), - None => it, - }, - None => error_lifetime(), + let region = match lifetime { + Some(it) => match it.kind() { + rustc_type_ir::RegionKind::ReBound(db, var) => Region::new_bound( + self.interner, + db.shifted_out_to_binder(DebruijnIndex::from_u32(2)), + var, + ), + _ => it, }, - }) - .intern(Interner) + None => Region::new_static(self.interner), + }; + Ty::new_dynamic(self.interner, bounds, region) } else { // FIXME: report error // (additional non-auto traits, associated type rebound, or no resolved trait) - TyKind::Error.intern(Interner) + Ty::new_error(self.interner, ErrorGuaranteed) } } - fn lower_impl_trait(&mut self, bounds: &[TypeBound], krate: Crate) -> ImplTrait { + fn lower_impl_trait( + &mut self, + def_id: SolverDefId, + bounds: &[TypeBound], + krate: Crate, + ) -> ImplTrait<'db> { + let interner = self.interner; cov_mark::hit!(lower_rpit); - let self_ty = TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner); - let predicates = self.with_shifted_in(DebruijnIndex::ONE, |ctx| { + let args = GenericArgs::identity_for_item(interner, def_id); + let self_ty = Ty::new_alias( + self.interner, + rustc_type_ir::AliasTyKind::Opaque, + AliasTy::new_from_args(interner, def_id, args), + ); + let predicates = 
self.with_shifted_in(DebruijnIndex::from_u32(1), |ctx| { let mut predicates = Vec::new(); for b in bounds { - predicates.extend(ctx.lower_type_bound(b, self_ty.clone(), false)); + predicates.extend(ctx.lower_type_bound(b, self_ty, false)); } if !ctx.unsized_types.contains(&self_ty) { - let sized_trait = - LangItem::Sized.resolve_trait(ctx.db, krate).map(to_chalk_trait_id); + let sized_trait = LangItem::Sized.resolve_trait(self.db, krate); let sized_clause = sized_trait.map(|trait_id| { - let clause = WhereClause::Implemented(TraitRef { - trait_id, - substitution: Substitution::from1(Interner, self_ty.clone()), - }); - crate::wrap_empty_binders(clause) + let trait_ref = TraitRef::new_from_args( + interner, + trait_id.into(), + GenericArgs::new_from_iter(interner, [self_ty.into()]), + ); + Clause(Predicate::new( + interner, + Binder::dummy(rustc_type_ir::PredicateKind::Clause( + rustc_type_ir::ClauseKind::Trait(TraitPredicate { + trait_ref, + polarity: rustc_type_ir::PredicatePolarity::Positive, + }), + )), + )) }); predicates.extend(sized_clause); } predicates.shrink_to_fit(); predicates }); - ImplTrait { bounds: crate::make_single_type_binders(predicates) } + ImplTrait { predicates } } - pub(crate) fn lower_lifetime(&self, lifetime: LifetimeRefId) -> Lifetime { + pub(crate) fn lower_lifetime(&mut self, lifetime: LifetimeRefId) -> Region<'db> { match self.resolver.resolve_lifetime(&self.store[lifetime]) { Some(resolution) => match resolution { - LifetimeNs::Static => static_lifetime(), - LifetimeNs::LifetimeParam(id) => match self.type_param_mode { - ParamLoweringMode::Placeholder => { - let generics = self.generics(); - let idx = generics.lifetime_idx(id).unwrap(); - LifetimeData::Placeholder(lt_to_placeholder_idx(self.db, id, idx as u32)) - } - ParamLoweringMode::Variable => { - let idx = match self.generics().lifetime_idx(id) { - None => return error_lifetime(), - Some(idx) => idx, - }; - - LifetimeData::BoundVar(BoundVar::new(self.in_binders, idx)) - } + LifetimeNs::Static => Region::new_static(self.interner), + LifetimeNs::LifetimeParam(id) => { + let idx = match self.generics().lifetime_idx(id) { + None => return Region::error(self.interner), + Some(idx) => idx, + }; + self.region_param(id, idx as u32) } - .intern(Interner), }, - None => error_lifetime(), + None => Region::error(self.interner), } } } -fn named_associated_type_shorthand_candidates( - db: &dyn HirDatabase, - // If the type parameter is defined in an impl and we're in a method, there - // might be additional where clauses to consider - def: GenericDefId, - res: TypeNs, - assoc_name: Option, - // Do NOT let `cb` touch `TraitRef` outside of `TyLoweringContext`. Its substitution contains - // free `BoundVar`s that need to be shifted and only `TyLoweringContext` knows how to do that - // properly (see `TyLoweringContext::select_associated_type()`). 
- mut cb: impl FnMut(&Name, &TraitRef, TypeAliasId) -> Option, -) -> Option { - let mut search = |t| { - all_super_trait_refs(db, t, |t| { - let data = t.hir_trait_id().trait_items(db); - - for (name, assoc_id) in &data.items { - if let AssocItemId::TypeAliasId(alias) = assoc_id - && let Some(result) = cb(name, &t, *alias) - { - return Some(result); - } - } - None - }) - }; - - let interner = DbInterner::new_with(db, None, None); - match res { - TypeNs::SelfType(impl_id) => { - let trait_ref = db.impl_trait(impl_id)?; - - let impl_id_as_generic_def: GenericDefId = impl_id.into(); - if impl_id_as_generic_def != def { - let subst = TyBuilder::subst_for_def(db, impl_id, None) - .fill_with_bound_vars(DebruijnIndex::INNERMOST, 0) - .build(); - let args: crate::next_solver::GenericArgs<'_> = subst.to_nextsolver(interner); - let trait_ref = trait_ref.instantiate(interner, args).to_chalk(interner); - search(trait_ref) - } else { - search(trait_ref.skip_binder().to_chalk(interner)) - } - } - TypeNs::GenericParam(param_id) => { - let predicates = db.generic_predicates_for_param(def, param_id.into(), assoc_name); - let res = predicates.iter().find_map(|pred| match pred.skip_binders().skip_binders() { - // FIXME: how to correctly handle higher-ranked bounds here? - WhereClause::Implemented(tr) => search( - tr.clone() - .shifted_out_to(Interner, DebruijnIndex::ONE) - .expect("FIXME unexpected higher-ranked trait bound"), - ), - _ => None, - }); - if res.is_some() { - return res; - } - // Handle `Self::Type` referring to own associated type in trait definitions - if let GenericDefId::TraitId(trait_id) = param_id.parent() { - let trait_generics = generics(db, trait_id.into()); - if trait_generics[param_id.local_id()].is_trait_self() { - let trait_ref = TyBuilder::trait_ref(db, trait_id) - .fill_with_bound_vars(DebruijnIndex::INNERMOST, 0) - .build(); - return search(trait_ref); - } - } - None - } - _ => None, +pub(crate) fn lower_mutability(m: hir_def::type_ref::Mutability) -> Mutability { + match m { + hir_def::type_ref::Mutability::Shared => Mutability::Not, + hir_def::type_ref::Mutability::Mut => Mutability::Mut, } } +fn unknown_const(_ty: Ty<'_>) -> Const<'_> { + Const::new(DbInterner::conjure(), ConstKind::Error(ErrorGuaranteed)) +} + pub(crate) type Diagnostics = Option>; pub(crate) fn create_diagnostics(diagnostics: Vec) -> Diagnostics { (!diagnostics.is_empty()).then(|| ThinArc::from_header_and_iter((), diagnostics.into_iter())) } -pub(crate) fn field_types_query( - db: &dyn HirDatabase, - variant_id: VariantId, -) -> Arc>> { - field_types_with_diagnostics_query(db, variant_id).0 +pub(crate) fn impl_trait_query<'db>( + db: &'db dyn HirDatabase, + impl_id: ImplId, +) -> Option>> { + db.impl_trait_with_diagnostics(impl_id).map(|it| it.0) } -/// Build the type of all specific fields of a struct or enum variant. 
-pub(crate) fn field_types_with_diagnostics_query( - db: &dyn HirDatabase, - variant_id: VariantId, -) -> (Arc>>, Diagnostics) { - let var_data = variant_id.fields(db); - let fields = var_data.fields(); - if fields.is_empty() { - return (Arc::new(ArenaMap::default()), None); - } - - let (resolver, def): (_, GenericDefId) = match variant_id { - VariantId::StructId(it) => (it.resolver(db), it.into()), - VariantId::UnionId(it) => (it.resolver(db), it.into()), - VariantId::EnumVariantId(it) => (it.resolver(db), it.lookup(db).parent.into()), - }; - let generics = generics(db, def); - let mut res = ArenaMap::default(); +pub(crate) fn impl_trait_with_diagnostics_query<'db>( + db: &'db dyn HirDatabase, + impl_id: ImplId, +) -> Option<(EarlyBinder<'db, TraitRef<'db>>, Diagnostics)> { + let impl_data = db.impl_signature(impl_id); + let resolver = impl_id.resolver(db); let mut ctx = TyLoweringContext::new( db, &resolver, - &var_data.store, - def, - LifetimeElisionKind::AnonymousReportError, - ) - .with_type_param_mode(ParamLoweringMode::Variable); - for (field_id, field_data) in fields.iter() { - res.insert(field_id, make_binders(db, &generics, ctx.lower_ty(field_data.type_ref))); - } - (Arc::new(res), create_diagnostics(ctx.diagnostics)) + &impl_data.store, + impl_id.into(), + LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true }, + ); + let self_ty = db.impl_self_ty(impl_id).skip_binder(); + let target_trait = impl_data.target_trait.as_ref()?; + let trait_ref = EarlyBinder::bind(ctx.lower_trait_ref(target_trait, self_ty)?); + Some((trait_ref, create_diagnostics(ctx.diagnostics))) } -/// This query exists only to be used when resolving short-hand associated types -/// like `T::Item`. -/// -/// See the analogous query in rustc and its comment: -/// -/// This is a query mostly to handle cycles somewhat gracefully; e.g. the -/// following bounds are disallowed: `T: Foo, U: Foo`, but -/// these are fine: `T: Foo, U: Foo<()>`. 
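
The examples in the doc comment above (and in its re-added counterpart for the new query further down) have lost their angle-bracketed arguments; in the corresponding rustc comment the disallowed bounds read `T: Foo<U::Item>, U: Foo<T::Item>`, while `T: Foo<U::Item>, U: Foo<()>` is fine. A small standalone illustration of the accepted shape, with the cyclic variant left as a comment since it is precisely what this query exists to reject:

```rust
// Editorial illustration, not part of the patch.
trait Foo<A> {
    type Item;
}

// Fine: only `T`'s bound refers to the other parameter's associated type.
fn fine<T, U>()
where
    T: Foo<U::Item>,
    U: Foo<()>,
{
}

// Disallowed (cyclic): resolving `U::Item` for `T`'s bound needs `U`'s bounds,
// whose `T::Item` needs `T`'s bounds again.
// fn cyclic<T, U>() where T: Foo<U::Item>, U: Foo<T::Item> {}

fn main() {}
```
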
-pub(crate) fn generic_predicates_for_param_query( - db: &dyn HirDatabase, - def: GenericDefId, - param_id: TypeOrConstParamId, - assoc_name: Option, -) -> GenericPredicates { - let generics = generics(db, def); - if generics.has_no_predicates() && generics.is_empty() { - return GenericPredicates(None); +pub(crate) fn return_type_impl_traits<'db>( + db: &'db dyn HirDatabase, + def: hir_def::FunctionId, +) -> Option>>> { + // FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe + let data = db.function_signature(def); + let resolver = def.resolver(db); + let mut ctx_ret = + TyLoweringContext::new(db, &resolver, &data.store, def.into(), LifetimeElisionKind::Infer) + .with_impl_trait_mode(ImplTraitLoweringMode::Opaque); + if let Some(ret_type) = data.ret_type { + let _ret = ctx_ret.lower_ty(ret_type); + } + let return_type_impl_traits = + ImplTraits { impl_traits: ctx_ret.impl_trait_mode.opaque_type_data }; + if return_type_impl_traits.impl_traits.is_empty() { + None + } else { + Some(Arc::new(EarlyBinder::bind(return_type_impl_traits))) } +} +pub(crate) fn type_alias_impl_traits<'db>( + db: &'db dyn HirDatabase, + def: hir_def::TypeAliasId, +) -> Option>>> { + let data = db.type_alias_signature(def); let resolver = def.resolver(db); let mut ctx = TyLoweringContext::new( db, &resolver, - generics.store(), - def, + &data.store, + def.into(), LifetimeElisionKind::AnonymousReportError, ) - .with_type_param_mode(ParamLoweringMode::Variable); + .with_impl_trait_mode(ImplTraitLoweringMode::Opaque); + if let Some(type_ref) = data.ty { + let _ty = ctx.lower_ty(type_ref); + } + let type_alias_impl_traits = ImplTraits { impl_traits: ctx.impl_trait_mode.opaque_type_data }; + if type_alias_impl_traits.impl_traits.is_empty() { + None + } else { + Some(Arc::new(EarlyBinder::bind(type_alias_impl_traits))) + } +} - // we have to filter out all other predicates *first*, before attempting to lower them - let predicate = |pred: &_, ctx: &mut TyLoweringContext<'_>| match pred { - WherePredicate::ForLifetime { target, bound, .. } - | WherePredicate::TypeBound { target, bound, .. } => { - let invalid_target = { ctx.lower_ty_only_param(*target) != Some(param_id) }; - if invalid_target { - // FIXME(sized-hierarchy): Revisit and adjust this properly once we have implemented - // sized-hierarchy correctly. 
- // If this is filtered out without lowering, `?Sized` or `PointeeSized` is not gathered into - // `ctx.unsized_types` - let lower = || -> bool { - match bound { - TypeBound::Path(_, TraitBoundModifier::Maybe) => true, - TypeBound::Path(path, _) | TypeBound::ForLifetime(_, path) => { - let TypeRef::Path(path) = &ctx.store[path.type_ref()] else { - return false; - }; - let Some(pointee_sized) = - LangItem::PointeeSized.resolve_trait(ctx.db, ctx.resolver.krate()) - else { - return false; +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum TyDefId { + BuiltinType(BuiltinType), + AdtId(AdtId), + TypeAliasId(TypeAliasId), +} +impl_from!(BuiltinType, AdtId(StructId, EnumId, UnionId), TypeAliasId for TyDefId); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype)] +pub enum ValueTyDefId { + FunctionId(FunctionId), + StructId(StructId), + UnionId(UnionId), + EnumVariantId(EnumVariantId), + ConstId(ConstId), + StaticId(StaticId), +} +impl_from!(FunctionId, StructId, UnionId, EnumVariantId, ConstId, StaticId for ValueTyDefId); + +impl ValueTyDefId { + pub(crate) fn to_generic_def_id(self, db: &dyn HirDatabase) -> GenericDefId { + match self { + Self::FunctionId(id) => id.into(), + Self::StructId(id) => id.into(), + Self::UnionId(id) => id.into(), + Self::EnumVariantId(var) => var.lookup(db).parent.into(), + Self::ConstId(id) => id.into(), + Self::StaticId(id) => id.into(), + } + } +} + +/// Build the declared type of an item. This depends on the namespace; e.g. for +/// `struct Foo(usize)`, we have two types: The type of the struct itself, and +/// the constructor function `(usize) -> Foo` which lives in the values +/// namespace. +pub(crate) fn ty_query<'db>(db: &'db dyn HirDatabase, def: TyDefId) -> EarlyBinder<'db, Ty<'db>> { + let interner = DbInterner::new_with(db, None, None); + match def { + TyDefId::BuiltinType(it) => EarlyBinder::bind(Ty::from_builtin_type(interner, it)), + TyDefId::AdtId(it) => EarlyBinder::bind(Ty::new_adt( + interner, + it, + GenericArgs::identity_for_item(interner, it.into()), + )), + TyDefId::TypeAliasId(it) => db.type_for_type_alias_with_diagnostics(it).0, + } +} + +/// Build the declared type of a function. This should not need to look at the +/// function body. +fn type_for_fn<'db>(db: &'db dyn HirDatabase, def: FunctionId) -> EarlyBinder<'db, Ty<'db>> { + let interner = DbInterner::new_with(db, None, None); + EarlyBinder::bind(Ty::new_fn_def( + interner, + CallableDefId::FunctionId(def).into(), + GenericArgs::identity_for_item(interner, def.into()), + )) +} + +/// Build the declared type of a const. +fn type_for_const<'db>(db: &'db dyn HirDatabase, def: ConstId) -> EarlyBinder<'db, Ty<'db>> { + let resolver = def.resolver(db); + let data = db.const_signature(def); + let parent = def.loc(db).container; + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &data.store, + def.into(), + LifetimeElisionKind::AnonymousReportError, + ); + ctx.set_lifetime_elision(LifetimeElisionKind::for_const(ctx.interner, parent)); + EarlyBinder::bind(ctx.lower_ty(data.type_ref)) +} + +/// Build the declared type of a static. 
+fn type_for_static<'db>(db: &'db dyn HirDatabase, def: StaticId) -> EarlyBinder<'db, Ty<'db>> { + let resolver = def.resolver(db); + let data = db.static_signature(def); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &data.store, + def.into(), + LifetimeElisionKind::AnonymousReportError, + ); + ctx.set_lifetime_elision(LifetimeElisionKind::Elided(Region::new_static(ctx.interner))); + EarlyBinder::bind(ctx.lower_ty(data.type_ref)) +} + +/// Build the type of a tuple struct constructor. +fn type_for_struct_constructor<'db>( + db: &'db dyn HirDatabase, + def: StructId, +) -> Option>> { + let struct_data = def.fields(db); + match struct_data.shape { + FieldsShape::Record => None, + FieldsShape::Unit => Some(type_for_adt(db, def.into())), + FieldsShape::Tuple => { + let interner = DbInterner::new_with(db, None, None); + Some(EarlyBinder::bind(Ty::new_fn_def( + interner, + CallableDefId::StructId(def).into(), + GenericArgs::identity_for_item(interner, def.into()), + ))) + } + } +} + +/// Build the type of a tuple enum variant constructor. +fn type_for_enum_variant_constructor<'db>( + db: &'db dyn HirDatabase, + def: EnumVariantId, +) -> Option>> { + let struct_data = def.fields(db); + match struct_data.shape { + FieldsShape::Record => None, + FieldsShape::Unit => Some(type_for_adt(db, def.loc(db).parent.into())), + FieldsShape::Tuple => { + let interner = DbInterner::new_with(db, None, None); + Some(EarlyBinder::bind(Ty::new_fn_def( + interner, + CallableDefId::EnumVariantId(def).into(), + GenericArgs::identity_for_item(interner, def.loc(db).parent.into()), + ))) + } + } +} + +pub(crate) fn value_ty_query<'db>( + db: &'db dyn HirDatabase, + def: ValueTyDefId, +) -> Option>> { + match def { + ValueTyDefId::FunctionId(it) => Some(type_for_fn(db, it)), + ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it), + ValueTyDefId::UnionId(it) => Some(type_for_adt(db, it.into())), + ValueTyDefId::EnumVariantId(it) => type_for_enum_variant_constructor(db, it), + ValueTyDefId::ConstId(it) => Some(type_for_const(db, it)), + ValueTyDefId::StaticId(it) => Some(type_for_static(db, it)), + } +} + +pub(crate) fn type_for_type_alias_with_diagnostics_query<'db>( + db: &'db dyn HirDatabase, + t: TypeAliasId, +) -> (EarlyBinder<'db, Ty<'db>>, Diagnostics) { + let type_alias_data = db.type_alias_signature(t); + let mut diags = None; + let resolver = t.resolver(db); + let interner = DbInterner::new_with(db, Some(resolver.krate()), None); + let inner = if type_alias_data.flags.contains(TypeAliasFlags::IS_EXTERN) { + EarlyBinder::bind(Ty::new_foreign(interner, t.into())) + } else { + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &type_alias_data.store, + t.into(), + LifetimeElisionKind::AnonymousReportError, + ) + .with_impl_trait_mode(ImplTraitLoweringMode::Opaque); + let res = EarlyBinder::bind( + type_alias_data + .ty + .map(|type_ref| ctx.lower_ty(type_ref)) + .unwrap_or_else(|| Ty::new_error(interner, ErrorGuaranteed)), + ); + diags = create_diagnostics(ctx.diagnostics); + res + }; + (inner, diags) +} + +pub(crate) fn type_for_type_alias_with_diagnostics_cycle_result<'db>( + db: &'db dyn HirDatabase, + _adt: TypeAliasId, +) -> (EarlyBinder<'db, Ty<'db>>, Diagnostics) { + (EarlyBinder::bind(Ty::new_error(DbInterner::new_with(db, None, None), ErrorGuaranteed)), None) +} + +pub(crate) fn impl_self_ty_query<'db>( + db: &'db dyn HirDatabase, + impl_id: ImplId, +) -> EarlyBinder<'db, Ty<'db>> { + db.impl_self_ty_with_diagnostics(impl_id).0 +} + +pub(crate) fn 
impl_self_ty_with_diagnostics_query<'db>( + db: &'db dyn HirDatabase, + impl_id: ImplId, +) -> (EarlyBinder<'db, Ty<'db>>, Diagnostics) { + let resolver = impl_id.resolver(db); + + let impl_data = db.impl_signature(impl_id); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &impl_data.store, + impl_id.into(), + LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true }, + ); + let ty = ctx.lower_ty(impl_data.self_ty); + assert!(!ty.has_escaping_bound_vars()); + (EarlyBinder::bind(ty), create_diagnostics(ctx.diagnostics)) +} + +pub(crate) fn impl_self_ty_with_diagnostics_cycle_result( + db: &dyn HirDatabase, + _impl_id: ImplId, +) -> (EarlyBinder<'_, Ty<'_>>, Diagnostics) { + (EarlyBinder::bind(Ty::new_error(DbInterner::new_with(db, None, None), ErrorGuaranteed)), None) +} + +pub(crate) fn const_param_ty_query<'db>(db: &'db dyn HirDatabase, def: ConstParamId) -> Ty<'db> { + db.const_param_ty_with_diagnostics(def).0 +} + +// returns None if def is a type arg +pub(crate) fn const_param_ty_with_diagnostics_query<'db>( + db: &'db dyn HirDatabase, + def: ConstParamId, +) -> (Ty<'db>, Diagnostics) { + let (parent_data, store) = db.generic_params_and_store(def.parent()); + let data = &parent_data[def.local_id()]; + let resolver = def.parent().resolver(db); + let interner = DbInterner::new_with(db, Some(resolver.krate()), None); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &store, + def.parent(), + LifetimeElisionKind::AnonymousReportError, + ); + let ty = match data { + TypeOrConstParamData::TypeParamData(_) => { + never!(); + Ty::new_error(interner, ErrorGuaranteed) + } + TypeOrConstParamData::ConstParamData(d) => ctx.lower_ty(d.ty), + }; + (ty, create_diagnostics(ctx.diagnostics)) +} + +pub(crate) fn const_param_ty_with_diagnostics_cycle_result<'db>( + db: &'db dyn HirDatabase, + _: crate::db::HirDatabaseData, + def: ConstParamId, +) -> (Ty<'db>, Diagnostics) { + let resolver = def.parent().resolver(db); + let interner = DbInterner::new_with(db, Some(resolver.krate()), None); + (Ty::new_error(interner, ErrorGuaranteed), None) +} + +pub(crate) fn field_types_query<'db>( + db: &'db dyn HirDatabase, + variant_id: VariantId, +) -> Arc>>> { + db.field_types_with_diagnostics(variant_id).0 +} + +/// Build the type of all specific fields of a struct or enum variant. +pub(crate) fn field_types_with_diagnostics_query<'db>( + db: &'db dyn HirDatabase, + variant_id: VariantId, +) -> (Arc>>>, Diagnostics) { + let var_data = variant_id.fields(db); + let fields = var_data.fields(); + if fields.is_empty() { + return (Arc::new(ArenaMap::default()), None); + } + + let (resolver, def): (_, GenericDefId) = match variant_id { + VariantId::StructId(it) => (it.resolver(db), it.into()), + VariantId::UnionId(it) => (it.resolver(db), it.into()), + VariantId::EnumVariantId(it) => (it.resolver(db), it.lookup(db).parent.into()), + }; + let mut res = ArenaMap::default(); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + &var_data.store, + def, + LifetimeElisionKind::AnonymousReportError, + ); + for (field_id, field_data) in var_data.fields().iter() { + res.insert(field_id, EarlyBinder::bind(ctx.lower_ty(field_data.type_ref))); + } + (Arc::new(res), create_diagnostics(ctx.diagnostics)) +} + +/// This query exists only to be used when resolving short-hand associated types +/// like `T::Item`. +/// +/// See the analogous query in rustc and its comment: +/// +/// This is a query mostly to handle cycles somewhat gracefully; e.g. 
the +/// following bounds are disallowed: `T: Foo, U: Foo`, but +/// these are fine: `T: Foo, U: Foo<()>`. +#[tracing::instrument(skip(db), ret)] +pub(crate) fn generic_predicates_for_param_query<'db>( + db: &'db dyn HirDatabase, + def: GenericDefId, + param_id: TypeOrConstParamId, + assoc_name: Option, +) -> GenericPredicates<'db> { + let generics = generics(db, def); + let interner = DbInterner::new_with(db, None, None); + let resolver = def.resolver(db); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + generics.store(), + def, + LifetimeElisionKind::AnonymousReportError, + ); + + // we have to filter out all other predicates *first*, before attempting to lower them + let predicate = |pred: &_, ctx: &mut TyLoweringContext<'_, '_>| match pred { + WherePredicate::ForLifetime { target, bound, .. } + | WherePredicate::TypeBound { target, bound, .. } => { + let invalid_target = { ctx.lower_ty_only_param(*target) != Some(param_id) }; + if invalid_target { + // FIXME(sized-hierarchy): Revisit and adjust this properly once we have implemented + // sized-hierarchy correctly. + // If this is filtered out without lowering, `?Sized` or `PointeeSized` is not gathered into + // `ctx.unsized_types` + let lower = || -> bool { + match bound { + TypeBound::Path(_, TraitBoundModifier::Maybe) => true, + TypeBound::Path(path, _) | TypeBound::ForLifetime(_, path) => { + let TypeRef::Path(path) = &ctx.store[path.type_ref()] else { + return false; + }; + let Some(pointee_sized) = + LangItem::PointeeSized.resolve_trait(ctx.db, ctx.resolver.krate()) + else { + return false; }; // Lower the path directly with `Resolver` instead of PathLoweringContext` // to prevent diagnostics duplications. @@ -962,7 +1379,8 @@ pub(crate) fn generic_predicates_for_param_query( } }(); if lower { - ctx.lower_where_predicate(pred, true).for_each(drop); + ctx.lower_where_predicate(pred, true, &generics, PredicateFilter::All) + .for_each(drop); } return false; } @@ -980,8 +1398,8 @@ pub(crate) fn generic_predicates_for_param_query( return false; }; - all_super_traits(db, tr).iter().any(|tr| { - tr.trait_items(db).items.iter().any(|(name, item)| { + rustc_type_ir::elaborate::supertrait_def_ids(interner, tr.into()).any(|tr| { + tr.0.trait_items(db).items.iter().any(|(name, item)| { matches!(item, AssocItemId::TypeAliasId(_)) && name == assoc_name }) }) @@ -998,27 +1416,23 @@ pub(crate) fn generic_predicates_for_param_query( ctx.store = maybe_parent_generics.store(); for pred in maybe_parent_generics.where_predicates() { if predicate(pred, &mut ctx) { - predicates.extend( - ctx.lower_where_predicate(pred, true).map(|p| make_binders(db, &generics, p)), - ); + predicates.extend(ctx.lower_where_predicate( + pred, + true, + maybe_parent_generics, + PredicateFilter::All, + )); } } } - let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST); - if !subst.is_empty(Interner) { + let args = GenericArgs::identity_for_item(interner, def.into()); + if !args.is_empty() { let explicitly_unsized_tys = ctx.unsized_types; - if let Some(implicitly_sized_predicates) = implicitly_sized_clauses( - db, - param_id.parent, - &explicitly_unsized_tys, - &subst, - &resolver, - ) { - predicates.extend( - implicitly_sized_predicates - .map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p))), - ); + if let Some(implicitly_sized_predicates) = + implicitly_sized_clauses(db, param_id.parent, &explicitly_unsized_tys, &args, &resolver) + { + predicates.extend(implicitly_sized_predicates); }; } 
GenericPredicates(predicates.is_empty().not().then(|| predicates.into())) @@ -1029,53 +1443,206 @@ pub(crate) fn generic_predicates_for_param_cycle_result( _def: GenericDefId, _param_id: TypeOrConstParamId, _assoc_name: Option, -) -> GenericPredicates { +) -> GenericPredicates<'_> { GenericPredicates(None) } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct GenericPredicates(Option]>>); +pub struct GenericPredicates<'db>(Option]>>); -impl ops::Deref for GenericPredicates { - type Target = [Binders]; +impl<'db> GenericPredicates<'db> { + #[inline] + pub fn instantiate( + &self, + interner: DbInterner<'db>, + args: GenericArgs<'db>, + ) -> Option>> { + self.0 + .as_ref() + .map(|it| EarlyBinder::bind(it.iter().copied()).iter_instantiated(interner, args)) + } + + #[inline] + pub fn instantiate_identity(&self) -> Option>> { + self.0.as_ref().map(|it| it.iter().copied()) + } +} + +impl<'db> ops::Deref for GenericPredicates<'db> { + type Target = [Clause<'db>]; fn deref(&self) -> &Self::Target { self.0.as_deref().unwrap_or(&[]) } } -/// Resolve the where clause(s) of an item with generics. -pub(crate) fn generic_predicates_query( +pub(crate) fn trait_environment_for_body_query( db: &dyn HirDatabase, + def: DefWithBodyId, +) -> Arc> { + let Some(def) = def.as_generic_def_id(db) else { + let krate = def.module(db).krate(); + return TraitEnvironment::empty(krate); + }; + db.trait_environment(def) +} + +pub(crate) fn trait_environment_query<'db>( + db: &'db dyn HirDatabase, + def: GenericDefId, +) -> Arc> { + let generics = generics(db, def); + if generics.has_no_predicates() && generics.is_empty() { + return TraitEnvironment::empty(def.krate(db)); + } + + let resolver = def.resolver(db); + let mut ctx = TyLoweringContext::new( + db, + &resolver, + generics.store(), + def, + LifetimeElisionKind::AnonymousReportError, + ); + let mut traits_in_scope = Vec::new(); + let mut clauses = Vec::new(); + for maybe_parent_generics in + std::iter::successors(Some(&generics), |generics| generics.parent_generics()) + { + ctx.store = maybe_parent_generics.store(); + for pred in maybe_parent_generics.where_predicates() { + for pred in ctx.lower_where_predicate(pred, false, &generics, PredicateFilter::All) { + if let rustc_type_ir::ClauseKind::Trait(tr) = pred.kind().skip_binder() { + traits_in_scope.push((tr.self_ty(), tr.def_id().0)); + } + clauses.push(pred); + } + } + } + + if let Some(trait_id) = def.assoc_trait_container(db) { + // add `Self: Trait` to the environment in trait + // function default implementations (and speculative code + // inside consts or type aliases) + cov_mark::hit!(trait_self_implements_self); + let trait_ref = TraitRef::identity(ctx.interner, trait_id.into()); + let clause = Clause(Predicate::new( + ctx.interner, + Binder::dummy(rustc_type_ir::PredicateKind::Clause(rustc_type_ir::ClauseKind::Trait( + TraitPredicate { trait_ref, polarity: rustc_type_ir::PredicatePolarity::Positive }, + ))), + )); + clauses.push(clause); + } + + let explicitly_unsized_tys = ctx.unsized_types; + + let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate()); + if let Some(sized_trait) = sized_trait { + let (mut generics, mut def_id) = + (crate::next_solver::generics::generics(db, def.into()), def); + loop { + let self_idx = trait_self_param_idx(db, def_id); + for (idx, p) in generics.own_params.iter().enumerate() { + if let Some(self_idx) = self_idx + && p.index() as usize == self_idx + { + continue; + } + let GenericParamId::TypeParamId(param_id) = p.id else { + continue; + }; + let idx = 
idx as u32 + generics.parent_count as u32; + let param_ty = Ty::new_param(ctx.interner, param_id, idx); + if explicitly_unsized_tys.contains(¶m_ty) { + continue; + } + let trait_ref = TraitRef::new_from_args( + ctx.interner, + sized_trait.into(), + GenericArgs::new_from_iter(ctx.interner, [param_ty.into()]), + ); + let clause = Clause(Predicate::new( + ctx.interner, + Binder::dummy(rustc_type_ir::PredicateKind::Clause( + rustc_type_ir::ClauseKind::Trait(TraitPredicate { + trait_ref, + polarity: rustc_type_ir::PredicatePolarity::Positive, + }), + )), + )); + clauses.push(clause); + } + + if let Some(g) = generics.parent { + generics = crate::next_solver::generics::generics(db, g.into()); + def_id = g; + } else { + break; + } + } + } + + let clauses = rustc_type_ir::elaborate::elaborate(ctx.interner, clauses); + let clauses = Clauses::new_from_iter(ctx.interner, clauses); + let env = ParamEnv { clauses }; + + TraitEnvironment::new(resolver.krate(), None, traits_in_scope.into_boxed_slice(), env) +} + +#[derive(Copy, Clone, Debug)] +pub(crate) enum PredicateFilter { + SelfTrait, + All, +} + +/// Resolve the where clause(s) of an item with generics. +#[tracing::instrument(skip(db))] +pub(crate) fn generic_predicates_query<'db>( + db: &'db dyn HirDatabase, def: GenericDefId, -) -> GenericPredicates { - generic_predicates_filtered_by(db, def, |_, _| true).0 +) -> GenericPredicates<'db> { + generic_predicates_filtered_by(db, def, PredicateFilter::All, |_| true).0 +} + +pub(crate) fn generic_predicates_without_parent_query<'db>( + db: &'db dyn HirDatabase, + def: GenericDefId, +) -> GenericPredicates<'db> { + generic_predicates_filtered_by(db, def, PredicateFilter::All, |d| d == def).0 +} + +/// Resolve the where clause(s) of an item with generics, +/// except the ones inherited from the parent +pub(crate) fn generic_predicates_without_parent_with_diagnostics_query<'db>( + db: &'db dyn HirDatabase, + def: GenericDefId, +) -> (GenericPredicates<'db>, Diagnostics) { + generic_predicates_filtered_by(db, def, PredicateFilter::All, |d| d == def) } /// Resolve the where clause(s) of an item with generics, /// with a given filter -fn generic_predicates_filtered_by( - db: &dyn HirDatabase, +#[tracing::instrument(skip(db, filter), ret)] +pub(crate) fn generic_predicates_filtered_by<'db, F>( + db: &'db dyn HirDatabase, def: GenericDefId, + predicate_filter: PredicateFilter, filter: F, -) -> (GenericPredicates, Diagnostics) +) -> (GenericPredicates<'db>, Diagnostics) where - F: Fn(&WherePredicate, GenericDefId) -> bool, + F: Fn(GenericDefId) -> bool, { let generics = generics(db, def); - if generics.has_no_predicates() && generics.is_empty() { - return (GenericPredicates(None), None); - } - let resolver = def.resolver(db); + let interner = DbInterner::new_with(db, Some(resolver.krate()), None); let mut ctx = TyLoweringContext::new( db, &resolver, generics.store(), def, LifetimeElisionKind::AnonymousReportError, - ) - .with_type_param_mode(ParamLoweringMode::Variable); + ); let mut predicates = Vec::new(); for maybe_parent_generics in @@ -1083,29 +1650,71 @@ where { ctx.store = maybe_parent_generics.store(); for pred in maybe_parent_generics.where_predicates() { - if filter(pred, maybe_parent_generics.def()) { - // We deliberately use `generics` and not `maybe_parent_generics` here. This is not a mistake! 
- // If we use the parent generics - predicates.extend( - ctx.lower_where_predicate(pred, false).map(|p| make_binders(db, &generics, p)), - ); + tracing::debug!(?pred); + if filter(maybe_parent_generics.def()) { + predicates.extend(ctx.lower_where_predicate( + pred, + false, + maybe_parent_generics, + predicate_filter, + )); } } } - if !generics.is_empty() { - let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST); - let explicitly_unsized_tys = ctx.unsized_types; - if let Some(implicitly_sized_predicates) = - implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver) - { - predicates.extend( - implicitly_sized_predicates - .map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p))), + let explicitly_unsized_tys = ctx.unsized_types; + + let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate()); + if let Some(sized_trait) = sized_trait { + let mut add_sized_clause = |param_idx, param_id, param_data| { + let ( + GenericParamId::TypeParamId(param_id), + GenericParamDataRef::TypeParamData(param_data), + ) = (param_id, param_data) + else { + return; + }; + + if param_data.provenance == TypeParamProvenance::TraitSelf { + return; + } + + let param_ty = Ty::new_param(interner, param_id, param_idx); + if explicitly_unsized_tys.contains(¶m_ty) { + return; + } + let trait_ref = TraitRef::new_from_args( + interner, + sized_trait.into(), + GenericArgs::new_from_iter(interner, [param_ty.into()]), ); + let clause = Clause(Predicate::new( + interner, + Binder::dummy(rustc_type_ir::PredicateKind::Clause( + rustc_type_ir::ClauseKind::Trait(TraitPredicate { + trait_ref, + polarity: rustc_type_ir::PredicatePolarity::Positive, + }), + )), + )); + predicates.push(clause); }; + if generics.parent_generics().is_some_and(|parent| filter(parent.def())) { + generics.iter_parent().enumerate().for_each(|(param_idx, (param_id, param_data))| { + add_sized_clause(param_idx as u32, param_id, param_data); + }); + } + if filter(def) { + let parent_params_len = generics.len_parent(); + generics.iter_self().enumerate().for_each(|(param_idx, (param_id, param_data))| { + add_sized_clause((param_idx + parent_params_len) as u32, param_id, param_data); + }); + } } + // FIXME: rustc gathers more predicates by recursing through resulting trait predicates. + // See https://github.com/rust-lang/rust/blob/76c5ed2847cdb26ef2822a3a165d710f6b772217/compiler/rustc_hir_analysis/src/collect/predicates_of.rs#L689-L715 + ( GenericPredicates(predicates.is_empty().not().then(|| predicates.into())), create_diagnostics(ctx.diagnostics), @@ -1114,49 +1723,61 @@ where /// Generate implicit `: Sized` predicates for all generics that has no `?Sized` bound. /// Exception is Self of a trait def. 
-fn implicitly_sized_clauses<'db, 'a, 'subst: 'a>( +fn implicitly_sized_clauses<'a, 'subst, 'db>( db: &'db dyn HirDatabase, def: GenericDefId, - explicitly_unsized_tys: &'a FxHashSet, - substitution: &'subst Substitution, + explicitly_unsized_tys: &'a FxHashSet>, + args: &'subst GenericArgs<'db>, resolver: &Resolver<'db>, -) -> Option> { - let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate()).map(to_chalk_trait_id)?; +) -> Option> + Captures<'a> + Captures<'subst>> { + let interner = DbInterner::new_with(db, Some(resolver.krate()), None); + let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate())?; let trait_self_idx = trait_self_param_idx(db, def); Some( - substitution - .iter(Interner) + args.iter() .enumerate() .filter_map( move |(idx, generic_arg)| { if Some(idx) == trait_self_idx { None } else { Some(generic_arg) } }, ) - .filter_map(|generic_arg| generic_arg.ty(Interner)) - .filter(move |&self_ty| !explicitly_unsized_tys.contains(self_ty)) + .filter_map(|generic_arg| generic_arg.as_type()) + .filter(move |self_ty| !explicitly_unsized_tys.contains(self_ty)) .map(move |self_ty| { - WhereClause::Implemented(TraitRef { - trait_id: sized_trait, - substitution: Substitution::from1(Interner, self_ty.clone()), - }) + let trait_ref = TraitRef::new_from_args( + interner, + sized_trait.into(), + GenericArgs::new_from_iter(interner, [self_ty.into()]), + ); + Clause(Predicate::new( + interner, + Binder::dummy(rustc_type_ir::PredicateKind::Clause( + rustc_type_ir::ClauseKind::Trait(TraitPredicate { + trait_ref, + polarity: rustc_type_ir::PredicatePolarity::Positive, + }), + )), + )) }), ) } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct GenericDefaults(Option]>>); - -impl ops::Deref for GenericDefaults { - type Target = [Binders]; +pub struct GenericDefaults<'db>(Option>>]>>); - fn deref(&self) -> &Self::Target { - self.0.as_deref().unwrap_or(&[]) +impl<'db> GenericDefaults<'db> { + #[inline] + pub fn get(&self, idx: usize) -> Option>> { + self.0.as_ref()?[idx] } } -pub(crate) fn generic_defaults_query(db: &dyn HirDatabase, def: GenericDefId) -> GenericDefaults { +pub(crate) fn generic_defaults_query( + db: &dyn HirDatabase, + def: GenericDefId, +) -> GenericDefaults<'_> { db.generic_defaults_with_diagnostics(def).0 } @@ -1166,7 +1787,7 @@ pub(crate) fn generic_defaults_query(db: &dyn HirDatabase, def: GenericDefId) -> pub(crate) fn generic_defaults_with_diagnostics_query( db: &dyn HirDatabase, def: GenericDefId, -) -> (GenericDefaults, Diagnostics) { +) -> (GenericDefaults<'_>, Diagnostics) { let generic_params = generics(db, def); if generic_params.is_empty() { return (GenericDefaults(None), None); @@ -1180,23 +1801,22 @@ pub(crate) fn generic_defaults_with_diagnostics_query( def, LifetimeElisionKind::AnonymousReportError, ) - .with_impl_trait_mode(ImplTraitLoweringMode::Disallowed) - .with_type_param_mode(ParamLoweringMode::Variable); + .with_impl_trait_mode(ImplTraitLoweringMode::Disallowed); let mut idx = 0; let mut has_any_default = false; let mut defaults = generic_params .iter_parents_with_store() - .map(|((id, p), store)| { + .map(|((_id, p), store)| { ctx.store = store; - let (result, has_default) = handle_generic_param(&mut ctx, idx, id, p, &generic_params); + let (result, has_default) = handle_generic_param(&mut ctx, idx, p); has_any_default |= has_default; idx += 1; result }) .collect::>(); ctx.diagnostics.clear(); // Don't include diagnostics from the parent. 
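
Defaults for parent parameters are lowered before the item's own parameters because a generic default may only refer to parameters declared before it; the old code enforced this by replacing out-of-range bound variables with an error type (`fallback_bound_vars`). A tiny standalone reminder of the language rule this mirrors:

```rust
// Editorial illustration of the "defaults only see earlier parameters" rule.

// OK: `B`'s default refers to the earlier parameter `A`.
struct Pair<A, B = A>(A, B);

// Rejected by rustc (E0128, forward-declared identifier), shown for contrast:
// struct Bad<A = B, B = ()>(A, B);

fn main() {
    let p: Pair<u8> = Pair(1, 2); // `B` defaults to `A`, i.e. `u8`
    let _ = p;
}
```
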
- defaults.extend(generic_params.iter_self().map(|(id, p)| { - let (result, has_default) = handle_generic_param(&mut ctx, idx, id, p, &generic_params); + defaults.extend(generic_params.iter_self().map(|(_id, p)| { + let (result, has_default) = handle_generic_param(&mut ctx, idx, p); has_any_default |= has_default; idx += 1; result @@ -1209,47 +1829,26 @@ pub(crate) fn generic_defaults_with_diagnostics_query( }; return (defaults, diagnostics); - fn handle_generic_param( - ctx: &mut TyLoweringContext<'_>, + fn handle_generic_param<'db>( + ctx: &mut TyLoweringContext<'db, '_>, idx: usize, - id: GenericParamId, p: GenericParamDataRef<'_>, - generic_params: &Generics, - ) -> (Binders, bool) { - let binders = variable_kinds_from_iter(ctx.db, generic_params.iter_id().take(idx)); + ) -> (Option>>, bool) { + ctx.lowering_param_default(idx as u32); match p { GenericParamDataRef::TypeParamData(p) => { - let ty = p.default.as_ref().map_or_else( - || TyKind::Error.intern(Interner), - |ty| { - // Each default can only refer to previous parameters. - // Type variable default referring to parameter coming - // after it is forbidden (FIXME: report diagnostic) - fallback_bound_vars(ctx.lower_ty(*ty), idx) - }, - ); - (Binders::new(binders, ty.cast(Interner)), p.default.is_some()) + let ty = p.default.map(|ty| ctx.lower_ty(ty)); + (ty.map(|ty| EarlyBinder::bind(ty.into())), p.default.is_some()) } GenericParamDataRef::ConstParamData(p) => { - let GenericParamId::ConstParamId(id) = id else { - unreachable!("Unexpected lifetime or type argument") - }; - - let mut val = p.default.as_ref().map_or_else( - || unknown_const_as_generic(ctx.db.const_param_ty(id)), - |c| { - let param_ty = ctx.lower_ty(p.ty); - let c = ctx.lower_const(c, param_ty); - c.cast(Interner) - }, - ); - // Each default can only refer to previous parameters, see above. 
- val = fallback_bound_vars(val, idx); - (Binders::new(binders, val), p.default.is_some()) - } - GenericParamDataRef::LifetimeParamData(_) => { - (Binders::new(binders, error_lifetime().cast(Interner)), false) + let val = p.default.map(|c| { + let param_ty = ctx.lower_ty(p.ty); + let c = ctx.lower_const(c, param_ty); + c.into() + }); + (val.map(EarlyBinder::bind), p.default.is_some()) } + GenericParamDataRef::LifetimeParamData(_) => (None, false), } } } @@ -1257,161 +1856,323 @@ pub(crate) fn generic_defaults_with_diagnostics_query( pub(crate) fn generic_defaults_with_diagnostics_cycle_result( _db: &dyn HirDatabase, _def: GenericDefId, -) -> (GenericDefaults, Diagnostics) { +) -> (GenericDefaults<'_>, Diagnostics) { (GenericDefaults(None), None) } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub enum TyDefId { - BuiltinType(BuiltinType), - AdtId(AdtId), - TypeAliasId(TypeAliasId), -} -impl_from!(BuiltinType, AdtId(StructId, EnumId, UnionId), TypeAliasId for TyDefId); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype)] -pub enum ValueTyDefId { - FunctionId(FunctionId), - StructId(StructId), - UnionId(UnionId), - EnumVariantId(EnumVariantId), - ConstId(ConstId), - StaticId(StaticId), -} -impl_from!(FunctionId, StructId, UnionId, EnumVariantId, ConstId, StaticId for ValueTyDefId); - -impl ValueTyDefId { - pub(crate) fn to_generic_def_id(self, db: &dyn HirDatabase) -> GenericDefId { - match self { - Self::FunctionId(id) => id.into(), - Self::StructId(id) => id.into(), - Self::UnionId(id) => id.into(), - Self::EnumVariantId(var) => var.lookup(db).parent.into(), - Self::ConstId(id) => id.into(), - Self::StaticId(id) => id.into(), - } +/// Build the signature of a callable item (function, struct or enum variant). +pub(crate) fn callable_item_signature_query<'db>( + db: &'db dyn HirDatabase, + def: CallableDefId, +) -> EarlyBinder<'db, PolyFnSig<'db>> { + match def { + CallableDefId::FunctionId(f) => fn_sig_for_fn(db, f), + CallableDefId::StructId(s) => fn_sig_for_struct_constructor(db, s), + CallableDefId::EnumVariantId(e) => fn_sig_for_enum_variant_constructor(db, e), } } -pub(crate) fn const_param_ty_query(db: &dyn HirDatabase, def: ConstParamId) -> Ty { - const_param_ty_with_diagnostics_query(db, def).0 -} - -// returns None if def is a type arg -pub(crate) fn const_param_ty_with_diagnostics_query( - db: &dyn HirDatabase, - def: ConstParamId, -) -> (Ty, Diagnostics) { - let (parent_data, store) = db.generic_params_and_store(def.parent()); - let data = &parent_data[def.local_id()]; - let resolver = def.parent().resolver(db); - let mut ctx = TyLoweringContext::new( +fn fn_sig_for_fn<'db>( + db: &'db dyn HirDatabase, + def: FunctionId, +) -> EarlyBinder<'db, PolyFnSig<'db>> { + let data = db.function_signature(def); + let resolver = def.resolver(db); + let interner = DbInterner::new_with(db, Some(resolver.krate()), None); + let mut ctx_params = TyLoweringContext::new( db, &resolver, - &store, - def.parent(), - LifetimeElisionKind::AnonymousReportError, + &data.store, + def.into(), + LifetimeElisionKind::for_fn_params(&data), ); - let ty = match data { - TypeOrConstParamData::TypeParamData(_) => { - never!(); - Ty::new(Interner, TyKind::Error) + let params = data.params.iter().map(|&tr| ctx_params.lower_ty(tr)); + + let ret = match data.ret_type { + Some(ret_type) => { + let mut ctx_ret = TyLoweringContext::new( + db, + &resolver, + &data.store, + def.into(), + LifetimeElisionKind::for_fn_ret(interner), + ) + .with_impl_trait_mode(ImplTraitLoweringMode::Opaque); 
+ ctx_ret.lower_ty(ret_type) } - TypeOrConstParamData::ConstParamData(d) => ctx.lower_ty(d.ty), + None => Ty::new_tup(interner, &[]), }; - (ty, create_diagnostics(ctx.diagnostics)) + + let inputs_and_output = Tys::new_from_iter(interner, params.chain(Some(ret))); + // If/when we track late bound vars, we need to switch this to not be `dummy` + EarlyBinder::bind(rustc_type_ir::Binder::dummy(FnSig { + abi: data.abi.as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol), + c_variadic: data.is_varargs(), + safety: if data.is_unsafe() { Safety::Unsafe } else { Safety::Safe }, + inputs_and_output, + })) } -pub(crate) fn const_param_ty_cycle_result( - _: &dyn HirDatabase, - _: crate::db::HirDatabaseData, - _: ConstParamId, -) -> Ty { - TyKind::Error.intern(Interner) +fn type_for_adt<'db>(db: &'db dyn HirDatabase, adt: AdtId) -> EarlyBinder<'db, Ty<'db>> { + let interner = DbInterner::new_with(db, None, None); + let args = GenericArgs::identity_for_item(interner, adt.into()); + let ty = Ty::new_adt(interner, adt, args); + EarlyBinder::bind(ty) } -pub(crate) fn return_type_impl_traits( - db: &dyn HirDatabase, - def: hir_def::FunctionId, -) -> Option>> { - // FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe - let data = db.function_signature(def); - let resolver = def.resolver(db); - let mut ctx_ret = - TyLoweringContext::new(db, &resolver, &data.store, def.into(), LifetimeElisionKind::Infer) - .with_impl_trait_mode(ImplTraitLoweringMode::Opaque) - .with_type_param_mode(ParamLoweringMode::Variable); - if let Some(ret_type) = data.ret_type { - let _ret = ctx_ret.lower_ty(ret_type); - } - let generics = generics(db, def.into()); - let return_type_impl_traits = - ImplTraits { impl_traits: ctx_ret.impl_trait_mode.opaque_type_data }; - if return_type_impl_traits.impl_traits.is_empty() { - None - } else { - Some(Arc::new(make_binders(db, &generics, return_type_impl_traits))) - } +fn fn_sig_for_struct_constructor<'db>( + db: &'db dyn HirDatabase, + def: StructId, +) -> EarlyBinder<'db, PolyFnSig<'db>> { + let field_tys = db.field_types(def.into()); + let params = field_tys.iter().map(|(_, ty)| ty.skip_binder()); + let ret = type_for_adt(db, def.into()).skip_binder(); + + let inputs_and_output = + Tys::new_from_iter(DbInterner::new_with(db, None, None), params.chain(Some(ret))); + EarlyBinder::bind(Binder::dummy(FnSig { + abi: FnAbi::RustCall, + c_variadic: false, + safety: Safety::Safe, + inputs_and_output, + })) } -pub(crate) fn type_alias_impl_traits( - db: &dyn HirDatabase, - def: hir_def::TypeAliasId, -) -> Option>> { - let data = db.type_alias_signature(def); - let resolver = def.resolver(db); +fn fn_sig_for_enum_variant_constructor<'db>( + db: &'db dyn HirDatabase, + def: EnumVariantId, +) -> EarlyBinder<'db, PolyFnSig<'db>> { + let field_tys = db.field_types(def.into()); + let params = field_tys.iter().map(|(_, ty)| ty.skip_binder()); + let parent = def.lookup(db).parent; + let ret = type_for_adt(db, parent.into()).skip_binder(); + + let inputs_and_output = + Tys::new_from_iter(DbInterner::new_with(db, None, None), params.chain(Some(ret))); + EarlyBinder::bind(Binder::dummy(FnSig { + abi: FnAbi::RustCall, + c_variadic: false, + safety: Safety::Safe, + inputs_and_output, + })) +} + +// FIXME(next-solver): should merge this with `explicit_item_bounds` in some way +pub(crate) fn associated_ty_item_bounds<'db>( + db: &'db dyn HirDatabase, + type_alias: TypeAliasId, +) -> EarlyBinder<'db, BoundExistentialPredicates<'db>> { + let type_alias_data = db.type_alias_signature(type_alias); + let 
resolver = hir_def::resolver::HasResolver::resolver(type_alias, db); + let interner = DbInterner::new_with(db, Some(resolver.krate()), None); let mut ctx = TyLoweringContext::new( db, &resolver, - &data.store, - def.into(), + &type_alias_data.store, + type_alias.into(), LifetimeElisionKind::AnonymousReportError, - ) - .with_impl_trait_mode(ImplTraitLoweringMode::Opaque) - .with_type_param_mode(ParamLoweringMode::Variable); - if let Some(type_ref) = data.ty { - let _ty = ctx.lower_ty(type_ref); + ); + // FIXME: we should never create non-existential predicates in the first place + // For now, use an error type so we don't run into dummy binder issues + let self_ty = Ty::new_error(interner, ErrorGuaranteed); + + let mut bounds = Vec::new(); + for bound in &type_alias_data.bounds { + ctx.lower_type_bound(bound, self_ty, false).for_each(|pred| { + if let Some(bound) = pred + .kind() + .map_bound(|c| match c { + rustc_type_ir::ClauseKind::Trait(t) => { + let id = t.def_id(); + let is_auto = db.trait_signature(id.0).flags.contains(TraitFlags::AUTO); + if is_auto { + Some(ExistentialPredicate::AutoTrait(t.def_id())) + } else { + Some(ExistentialPredicate::Trait(ExistentialTraitRef::new_from_args( + interner, + t.def_id(), + GenericArgs::new_from_iter( + interner, + t.trait_ref.args.iter().skip(1), + ), + ))) + } + } + rustc_type_ir::ClauseKind::Projection(p) => Some( + ExistentialPredicate::Projection(ExistentialProjection::new_from_args( + interner, + p.def_id(), + GenericArgs::new_from_iter( + interner, + p.projection_term.args.iter().skip(1), + ), + p.term, + )), + ), + rustc_type_ir::ClauseKind::TypeOutlives(_) => None, + rustc_type_ir::ClauseKind::RegionOutlives(_) + | rustc_type_ir::ClauseKind::ConstArgHasType(_, _) + | rustc_type_ir::ClauseKind::WellFormed(_) + | rustc_type_ir::ClauseKind::ConstEvaluatable(_) + | rustc_type_ir::ClauseKind::HostEffect(_) + | rustc_type_ir::ClauseKind::UnstableFeature(_) => unreachable!(), + }) + .transpose() + { + bounds.push(bound); + } + }); } - let type_alias_impl_traits = ImplTraits { impl_traits: ctx.impl_trait_mode.opaque_type_data }; - if type_alias_impl_traits.impl_traits.is_empty() { - None - } else { - let generics = generics(db, def.into()); - Some(Arc::new(make_binders(db, &generics, type_alias_impl_traits))) + + if !ctx.unsized_types.contains(&self_ty) + && let Some(sized_trait) = LangItem::Sized.resolve_trait(db, resolver.krate()) + { + let sized_clause = Binder::dummy(ExistentialPredicate::Trait(ExistentialTraitRef::new( + interner, + sized_trait.into(), + [] as [GenericArg<'_>; 0], + ))); + bounds.push(sized_clause); } + + EarlyBinder::bind(BoundExistentialPredicates::new_from_iter(interner, bounds)) } -pub(crate) fn lower_to_chalk_mutability(m: hir_def::type_ref::Mutability) -> Mutability { - match m { - hir_def::type_ref::Mutability::Shared => Mutability::Not, - hir_def::type_ref::Mutability::Mut => Mutability::Mut, - } +pub(crate) fn associated_type_by_name_including_super_traits<'db>( + db: &'db dyn HirDatabase, + trait_ref: TraitRef<'db>, + name: &Name, +) -> Option<(TraitRef<'db>, TypeAliasId)> { + let interner = DbInterner::new_with(db, None, None); + rustc_type_ir::elaborate::supertraits(interner, Binder::dummy(trait_ref)).find_map(|t| { + let trait_id = t.as_ref().skip_binder().def_id.0; + let assoc_type = trait_id.trait_items(db).associated_type_by_name(name)?; + Some((t.skip_binder(), assoc_type)) + }) } -/// Replaces any 'free' `BoundVar`s in `s` by `TyKind::Error` from the perspective of generic -/// parameter whose index is 
`param_index`. A `BoundVar` is free when it appears after the -/// generic parameter of `param_index`. -fn fallback_bound_vars + HasInterner>( - s: T, - param_index: usize, -) -> T { - let is_allowed = |index| (0..param_index).contains(&index); - - crate::fold_free_vars( - s, - |bound, binders| { - if bound.index_if_innermost().is_none_or(is_allowed) { - bound.shifted_in_from(binders).to_ty(Interner) - } else { - TyKind::Error.intern(Interner) +pub fn associated_type_shorthand_candidates( + db: &dyn HirDatabase, + def: GenericDefId, + res: TypeNs, + mut cb: impl FnMut(&Name, TypeAliasId) -> bool, +) -> Option { + let interner = DbInterner::new_with(db, None, None); + named_associated_type_shorthand_candidates(interner, def, res, None, |name, _, id| { + cb(name, id).then_some(id) + }) +} + +#[tracing::instrument(skip(interner, check_alias))] +fn named_associated_type_shorthand_candidates<'db, R>( + interner: DbInterner<'db>, + // If the type parameter is defined in an impl and we're in a method, there + // might be additional where clauses to consider + def: GenericDefId, + res: TypeNs, + assoc_name: Option, + mut check_alias: impl FnMut(&Name, TraitRef<'db>, TypeAliasId) -> Option, +) -> Option { + let db = interner.db; + let mut search = |t: TraitRef<'db>| -> Option { + let mut checked_traits = FxHashSet::default(); + let mut check_trait = |trait_ref: TraitRef<'db>| { + let trait_id = trait_ref.def_id.0; + let name = &db.trait_signature(trait_id).name; + tracing::debug!(?trait_id, ?name); + if !checked_traits.insert(trait_id) { + return None; } - }, - |ty, bound, binders| { - if bound.index_if_innermost().is_none_or(is_allowed) { - bound.shifted_in_from(binders).to_const(Interner, ty) - } else { - unknown_const(ty) + let data = trait_id.trait_items(db); + + tracing::debug!(?data.items); + for (name, assoc_id) in &data.items { + if let &AssocItemId::TypeAliasId(alias) = assoc_id + && let Some(ty) = check_alias(name, trait_ref, alias) + { + return Some(ty); + } } - }, - ) + None + }; + let mut stack: SmallVec<[_; 4]> = smallvec![t]; + while let Some(trait_ref) = stack.pop() { + if let Some(alias) = check_trait(trait_ref) { + return Some(alias); + } + for pred in generic_predicates_filtered_by( + db, + GenericDefId::TraitId(trait_ref.def_id.0), + PredicateFilter::SelfTrait, + // We are likely in the midst of lowering generic predicates of `def`. + // So, if we allow `pred == def` we might fall into an infinite recursion. + // Actually, we have already checked for the case `pred == def` above as we started + // with a stack including `trait_id` + |pred| pred != def && pred == GenericDefId::TraitId(trait_ref.def_id.0), + ) + .0 + .deref() + { + tracing::debug!(?pred); + let sup_trait_ref = match pred.kind().skip_binder() { + rustc_type_ir::ClauseKind::Trait(pred) => pred.trait_ref, + _ => continue, + }; + let sup_trait_ref = + EarlyBinder::bind(sup_trait_ref).instantiate(interner, trait_ref.args); + stack.push(sup_trait_ref); + } + tracing::debug!(?stack); + } + + None + }; + + match res { + TypeNs::SelfType(impl_id) => { + let trait_ref = db.impl_trait(impl_id)?; + + // FIXME(next-solver): same method in `lower` checks for impl or not + // Is that needed here? + + // we're _in_ the impl -- the binders get added back later. 
Correct, + // but it would be nice to make this more explicit + search(trait_ref.skip_binder()) + } + TypeNs::GenericParam(param_id) => { + // Handle `Self::Type` referring to own associated type in trait definitions + // This *must* be done first to avoid cycles with + // `generic_predicates_for_param`, but not sure that it's sufficient, + if let GenericDefId::TraitId(trait_id) = param_id.parent() { + let trait_name = &db.trait_signature(trait_id).name; + tracing::debug!(?trait_name); + let trait_generics = generics(db, trait_id.into()); + tracing::debug!(?trait_generics); + if trait_generics[param_id.local_id()].is_trait_self() { + let args = GenericArgs::identity_for_item(interner, trait_id.into()); + let trait_ref = TraitRef::new_from_args(interner, trait_id.into(), args); + tracing::debug!(?args, ?trait_ref); + return search(trait_ref); + } + } + + let predicates = + db.generic_predicates_for_param(def, param_id.into(), assoc_name.clone()); + predicates + .iter() + .find_map(|pred| match (*pred).kind().skip_binder() { + rustc_type_ir::ClauseKind::Trait(trait_predicate) => Some(trait_predicate), + _ => None, + }) + .and_then(|trait_predicate| { + let trait_ref = trait_predicate.trait_ref; + assert!( + !trait_ref.has_escaping_bound_vars(), + "FIXME unexpected higher-ranked trait bound" + ); + search(trait_ref) + }) + } + _ => None, + } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs index 42723dc9e1dda..9ba0da6f49649 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs @@ -1,42 +1,52 @@ //! A wrapper around [`TyLoweringContext`] specifically for lowering paths. -use chalk_ir::{BoundVar, cast::Cast, fold::Shift}; use either::Either; use hir_def::{ - GenericDefId, GenericParamId, TraitId, + GenericDefId, GenericParamId, Lookup, TraitId, TypeAliasId, expr_store::{ - ExpressionStore, - path::{GenericArg, GenericArgs, GenericArgsParentheses, Path, PathSegment, PathSegments}, + ExpressionStore, HygieneId, + path::{ + GenericArg as HirGenericArg, GenericArgs as HirGenericArgs, GenericArgsParentheses, + Path, PathSegment, PathSegments, + }, }, hir::generics::{ GenericParamDataRef, TypeOrConstParamData, TypeParamData, TypeParamProvenance, }, - resolver::TypeNs, + resolver::{ResolveValueResult, TypeNs, ValueNs}, signatures::TraitFlags, type_ref::{TypeRef, TypeRefId}, }; +use hir_expand::name::Name; +use rustc_type_ir::{ + AliasTerm, AliasTy, AliasTyKind, + inherent::{GenericArgs as _, Region as _, SliceLike, Ty as _}, +}; use smallvec::SmallVec; use stdx::never; use crate::{ - AliasEq, AliasTy, GenericArgsProhibitedReason, ImplTraitLoweringMode, IncorrectGenericsLenKind, - Interner, ParamLoweringMode, PathGenericsSource, PathLoweringDiagnostic, ProjectionTy, - QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyDefId, TyKind, WhereClause, - consteval_chalk::{unknown_const, unknown_const_as_generic}, + GenericArgsProhibitedReason, IncorrectGenericsLenKind, PathGenericsSource, + PathLoweringDiagnostic, TyDefId, ValueTyDefId, + consteval::{unknown_const, unknown_const_as_generic}, db::HirDatabase, - error_lifetime, generics::{Generics, generics}, - lower::{LifetimeElisionKind, TyLoweringContext, named_associated_type_shorthand_candidates}, + lower::{ + LifetimeElisionKind, PathDiagnosticCallbackData, named_associated_type_shorthand_candidates, + }, next_solver::{ - DbInterner, - mapping::{ChalkToNextSolver, NextSolverToChalk}, + 
Binder, Clause, Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, Predicate, + ProjectionPredicate, Region, TraitRef, Ty, }, - to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx, - utils::associated_type_by_name_including_super_traits, +}; + +use super::{ + ImplTraitLoweringMode, TyLoweringContext, associated_type_by_name_including_super_traits, + const_param_ty_query, ty_query, }; type CallbackData<'a, 'db> = Either< - super::PathDiagnosticCallbackData, + PathDiagnosticCallbackData, crate::infer::diagnostics::PathDiagnosticCallbackData<'a, 'db>, >; @@ -45,12 +55,12 @@ type CallbackData<'a, 'db> = Either< pub(crate) struct PathDiagnosticCallback<'a, 'db> { pub(crate) data: CallbackData<'a, 'db>, pub(crate) callback: - fn(&CallbackData<'_, 'db>, &mut TyLoweringContext<'_>, PathLoweringDiagnostic), + fn(&CallbackData<'_, 'db>, &mut TyLoweringContext<'db, '_>, PathLoweringDiagnostic), } -pub(crate) struct PathLoweringContext<'a, 'b> { - ctx: &'a mut TyLoweringContext<'b>, - on_diagnostic: PathDiagnosticCallback<'a, 'b>, +pub(crate) struct PathLoweringContext<'a, 'b, 'db> { + ctx: &'a mut TyLoweringContext<'db, 'b>, + on_diagnostic: PathDiagnosticCallback<'a, 'db>, path: &'a Path, segments: PathSegments<'a>, current_segment_idx: usize, @@ -58,11 +68,11 @@ pub(crate) struct PathLoweringContext<'a, 'b> { current_or_prev_segment: PathSegment<'a>, } -impl<'a, 'b> PathLoweringContext<'a, 'b> { +impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> { #[inline] pub(crate) fn new( - ctx: &'a mut TyLoweringContext<'b>, - on_diagnostic: PathDiagnosticCallback<'a, 'b>, + ctx: &'a mut TyLoweringContext<'db, 'b>, + on_diagnostic: PathDiagnosticCallback<'a, 'db>, path: &'a Path, ) -> Self { let segments = path.segments(); @@ -84,7 +94,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { } #[inline] - pub(crate) fn ty_ctx(&mut self) -> &mut TyLoweringContext<'b> { + pub(crate) fn ty_ctx(&mut self) -> &mut TyLoweringContext<'db, 'b> { self.ctx } @@ -109,11 +119,25 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { self.segments.get(self.current_segment_idx).unwrap_or(self.current_or_prev_segment); } + #[inline] + pub(crate) fn ignore_last_segment(&mut self) { + self.segments = self.segments.strip_last(); + } + + #[inline] + pub(crate) fn set_current_segment(&mut self, segment: usize) { + self.current_segment_idx = segment; + self.current_or_prev_segment = self + .segments + .get(segment) + .expect("invalid segment passed to PathLoweringContext::set_current_segment()"); + } + #[inline] fn with_lifetime_elision( &mut self, - lifetime_elision: LifetimeElisionKind, - f: impl FnOnce(&mut PathLoweringContext<'_, '_>) -> T, + lifetime_elision: LifetimeElisionKind<'db>, + f: impl FnOnce(&mut PathLoweringContext<'_, '_, 'db>) -> T, ) -> T { let old_lifetime_elision = std::mem::replace(&mut self.ctx.lifetime_elision, lifetime_elision); @@ -124,12 +148,13 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { pub(crate) fn lower_ty_relative_path( &mut self, - ty: Ty, + ty: Ty<'db>, // We need the original resolution to lower `Self::AssocTy` correctly res: Option, infer_args: bool, - ) -> (Ty, Option) { - match self.segments.len() - self.current_segment_idx { + ) -> (Ty<'db>, Option) { + let remaining_segments = self.segments.len() - self.current_segment_idx; + match remaining_segments { 0 => (ty, res), 1 => { // resolve unselected assoc types @@ -137,7 +162,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { } _ => { // FIXME report error (ambiguous associated type) - (TyKind::Error.intern(Interner), None) + 
(Ty::new_error(self.ctx.interner, ErrorGuaranteed), None) } } } @@ -147,8 +172,11 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { &mut self, resolution: TypeNs, infer_args: bool, - ) -> (Ty, Option) { + ) -> (Ty<'db>, Option) { let remaining_segments = self.segments.skip(self.current_segment_idx + 1); + tracing::debug!(?remaining_segments); + let rem_seg_len = remaining_segments.len(); + tracing::debug!(?rem_seg_len); let ty = match resolution { TypeNs::TraitId(trait_) => { @@ -156,15 +184,17 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { 1 => { let trait_ref = self.lower_trait_ref_from_resolved_path( trait_, - TyKind::Error.intern(Interner), - infer_args, + Ty::new_error(self.ctx.interner, ErrorGuaranteed), + false, ); - + tracing::debug!(?trait_ref); self.skip_resolved_segment(); let segment = self.current_or_prev_segment; + let trait_id = trait_ref.def_id.0; let found = - trait_.trait_items(self.ctx.db).associated_type_by_name(segment.name); + trait_id.trait_items(self.ctx.db).associated_type_by_name(segment.name); + tracing::debug!(?found); match found { Some(associated_ty) => { // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent @@ -173,27 +203,30 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { // this point (`trait_ref.substitution`). let substitution = self.substs_from_path_segment( associated_ty.into(), - infer_args, + false, None, true, ); - let substitution = Substitution::from_iter( - Interner, - trait_ref.substitution.iter(Interner).chain( - substitution - .iter(Interner) - .skip(trait_ref.substitution.len(Interner)), - ), + let args = GenericArgs::new_from_iter( + self.ctx.interner, + trait_ref + .args + .iter() + .chain(substitution.iter().skip(trait_ref.args.len())), ); - TyKind::Alias(AliasTy::Projection(ProjectionTy { - associated_ty_id: to_assoc_type_id(associated_ty), - substitution, - })) - .intern(Interner) + Ty::new_alias( + self.ctx.interner, + AliasTyKind::Projection, + AliasTy::new_from_args( + self.ctx.interner, + associated_ty.into(), + args, + ), + ) } None => { // FIXME: report error (associated type not found) - TyKind::Error.intern(Interner) + Ty::new_error(self.ctx.interner, ErrorGuaranteed) } } } @@ -201,73 +234,34 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { // Trait object type without dyn; this should be handled in upstream. See // `lower_path()`. 
stdx::never!("unexpected fully resolved trait path"); - TyKind::Error.intern(Interner) + Ty::new_error(self.ctx.interner, ErrorGuaranteed) } _ => { // FIXME report error (ambiguous associated type) - TyKind::Error.intern(Interner) + Ty::new_error(self.ctx.interner, ErrorGuaranteed) } }; return (ty, None); } - TypeNs::GenericParam(param_id) => match self.ctx.type_param_mode { - ParamLoweringMode::Placeholder => { - let generics = self.ctx.generics(); - let idx = generics.type_or_const_param_idx(param_id.into()).unwrap(); - TyKind::Placeholder(to_placeholder_idx( - self.ctx.db, - param_id.into(), - idx as u32, - )) - } - ParamLoweringMode::Variable => { - let idx = match self.ctx.generics().type_or_const_param_idx(param_id.into()) { - None => { - never!("no matching generics"); - return (TyKind::Error.intern(Interner), None); - } - Some(idx) => idx, - }; - - TyKind::BoundVar(BoundVar::new(self.ctx.in_binders, idx)) - } - } - .intern(Interner), - TypeNs::SelfType(impl_id) => { + TypeNs::GenericParam(param_id) => { let generics = self.ctx.generics(); - - match self.ctx.type_param_mode { - ParamLoweringMode::Placeholder => { - // `def` can be either impl itself or item within, and we need impl itself - // now. - let generics = generics.parent_or_self(); - let interner = DbInterner::new_with(self.ctx.db, None, None); - let subst = generics.placeholder_subst(self.ctx.db); - let args: crate::next_solver::GenericArgs<'_> = - subst.to_nextsolver(interner); - self.ctx - .db - .impl_self_ty(impl_id) - .instantiate(interner, args) - .to_chalk(interner) + let idx = generics.type_or_const_param_idx(param_id.into()); + match idx { + None => { + never!("no matching generics"); + Ty::new_error(self.ctx.interner, ErrorGuaranteed) + } + Some(idx) => { + let (pidx, _param) = generics.iter().nth(idx).unwrap(); + assert_eq!(pidx, param_id.into()); + self.ctx.type_param(param_id, idx as u32) } - ParamLoweringMode::Variable => TyBuilder::impl_self_ty(self.ctx.db, impl_id) - .fill_with_bound_vars(self.ctx.in_binders, 0) - .build(DbInterner::conjure()) - .to_chalk(DbInterner::conjure()), } } + TypeNs::SelfType(impl_id) => self.ctx.db.impl_self_ty(impl_id).skip_binder(), TypeNs::AdtSelfType(adt) => { - let generics = generics(self.ctx.db, adt.into()); - let substs = match self.ctx.type_param_mode { - ParamLoweringMode::Placeholder => generics.placeholder_subst(self.ctx.db), - ParamLoweringMode::Variable => { - generics.bound_vars_subst(self.ctx.db, self.ctx.in_binders) - } - }; - let interner = DbInterner::conjure(); - let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner); - self.ctx.db.ty(adt.into()).instantiate(interner, args).to_chalk(interner) + let args = GenericArgs::identity_for_item(self.ctx.interner, adt.into()); + Ty::new_adt(self.ctx.interner, adt, args) } TypeNs::AdtId(it) => self.lower_path_inner(it.into(), infer_args), @@ -275,15 +269,19 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { TypeNs::TypeAliasId(it) => self.lower_path_inner(it.into(), infer_args), // FIXME: report error TypeNs::EnumVariantId(_) | TypeNs::ModuleId(_) => { - return (TyKind::Error.intern(Interner), None); + return (Ty::new_error(self.ctx.interner, ErrorGuaranteed), None); } }; + tracing::debug!(?ty); + self.skip_resolved_segment(); self.lower_ty_relative_path(ty, Some(resolution), infer_args) } - fn handle_type_ns_resolution(&mut self, resolution: &TypeNs) { + /// This returns whether to keep the resolution (`true`) of throw it (`false`). 
+ #[must_use] + fn handle_type_ns_resolution(&mut self, resolution: &TypeNs) -> bool { let mut prohibit_generics_on_resolved = |reason| { if self.current_or_prev_segment.args_and_bindings.is_some() { let segment = self.current_segment_u32(); @@ -302,7 +300,13 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { prohibit_generics_on_resolved(GenericArgsProhibitedReason::TyParam) } TypeNs::AdtSelfType(_) => { - prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy) + prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy); + + if self.ctx.lowering_param_default.is_some() { + // Generic defaults are not allowed to refer to `Self`. + // FIXME: Emit an error. + return false; + } } TypeNs::BuiltinType(_) => { prohibit_generics_on_resolved(GenericArgsProhibitedReason::PrimitiveTy) @@ -315,6 +319,8 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { | TypeNs::TypeAliasId(_) | TypeNs::TraitId(_) => {} } + + true } pub(crate) fn resolve_path_in_type_ns_fully(&mut self) -> Option { @@ -325,6 +331,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { Some(res) } + #[tracing::instrument(skip(self), ret)] pub(crate) fn resolve_path_in_type_ns(&mut self) -> Option<(TypeNs, Option)> { let (resolution, remaining_index, _, prefix_info) = self.ctx.resolver.resolve_path_in_type_ns_with_prefix_info(self.ctx.db, self.path)?; @@ -347,11 +354,6 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { self.current_or_prev_segment = segments.get(resolved_segment_idx).expect("should have resolved segment"); - if matches!(self.path, Path::BarePath(..)) { - // Bare paths cannot have generics, so skip them as an optimization. - return Some((resolution, remaining_index)); - } - for (i, mod_segment) in module_segments.iter().enumerate() { if mod_segment.args_and_bindings.is_some() { self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { @@ -371,90 +373,233 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { }); } - self.handle_type_ns_resolution(&resolution); + if !self.handle_type_ns_resolution(&resolution) { + return None; + } Some((resolution, remaining_index)) } - fn select_associated_type(&mut self, res: Option, infer_args: bool) -> Ty { - let Some(res) = res else { - return TyKind::Error.intern(Interner); - }; - let segment = self.current_or_prev_segment; - let ty = named_associated_type_shorthand_candidates( + pub(crate) fn resolve_path_in_value_ns( + &mut self, + hygiene_id: HygieneId, + ) -> Option { + let (res, prefix_info) = self.ctx.resolver.resolve_path_in_value_ns_with_prefix_info( self.ctx.db, - self.ctx.def, - res, - Some(segment.name.clone()), - move |name, t, associated_ty| { - if name != segment.name { - return None; - } - let generics = self.ctx.generics(); + self.path, + hygiene_id, + )?; - let parent_subst = t.substitution.clone(); - let parent_subst = match self.ctx.type_param_mode { - ParamLoweringMode::Placeholder => { - // if we're lowering to placeholders, we have to put them in now. - let s = generics.placeholder_subst(self.ctx.db); - s.apply(parent_subst, Interner) - } - ParamLoweringMode::Variable => { - // We need to shift in the bound vars, since - // `named_associated_type_shorthand_candidates` does not do that. - parent_subst.shifted_in_from(Interner, self.ctx.in_binders) + let segments = self.segments; + if segments.is_empty() || matches!(self.path, Path::LangItem(..)) { + // `segments.is_empty()` can occur with `self`. 
+ return Some(res); + } + + let (mod_segments, enum_segment, resolved_segment_idx) = match res { + ResolveValueResult::Partial(_, unresolved_segment, _) => { + (segments.take(unresolved_segment - 1), None, unresolved_segment - 1) + } + ResolveValueResult::ValueNs(ValueNs::EnumVariantId(_), _) + if prefix_info.enum_variant => + { + (segments.strip_last_two(), segments.len().checked_sub(2), segments.len() - 1) + } + ResolveValueResult::ValueNs(..) => (segments.strip_last(), None, segments.len() - 1), + }; + + self.current_segment_idx = resolved_segment_idx; + self.current_or_prev_segment = + segments.get(resolved_segment_idx).expect("should have resolved segment"); + + for (i, mod_segment) in mod_segments.iter().enumerate() { + if mod_segment.args_and_bindings.is_some() { + self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { + segment: i as u32, + reason: GenericArgsProhibitedReason::Module, + }); + } + } + + if let Some(enum_segment) = enum_segment + && segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some()) + && segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some()) + { + self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { + segment: (enum_segment + 1) as u32, + reason: GenericArgsProhibitedReason::EnumVariant, + }); + } + + match &res { + ResolveValueResult::ValueNs(resolution, _) => { + let resolved_segment_idx = self.current_segment_u32(); + let resolved_segment = self.current_or_prev_segment; + + let mut prohibit_generics_on_resolved = |reason| { + if resolved_segment.args_and_bindings.is_some() { + self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { + segment: resolved_segment_idx, + reason, + }); } }; - // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent - // generic params. It's inefficient to splice the `Substitution`s, so we may want - // that method to optionally take parent `Substitution` as we already know them at - // this point (`t.substitution`). - let substs = - self.substs_from_path_segment(associated_ty.into(), infer_args, None, true); - - let substs = Substitution::from_iter( - Interner, - parent_subst - .iter(Interner) - .chain(substs.iter(Interner).skip(parent_subst.len(Interner))), - ); + match resolution { + ValueNs::ImplSelf(_) => { + prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy); + } + // FIXME: rustc generates E0107 (incorrect number of generic arguments) and not + // E0109 (generic arguments provided for a type that doesn't accept them) for + // consts and statics, presumably as a defense against future in which consts + // and statics can be generic, or just because it was easier for rustc implementors. + // That means we'll show the wrong error code. 
Because of us it's easier to do it + // this way :) + ValueNs::GenericParam(_) => { + prohibit_generics_on_resolved(GenericArgsProhibitedReason::Const) + } + ValueNs::StaticId(_) => { + prohibit_generics_on_resolved(GenericArgsProhibitedReason::Static) + } + ValueNs::LocalBinding(_) => { + prohibit_generics_on_resolved(GenericArgsProhibitedReason::LocalVariable) + } + ValueNs::FunctionId(_) + | ValueNs::StructId(_) + | ValueNs::EnumVariantId(_) + | ValueNs::ConstId(_) => {} + } + } + ResolveValueResult::Partial(resolution, _, _) => { + if !self.handle_type_ns_resolution(resolution) { + return None; + } + } + }; + Some(res) + } - Some( - TyKind::Alias(AliasTy::Projection(ProjectionTy { - associated_ty_id: to_assoc_type_id(associated_ty), - substitution: substs, - })) - .intern(Interner), - ) - }, - ); + #[tracing::instrument(skip(self), ret)] + fn select_associated_type(&mut self, res: Option, infer_args: bool) -> Ty<'db> { + let interner = self.ctx.interner; + let Some(res) = res else { + return Ty::new_error(self.ctx.interner, ErrorGuaranteed); + }; + let def = self.ctx.def; + let segment = self.current_or_prev_segment; + let assoc_name = segment.name; + let check_alias = |name: &Name, t: TraitRef<'db>, associated_ty: TypeAliasId| { + if name != assoc_name { + return None; + } - ty.unwrap_or_else(|| TyKind::Error.intern(Interner)) + // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent + // generic params. It's inefficient to splice the `Substitution`s, so we may want + // that method to optionally take parent `Substitution` as we already know them at + // this point (`t.substitution`). + let substs = + self.substs_from_path_segment(associated_ty.into(), infer_args, None, true); + + let substs = GenericArgs::new_from_iter( + interner, + t.args.iter().chain(substs.iter().skip(t.args.len())), + ); + + Some(Ty::new_alias( + interner, + AliasTyKind::Projection, + AliasTy::new(interner, associated_ty.into(), substs), + )) + }; + named_associated_type_shorthand_candidates( + interner, + def, + res, + Some(assoc_name.clone()), + check_alias, + ) + .unwrap_or_else(|| Ty::new_error(interner, ErrorGuaranteed)) } - fn lower_path_inner(&mut self, typeable: TyDefId, infer_args: bool) -> Ty { - let interner = DbInterner::conjure(); + fn lower_path_inner(&mut self, typeable: TyDefId, infer_args: bool) -> Ty<'db> { let generic_def = match typeable { - TyDefId::BuiltinType(builtin) => { - return crate::next_solver::Ty::from_builtin_type(interner, builtin) - .to_chalk(interner); + TyDefId::BuiltinType(builtinty) => { + return Ty::from_builtin_type(self.ctx.interner, builtinty); } TyDefId::AdtId(it) => it.into(), TyDefId::TypeAliasId(it) => it.into(), }; - let substs = self.substs_from_path_segment(generic_def, infer_args, None, false); - let args: crate::next_solver::GenericArgs<'_> = substs.to_nextsolver(interner); - self.ctx.db.ty(typeable).instantiate(interner, args).to_chalk(interner) + let args = self.substs_from_path_segment(generic_def, infer_args, None, false); + let ty = ty_query(self.ctx.db, typeable); + ty.instantiate(self.ctx.interner, args) + } + + /// Collect generic arguments from a path into a `Substs`. See also + /// `create_substs_for_ast_path` and `def_to_ty` in rustc. 
+ pub(crate) fn substs_from_path( + &mut self, + // Note that we don't call `db.value_type(resolved)` here, + // `ValueTyDefId` is just a convenient way to pass generics and + // special-case enum variants + resolved: ValueTyDefId, + infer_args: bool, + lowering_assoc_type_generics: bool, + ) -> GenericArgs<'db> { + let interner = self.ctx.interner; + let prev_current_segment_idx = self.current_segment_idx; + let prev_current_segment = self.current_or_prev_segment; + + let generic_def = match resolved { + ValueTyDefId::FunctionId(it) => it.into(), + ValueTyDefId::StructId(it) => it.into(), + ValueTyDefId::UnionId(it) => it.into(), + ValueTyDefId::ConstId(it) => it.into(), + ValueTyDefId::StaticId(_) => { + return GenericArgs::new_from_iter(interner, []); + } + ValueTyDefId::EnumVariantId(var) => { + // the generic args for an enum variant may be either specified + // on the segment referring to the enum, or on the segment + // referring to the variant. So `Option::::None` and + // `Option::None::` are both allowed (though the former is + // FIXME: This isn't strictly correct, enum variants may be used not through the enum + // (via `use Enum::Variant`). The resolver returns whether they were, but we don't have its result + // available here. The worst that can happen is that we will show some confusing diagnostics to the user, + // if generics exist on the module and they don't match with the variant. + // preferred). See also `def_ids_for_path_segments` in rustc. + // + // `wrapping_sub(1)` will return a number which `get` will return None for if current_segment_idx<2. + // This simplifies the code a bit. + let penultimate_idx = self.current_segment_idx.wrapping_sub(1); + let penultimate = self.segments.get(penultimate_idx); + if let Some(penultimate) = penultimate + && self.current_or_prev_segment.args_and_bindings.is_none() + && penultimate.args_and_bindings.is_some() + { + self.current_segment_idx = penultimate_idx; + self.current_or_prev_segment = penultimate; + } + var.lookup(self.ctx.db).parent.into() + } + }; + let result = self.substs_from_path_segment( + generic_def, + infer_args, + None, + lowering_assoc_type_generics, + ); + self.current_segment_idx = prev_current_segment_idx; + self.current_or_prev_segment = prev_current_segment; + result } pub(crate) fn substs_from_path_segment( &mut self, def: GenericDefId, infer_args: bool, - explicit_self_ty: Option, + explicit_self_ty: Option>, lowering_assoc_type_generics: bool, - ) -> Substitution { + ) -> GenericArgs<'db> { let old_lifetime_elision = self.ctx.lifetime_elision.clone(); if let Some(args) = self.current_or_prev_segment.args_and_bindings @@ -481,7 +626,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { PathLoweringDiagnostic::ParenthesizedGenericArgsWithoutFnTrait { segment }, ); - return TyBuilder::unknown_subst(self.ctx.db, def); + return unknown_subst(self.ctx.interner, def); } // `Fn()`-style generics are treated like functions for the purpose of lifetime elision. 
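As a concrete illustration of the rule that `substs_from_path` above accounts for (the generic arguments of an enum variant may be written either on the segment naming the enum or on the segment naming the variant, but not both), here is a minimal standalone sketch; it uses only the standard library `Option` type and is independent of the patch code itself:

```rust
fn main() {
    // Generic args supplied on the enum segment...
    let a = Option::<i32>::None;
    // ...or on the variant segment; both spellings name the same value.
    let b = Option::None::<i32>;
    assert_eq!(a, b);
}
```

Either spelling must resolve to the same `GenericArgs`, which is why the lowering above falls back to the penultimate segment when the variant segment itself carries no arguments.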
@@ -504,20 +649,20 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { pub(super) fn substs_from_args_and_bindings( &mut self, - args_and_bindings: Option<&GenericArgs>, + args_and_bindings: Option<&HirGenericArgs>, def: GenericDefId, infer_args: bool, - explicit_self_ty: Option, + explicit_self_ty: Option>, generics_source: PathGenericsSource, lowering_assoc_type_generics: bool, - lifetime_elision: LifetimeElisionKind, - ) -> Substitution { - struct LowererCtx<'a, 'b, 'c> { - ctx: &'a mut PathLoweringContext<'b, 'c>, + lifetime_elision: LifetimeElisionKind<'db>, + ) -> GenericArgs<'db> { + struct LowererCtx<'a, 'b, 'c, 'db> { + ctx: &'a mut PathLoweringContext<'b, 'c, 'db>, generics_source: PathGenericsSource, } - impl GenericArgsLowerer for LowererCtx<'_, '_, '_> { + impl<'db> GenericArgsLowerer<'db> for LowererCtx<'_, '_, '_, 'db> { fn report_len_mismatch( &mut self, def: GenericDefId, @@ -552,23 +697,24 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { &mut self, param_id: GenericParamId, param: GenericParamDataRef<'_>, - arg: &GenericArg, - ) -> crate::GenericArg { - match (param, arg) { - (GenericParamDataRef::LifetimeParamData(_), GenericArg::Lifetime(lifetime)) => { - self.ctx.ctx.lower_lifetime(*lifetime).cast(Interner) - } - (GenericParamDataRef::TypeParamData(_), GenericArg::Type(type_ref)) => { - self.ctx.ctx.lower_ty(*type_ref).cast(Interner) + arg: &HirGenericArg, + ) -> GenericArg<'db> { + match (param, *arg) { + ( + GenericParamDataRef::LifetimeParamData(_), + HirGenericArg::Lifetime(lifetime), + ) => self.ctx.ctx.lower_lifetime(lifetime).into(), + (GenericParamDataRef::TypeParamData(_), HirGenericArg::Type(type_ref)) => { + self.ctx.ctx.lower_ty(type_ref).into() } - (GenericParamDataRef::ConstParamData(_), GenericArg::Const(konst)) => { + (GenericParamDataRef::ConstParamData(_), HirGenericArg::Const(konst)) => { let GenericParamId::ConstParamId(const_id) = param_id else { unreachable!("non-const param ID for const param"); }; self.ctx .ctx - .lower_const(konst, self.ctx.ctx.db.const_param_ty(const_id)) - .cast(Interner) + .lower_const(konst, const_param_ty_query(self.ctx.ctx.db, const_id)) + .into() } _ => unreachable!("unmatching param kinds were passed to `provided_kind()`"), } @@ -576,9 +722,9 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { fn provided_type_like_const( &mut self, - const_ty: Ty, + const_ty: Ty<'db>, arg: TypeLikeConst<'_>, - ) -> crate::Const { + ) -> Const<'db> { match arg { TypeLikeConst::Path(path) => self.ctx.ctx.lower_path_as_const(path, const_ty), TypeLikeConst::Infer => unknown_const(const_ty), @@ -591,18 +737,19 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { param_id: GenericParamId, param: GenericParamDataRef<'_>, infer_args: bool, - preceding_args: &[crate::GenericArg], - ) -> crate::GenericArg { - let default = || { - self.ctx - .ctx - .db - .generic_defaults(def) - .get(preceding_args.len()) - .map(|default| default.clone().substitute(Interner, preceding_args)) - }; + preceding_args: &[GenericArg<'db>], + ) -> GenericArg<'db> { + let default = + || { + self.ctx.ctx.db.generic_defaults(def).get(preceding_args.len()).map( + |default| default.instantiate(self.ctx.ctx.interner, preceding_args), + ) + }; match param { - GenericParamDataRef::LifetimeParamData(_) => error_lifetime().cast(Interner), + GenericParamDataRef::LifetimeParamData(_) => { + Region::new(self.ctx.ctx.interner, rustc_type_ir::ReError(ErrorGuaranteed)) + .into() + } GenericParamDataRef::TypeParamData(param) => { if !infer_args && param.default.is_some() @@ -610,7 +757,7 @@ impl<'a, 'b> 
PathLoweringContext<'a, 'b> { { return default; } - TyKind::Error.intern(Interner).cast(Interner) + Ty::new_error(self.ctx.ctx.interner, ErrorGuaranteed).into() } GenericParamDataRef::ConstParamData(param) => { if !infer_args @@ -622,19 +769,23 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { let GenericParamId::ConstParamId(const_id) = param_id else { unreachable!("non-const param ID for const param"); }; - unknown_const_as_generic(self.ctx.ctx.db.const_param_ty(const_id)) - .cast(Interner) + unknown_const_as_generic(const_param_ty_query(self.ctx.ctx.db, const_id)) } } } - fn parent_arg(&mut self, param_id: GenericParamId) -> crate::GenericArg { + fn parent_arg(&mut self, param_id: GenericParamId) -> GenericArg<'db> { match param_id { - GenericParamId::TypeParamId(_) => TyKind::Error.intern(Interner).cast(Interner), + GenericParamId::TypeParamId(_) => { + Ty::new_error(self.ctx.ctx.interner, ErrorGuaranteed).into() + } GenericParamId::ConstParamId(const_id) => { - unknown_const_as_generic(self.ctx.ctx.db.const_param_ty(const_id)) + unknown_const_as_generic(const_param_ty_query(self.ctx.ctx.db, const_id)) + } + GenericParamId::LifetimeParamId(_) => { + Region::new(self.ctx.ctx.interner, rustc_type_ir::ReError(ErrorGuaranteed)) + .into() } - GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner), } } @@ -652,6 +803,14 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { }); } + fn report_elision_failure(&mut self, def: GenericDefId, expected_count: u32) { + self.ctx.on_diagnostic(PathLoweringDiagnostic::ElisionFailure { + generics_source: self.generics_source, + def, + expected_count, + }); + } + fn report_missing_lifetime(&mut self, def: GenericDefId, expected_count: u32) { self.ctx.on_diagnostic(PathLoweringDiagnostic::MissingLifetime { generics_source: self.generics_source, @@ -677,38 +836,39 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { pub(crate) fn lower_trait_ref_from_resolved_path( &mut self, resolved: TraitId, - explicit_self_ty: Ty, + explicit_self_ty: Ty<'db>, infer_args: bool, - ) -> TraitRef { - let substs = self.trait_ref_substs_from_path(resolved, explicit_self_ty, infer_args); - TraitRef { trait_id: to_chalk_trait_id(resolved), substitution: substs } + ) -> TraitRef<'db> { + let args = self.trait_ref_substs_from_path(resolved, explicit_self_ty, infer_args); + TraitRef::new_from_args(self.ctx.interner, resolved.into(), args) } fn trait_ref_substs_from_path( &mut self, resolved: TraitId, - explicit_self_ty: Ty, + explicit_self_ty: Ty<'db>, infer_args: bool, - ) -> Substitution { + ) -> GenericArgs<'db> { self.substs_from_path_segment(resolved.into(), infer_args, Some(explicit_self_ty), false) } pub(super) fn assoc_type_bindings_from_type_bound<'c>( mut self, - trait_ref: TraitRef, - ) -> Option + use<'a, 'b, 'c>> { + trait_ref: TraitRef<'db>, + ) -> Option> + use<'a, 'b, 'c, 'db>> { + let interner = self.ctx.interner; self.current_or_prev_segment.args_and_bindings.map(|args_and_bindings| { args_and_bindings.bindings.iter().enumerate().flat_map(move |(binding_idx, binding)| { let found = associated_type_by_name_including_super_traits( self.ctx.db, - trait_ref.clone(), + trait_ref, &binding.name, ); let (super_trait_ref, associated_ty) = match found { None => return SmallVec::new(), Some(t) => t, }; - let substitution = + let args = self.with_lifetime_elision(LifetimeElisionKind::AnonymousReportError, |this| { // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent // generic params. 
It's inefficient to splice the `Substitution`s, so we may want @@ -718,7 +878,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { binding.args.as_ref(), associated_ty.into(), false, // this is not relevant - Some(super_trait_ref.self_type_parameter(Interner)), + Some(super_trait_ref.self_ty()), PathGenericsSource::AssocType { segment: this.current_segment_u32(), assoc_type: binding_idx as u32, @@ -727,27 +887,20 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { this.ctx.lifetime_elision.clone(), ) }); - let substitution = Substitution::from_iter( - Interner, - super_trait_ref.substitution.iter(Interner).chain( - substitution - .iter(Interner) - .skip(super_trait_ref.substitution.len(Interner)), - ), + let args = GenericArgs::new_from_iter( + interner, + super_trait_ref.args.iter().chain(args.iter().skip(super_trait_ref.args.len())), ); - let projection_ty = ProjectionTy { - associated_ty_id: to_assoc_type_id(associated_ty), - substitution, - }; + let projection_term = + AliasTerm::new_from_args(interner, associated_ty.into(), args); let mut predicates: SmallVec<[_; 1]> = SmallVec::with_capacity( binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(), ); - if let Some(type_ref) = binding.type_ref { let lifetime_elision = if args_and_bindings.parenthesized == GenericArgsParentheses::ParenSugar { // `Fn()`-style generics are elided like functions. This is `Output` (we lower to it in hir-def). - LifetimeElisionKind::for_fn_ret() + LifetimeElisionKind::for_fn_ret(self.ctx.interner) } else { self.ctx.lifetime_elision.clone() }; @@ -759,31 +912,33 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> { ImplTraitLoweringMode::Disallowed | ImplTraitLoweringMode::Opaque, ) => { let ty = this.ctx.lower_ty(type_ref); - let alias_eq = AliasEq { - alias: AliasTy::Projection(projection_ty.clone()), - ty, - }; - predicates.push(crate::wrap_empty_binders(WhereClause::AliasEq( - alias_eq, - ))); + let pred = Clause(Predicate::new( + interner, + Binder::dummy(rustc_type_ir::PredicateKind::Clause( + rustc_type_ir::ClauseKind::Projection( + ProjectionPredicate { + projection_term, + term: ty.into(), + }, + ), + )), + )); + predicates.push(pred); } } - }); + }) + } + for bound in binding.bounds.iter() { + predicates.extend(self.ctx.lower_type_bound( + bound, + Ty::new_alias( + self.ctx.interner, + AliasTyKind::Projection, + AliasTy::new_from_args(self.ctx.interner, associated_ty.into(), args), + ), + false, + )); } - - self.with_lifetime_elision(LifetimeElisionKind::AnonymousReportError, |this| { - for bound in binding.bounds.iter() { - predicates.extend( - this.ctx.lower_type_bound( - bound, - TyKind::Alias(AliasTy::Projection(projection_ty.clone())) - .intern(Interner), - false, - ), - ); - } - }); - predicates }) }) @@ -796,7 +951,7 @@ pub(crate) enum TypeLikeConst<'a> { Path(&'a Path), } -pub(crate) trait GenericArgsLowerer { +pub(crate) trait GenericArgsLowerer<'db> { fn report_elided_lifetimes_in_path( &mut self, def: GenericDefId, @@ -804,6 +959,8 @@ pub(crate) trait GenericArgsLowerer { hard_error: bool, ); + fn report_elision_failure(&mut self, def: GenericDefId, expected_count: u32); + fn report_missing_lifetime(&mut self, def: GenericDefId, expected_count: u32); fn report_len_mismatch( @@ -820,10 +977,11 @@ pub(crate) trait GenericArgsLowerer { &mut self, param_id: GenericParamId, param: GenericParamDataRef<'_>, - arg: &GenericArg, - ) -> crate::GenericArg; + arg: &HirGenericArg, + ) -> GenericArg<'db>; - fn provided_type_like_const(&mut self, const_ty: Ty, arg: TypeLikeConst<'_>) -> crate::Const; + fn 
provided_type_like_const(&mut self, const_ty: Ty<'db>, arg: TypeLikeConst<'_>) + -> Const<'db>; fn inferred_kind( &mut self, @@ -831,21 +989,21 @@ pub(crate) trait GenericArgsLowerer { param_id: GenericParamId, param: GenericParamDataRef<'_>, infer_args: bool, - preceding_args: &[crate::GenericArg], - ) -> crate::GenericArg; + preceding_args: &[GenericArg<'db>], + ) -> GenericArg<'db>; - fn parent_arg(&mut self, param_id: GenericParamId) -> crate::GenericArg; + fn parent_arg(&mut self, param_id: GenericParamId) -> GenericArg<'db>; } /// Returns true if there was an error. -fn check_generic_args_len( - args_and_bindings: Option<&GenericArgs>, +fn check_generic_args_len<'db>( + args_and_bindings: Option<&HirGenericArgs>, def: GenericDefId, def_generics: &Generics, infer_args: bool, - lifetime_elision: &LifetimeElisionKind, + lifetime_elision: &LifetimeElisionKind<'db>, lowering_assoc_type_generics: bool, - ctx: &mut impl GenericArgsLowerer, + ctx: &mut impl GenericArgsLowerer<'db>, ) -> bool { let mut had_error = false; @@ -854,8 +1012,10 @@ fn check_generic_args_len( let args_no_self = &args_and_bindings.args[usize::from(args_and_bindings.has_self_type)..]; for arg in args_no_self { match arg { - GenericArg::Lifetime(_) => provided_lifetimes_count += 1, - GenericArg::Type(_) | GenericArg::Const(_) => provided_types_and_consts_count += 1, + HirGenericArg::Lifetime(_) => provided_lifetimes_count += 1, + HirGenericArg::Type(_) | HirGenericArg::Const(_) => { + provided_types_and_consts_count += 1 + } } } } @@ -876,6 +1036,13 @@ fn check_generic_args_len( ctx.report_missing_lifetime(def, lifetime_args_len as u32); had_error = true } + LifetimeElisionKind::ElisionFailure => { + ctx.report_elision_failure(def, lifetime_args_len as u32); + had_error = true; + } + LifetimeElisionKind::StaticIfNoLifetimeInScope { only_lint: _ } => { + // FIXME: Check there are other lifetimes in scope, and error/lint. + } LifetimeElisionKind::Elided(_) => { ctx.report_elided_lifetimes_in_path(def, lifetime_args_len as u32, false); } @@ -922,17 +1089,21 @@ fn check_generic_args_len( had_error } -pub(crate) fn substs_from_args_and_bindings( - db: &dyn HirDatabase, +pub(crate) fn substs_from_args_and_bindings<'db>( + db: &'db dyn HirDatabase, store: &ExpressionStore, - args_and_bindings: Option<&GenericArgs>, + args_and_bindings: Option<&HirGenericArgs>, def: GenericDefId, mut infer_args: bool, - lifetime_elision: LifetimeElisionKind, + lifetime_elision: LifetimeElisionKind<'db>, lowering_assoc_type_generics: bool, - explicit_self_ty: Option, - ctx: &mut impl GenericArgsLowerer, -) -> Substitution { + explicit_self_ty: Option>, + ctx: &mut impl GenericArgsLowerer<'db>, +) -> GenericArgs<'db> { + let interner = DbInterner::new_with(db, None, None); + + tracing::debug!(?args_and_bindings); + // Order is // - Parent parameters // - Optional Self parameter @@ -943,7 +1114,7 @@ pub(crate) fn substs_from_args_and_bindings( // We do not allow inference if there are specified args, i.e. we do not allow partial inference. 
let has_non_lifetime_args = - args_slice.iter().any(|arg| !matches!(arg, GenericArg::Lifetime(_))); + args_slice.iter().any(|arg| !matches!(arg, HirGenericArg::Lifetime(_))); infer_args &= !has_non_lifetime_args; let had_count_error = check_generic_args_len( @@ -984,7 +1155,7 @@ pub(crate) fn substs_from_args_and_bindings( let (_, self_ty) = args.next().expect("has_self_type=true, should have Self type"); ctx.provided_kind(self_param_id, self_param, self_ty) } else { - explicit_self_ty.map(|it| it.cast(Interner)).unwrap_or_else(|| { + explicit_self_ty.map(|it| it.into()).unwrap_or_else(|| { ctx.inferred_kind(def, self_param_id, self_param, infer_args, &substs) }) }; @@ -999,7 +1170,7 @@ pub(crate) fn substs_from_args_and_bindings( // input. We try to handle both sensibly. match (args.peek(), params.peek()) { (Some(&(arg_idx, arg)), Some(&(param_id, param))) => match (arg, param) { - (GenericArg::Type(_), GenericParamDataRef::TypeParamData(type_param)) + (HirGenericArg::Type(_), GenericParamDataRef::TypeParamData(type_param)) if type_param.provenance == TypeParamProvenance::ArgumentImplTrait => { // Do not allow specifying `impl Trait` explicitly. We already err at that, but if we won't handle it here @@ -1007,15 +1178,15 @@ pub(crate) fn substs_from_args_and_bindings( substs.push(ctx.inferred_kind(def, param_id, param, infer_args, &substs)); params.next(); } - (GenericArg::Lifetime(_), GenericParamDataRef::LifetimeParamData(_)) - | (GenericArg::Type(_), GenericParamDataRef::TypeParamData(_)) - | (GenericArg::Const(_), GenericParamDataRef::ConstParamData(_)) => { + (HirGenericArg::Lifetime(_), GenericParamDataRef::LifetimeParamData(_)) + | (HirGenericArg::Type(_), GenericParamDataRef::TypeParamData(_)) + | (HirGenericArg::Const(_), GenericParamDataRef::ConstParamData(_)) => { substs.push(ctx.provided_kind(param_id, param, arg)); args.next(); params.next(); } ( - GenericArg::Type(_) | GenericArg::Const(_), + HirGenericArg::Type(_) | HirGenericArg::Const(_), GenericParamDataRef::LifetimeParamData(_), ) => { // We expected a lifetime argument, but got a type or const @@ -1024,13 +1195,13 @@ pub(crate) fn substs_from_args_and_bindings( params.next(); force_infer_lt = Some((arg_idx as u32, param_id)); } - (GenericArg::Type(type_ref), GenericParamDataRef::ConstParamData(_)) => { + (HirGenericArg::Type(type_ref), GenericParamDataRef::ConstParamData(_)) => { if let Some(konst) = type_looks_like_const(store, *type_ref) { let GenericParamId::ConstParamId(param_id) = param_id else { panic!("unmatching param kinds"); }; - let const_ty = db.const_param_ty(param_id); - substs.push(ctx.provided_type_like_const(const_ty, konst).cast(Interner)); + let const_ty = const_param_ty_query(db, param_id); + substs.push(ctx.provided_type_like_const(const_ty, konst).into()); args.next(); params.next(); } else { @@ -1069,7 +1240,7 @@ pub(crate) fn substs_from_args_and_bindings( // after a type or const). We want to throw an error in this case. if !had_count_error { assert!( - matches!(arg, GenericArg::Lifetime(_)), + matches!(arg, HirGenericArg::Lifetime(_)), "the only possible situation here is incorrect lifetime order" ); let (provided_arg_idx, param_id) = @@ -1084,12 +1255,16 @@ pub(crate) fn substs_from_args_and_bindings( // If there are fewer arguments than parameters, it means we're inferring the remaining arguments. 
let param = if let GenericParamId::LifetimeParamId(_) = param_id { match &lifetime_elision { - LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true } + LifetimeElisionKind::ElisionFailure + | LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true } | LifetimeElisionKind::AnonymousReportError => { assert!(had_count_error); ctx.inferred_kind(def, param_id, param, infer_args, &substs) } - LifetimeElisionKind::Elided(lifetime) => lifetime.clone().cast(Interner), + LifetimeElisionKind::StaticIfNoLifetimeInScope { only_lint: _ } => { + Region::new_static(interner).into() + } + LifetimeElisionKind::Elided(lifetime) => (*lifetime).into(), LifetimeElisionKind::AnonymousCreateParameter { report_in_path: false } | LifetimeElisionKind::Infer => { // FIXME: With `AnonymousCreateParameter`, we need to create a new lifetime parameter here @@ -1108,7 +1283,7 @@ pub(crate) fn substs_from_args_and_bindings( } } - Substitution::from_iter(Interner, substs) + GenericArgs::new_from_iter(interner, substs) } fn type_looks_like_const( @@ -1127,3 +1302,17 @@ fn type_looks_like_const( _ => None, } } + +fn unknown_subst<'db>(interner: DbInterner<'db>, def: impl Into) -> GenericArgs<'db> { + let params = generics(interner.db(), def.into()); + GenericArgs::new_from_iter( + interner, + params.iter_id().map(|id| match id { + GenericParamId::TypeParamId(_) => Ty::new_error(interner, ErrorGuaranteed).into(), + GenericParamId::ConstParamId(id) => { + unknown_const_as_generic(const_param_ty_query(interner.db(), id)) + } + GenericParamId::LifetimeParamId(_) => Region::error(interner).into(), + }), + ) +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver.rs deleted file mode 100644 index 76ee1a4f2d2b7..0000000000000 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver.rs +++ /dev/null @@ -1,2138 +0,0 @@ -//! Methods for lowering the HIR to types. There are two main cases here: -//! -//! - Lowering a type reference like `&usize` or `Option` to a -//! type: The entry point for this is `TyLoweringContext::lower_ty`. -//! - Building the type for an item: This happens through the `ty` query. -//! -//! This usually involves resolving names, collecting generic arguments etc. 
-pub(crate) mod path; - -use std::{ - cell::OnceCell, - iter, mem, - ops::{self, Deref, Not as _}, -}; - -use base_db::Crate; -use either::Either; -use hir_def::{ - AdtId, AssocItemId, CallableDefId, ConstId, ConstParamId, DefWithBodyId, EnumVariantId, - FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LifetimeParamId, - LocalFieldId, Lookup, StaticId, StructId, TypeAliasId, TypeOrConstParamId, TypeParamId, - VariantId, - expr_store::{ExpressionStore, HygieneId, path::Path}, - hir::generics::{ - GenericParamDataRef, TypeOrConstParamData, TypeParamProvenance, WherePredicate, - }, - item_tree::FieldsShape, - lang_item::LangItem, - resolver::{HasResolver, LifetimeNs, Resolver, TypeNs, ValueNs}, - signatures::{FunctionSignature, TraitFlags, TypeAliasFlags}, - type_ref::{ - ConstRef, LifetimeRefId, LiteralConstRef, PathId, TraitBoundModifier, - TraitRef as HirTraitRef, TypeBound, TypeRef, TypeRefId, - }, -}; -use hir_expand::name::Name; -use la_arena::{Arena, ArenaMap, Idx}; -use path::{PathDiagnosticCallback, PathLoweringContext}; -use rustc_ast_ir::Mutability; -use rustc_hash::FxHashSet; -use rustc_pattern_analysis::Captures; -use rustc_type_ir::{ - AliasTyKind, ConstKind, DebruijnIndex, ExistentialPredicate, ExistentialProjection, - ExistentialTraitRef, FnSig, OutlivesPredicate, - TyKind::{self}, - TypeVisitableExt, - inherent::{GenericArg as _, GenericArgs as _, IntoKind as _, Region as _, SliceLike, Ty as _}, -}; -use salsa::plumbing::AsId; -use smallvec::{SmallVec, smallvec}; -use stdx::never; -use triomphe::Arc; - -use crate::{ - FnAbi, ImplTraitId, TraitEnvironment, TyDefId, TyLoweringDiagnostic, TyLoweringDiagnosticKind, - ValueTyDefId, - consteval::intern_const_ref, - db::HirDatabase, - generics::{Generics, generics, trait_self_param_idx}, - lower::{Diagnostics, PathDiagnosticCallbackData, create_diagnostics}, - next_solver::{ - AliasTy, Binder, BoundExistentialPredicates, Clause, Clauses, Const, DbInterner, - EarlyBinder, EarlyParamRegion, ErrorGuaranteed, GenericArg, GenericArgs, ParamConst, - ParamEnv, PolyFnSig, Predicate, Region, SolverDefId, TraitPredicate, TraitRef, Ty, Tys, - UnevaluatedConst, abi::Safety, - }, -}; - -#[derive(PartialEq, Eq, Debug, Hash)] -pub struct ImplTraits<'db> { - pub(crate) impl_traits: Arena>, -} - -#[derive(PartialEq, Eq, Debug, Hash)] -pub struct ImplTrait<'db> { - pub(crate) predicates: Vec>, -} - -pub type ImplTraitIdx<'db> = Idx>; - -#[derive(Debug, Default)] -struct ImplTraitLoweringState<'db> { - /// When turning `impl Trait` into opaque types, we have to collect the - /// bounds at the same time to get the IDs correct (without becoming too - /// complicated). - mode: ImplTraitLoweringMode, - // This is structured as a struct with fields and not as an enum because it helps with the borrow checker. - opaque_type_data: Arena>, -} - -impl<'db> ImplTraitLoweringState<'db> { - fn new(mode: ImplTraitLoweringMode) -> ImplTraitLoweringState<'db> { - Self { mode, opaque_type_data: Arena::new() } - } -} - -#[derive(Debug, Clone)] -pub enum LifetimeElisionKind<'db> { - /// Create a new anonymous lifetime parameter and reference it. 
- /// - /// If `report_in_path`, report an error when encountering lifetime elision in a path: - /// ```compile_fail - /// struct Foo<'a> { x: &'a () } - /// async fn foo(x: Foo) {} - /// ``` - /// - /// Note: the error should not trigger when the elided lifetime is in a pattern or - /// expression-position path: - /// ``` - /// struct Foo<'a> { x: &'a () } - /// async fn foo(Foo { x: _ }: Foo<'_>) {} - /// ``` - AnonymousCreateParameter { report_in_path: bool }, - - /// Replace all anonymous lifetimes by provided lifetime. - Elided(Region<'db>), - - /// Give a hard error when either `&` or `'_` is written. Used to - /// rule out things like `where T: Foo<'_>`. Does not imply an - /// error on default object bounds (e.g., `Box`). - AnonymousReportError, - - /// Resolves elided lifetimes to `'static` if there are no other lifetimes in scope, - /// otherwise give a warning that the previous behavior of introducing a new early-bound - /// lifetime is a bug and will be removed (if `only_lint` is enabled). - StaticIfNoLifetimeInScope { only_lint: bool }, - - /// Signal we cannot find which should be the anonymous lifetime. - ElisionFailure, - - /// Infer all elided lifetimes. - Infer, -} - -impl<'db> LifetimeElisionKind<'db> { - #[inline] - pub(crate) fn for_const( - interner: DbInterner<'db>, - const_parent: ItemContainerId, - ) -> LifetimeElisionKind<'db> { - match const_parent { - ItemContainerId::ExternBlockId(_) | ItemContainerId::ModuleId(_) => { - LifetimeElisionKind::Elided(Region::new_static(interner)) - } - ItemContainerId::ImplId(_) => { - LifetimeElisionKind::StaticIfNoLifetimeInScope { only_lint: true } - } - ItemContainerId::TraitId(_) => { - LifetimeElisionKind::StaticIfNoLifetimeInScope { only_lint: false } - } - } - } - - #[inline] - pub(crate) fn for_fn_params(data: &FunctionSignature) -> LifetimeElisionKind<'db> { - LifetimeElisionKind::AnonymousCreateParameter { report_in_path: data.is_async() } - } - - #[inline] - pub(crate) fn for_fn_ret(interner: DbInterner<'db>) -> LifetimeElisionKind<'db> { - // FIXME: We should use the elided lifetime here, or `ElisionFailure`. - LifetimeElisionKind::Elided(Region::error(interner)) - } -} - -#[derive(Debug)] -pub struct TyLoweringContext<'db, 'a> { - pub db: &'db dyn HirDatabase, - interner: DbInterner<'db>, - resolver: &'a Resolver<'db>, - store: &'a ExpressionStore, - def: GenericDefId, - generics: OnceCell, - in_binders: DebruijnIndex, - impl_trait_mode: ImplTraitLoweringState<'db>, - /// Tracks types with explicit `?Sized` bounds. - pub(crate) unsized_types: FxHashSet>, - pub(crate) diagnostics: Vec, - lifetime_elision: LifetimeElisionKind<'db>, - /// When lowering the defaults for generic params, this contains the index of the currently lowered param. - /// We disallow referring to later params, or to ADT's `Self`. 
- lowering_param_default: Option, -} - -impl<'db, 'a> TyLoweringContext<'db, 'a> { - pub fn new( - db: &'db dyn HirDatabase, - resolver: &'a Resolver<'db>, - store: &'a ExpressionStore, - def: GenericDefId, - lifetime_elision: LifetimeElisionKind<'db>, - ) -> Self { - let impl_trait_mode = ImplTraitLoweringState::new(ImplTraitLoweringMode::Disallowed); - let in_binders = DebruijnIndex::ZERO; - Self { - db, - interner: DbInterner::new_with(db, Some(resolver.krate()), None), - resolver, - def, - generics: Default::default(), - store, - in_binders, - impl_trait_mode, - unsized_types: FxHashSet::default(), - diagnostics: Vec::new(), - lifetime_elision, - lowering_param_default: None, - } - } - - pub(crate) fn set_lifetime_elision(&mut self, lifetime_elision: LifetimeElisionKind<'db>) { - self.lifetime_elision = lifetime_elision; - } - - pub(crate) fn with_debruijn( - &mut self, - debruijn: DebruijnIndex, - f: impl FnOnce(&mut TyLoweringContext<'db, '_>) -> T, - ) -> T { - let old_debruijn = mem::replace(&mut self.in_binders, debruijn); - let result = f(self); - self.in_binders = old_debruijn; - result - } - - pub(crate) fn with_shifted_in( - &mut self, - debruijn: DebruijnIndex, - f: impl FnOnce(&mut TyLoweringContext<'db, '_>) -> T, - ) -> T { - self.with_debruijn(self.in_binders.shifted_in(debruijn.as_u32()), f) - } - - pub(crate) fn with_impl_trait_mode(self, impl_trait_mode: ImplTraitLoweringMode) -> Self { - Self { impl_trait_mode: ImplTraitLoweringState::new(impl_trait_mode), ..self } - } - - pub(crate) fn impl_trait_mode(&mut self, impl_trait_mode: ImplTraitLoweringMode) -> &mut Self { - self.impl_trait_mode = ImplTraitLoweringState::new(impl_trait_mode); - self - } - - pub(crate) fn lowering_param_default(&mut self, index: u32) { - self.lowering_param_default = Some(index); - } - - pub(crate) fn push_diagnostic(&mut self, type_ref: TypeRefId, kind: TyLoweringDiagnosticKind) { - self.diagnostics.push(TyLoweringDiagnostic { source: type_ref, kind }); - } -} - -#[derive(Copy, Clone, Debug, PartialEq, Eq, Default)] -pub(crate) enum ImplTraitLoweringMode { - /// `impl Trait` gets lowered into an opaque type that doesn't unify with - /// anything except itself. This is used in places where values flow 'out', - /// i.e. for arguments of the function we're currently checking, and return - /// types of functions we're calling. - Opaque, - /// `impl Trait` is disallowed and will be an error. 
- #[default] - Disallowed, -} - -impl<'db, 'a> TyLoweringContext<'db, 'a> { - pub fn lower_ty(&mut self, type_ref: TypeRefId) -> Ty<'db> { - self.lower_ty_ext(type_ref).0 - } - - pub(crate) fn lower_const(&mut self, const_ref: ConstRef, const_type: Ty<'db>) -> Const<'db> { - let const_ref = &self.store[const_ref.expr]; - match const_ref { - hir_def::hir::Expr::Path(path) => { - self.path_to_const(path).unwrap_or_else(|| unknown_const(const_type)) - } - hir_def::hir::Expr::Literal(literal) => intern_const_ref( - self.db, - &match *literal { - hir_def::hir::Literal::Float(_, _) - | hir_def::hir::Literal::String(_) - | hir_def::hir::Literal::ByteString(_) - | hir_def::hir::Literal::CString(_) => LiteralConstRef::Unknown, - hir_def::hir::Literal::Char(c) => LiteralConstRef::Char(c), - hir_def::hir::Literal::Bool(b) => LiteralConstRef::Bool(b), - hir_def::hir::Literal::Int(val, _) => LiteralConstRef::Int(val), - hir_def::hir::Literal::Uint(val, _) => LiteralConstRef::UInt(val), - }, - const_type, - self.resolver.krate(), - ), - hir_def::hir::Expr::UnaryOp { expr: inner_expr, op: hir_def::hir::UnaryOp::Neg } => { - if let hir_def::hir::Expr::Literal(literal) = &self.store[*inner_expr] { - // Only handle negation for signed integers and floats - match literal { - hir_def::hir::Literal::Int(_, _) | hir_def::hir::Literal::Float(_, _) => { - if let Some(negated_literal) = literal.clone().negate() { - intern_const_ref( - self.db, - &negated_literal.into(), - const_type, - self.resolver.krate(), - ) - } else { - unknown_const(const_type) - } - } - // For unsigned integers, chars, bools, etc., negation is not meaningful - _ => unknown_const(const_type), - } - } else { - unknown_const(const_type) - } - } - _ => unknown_const(const_type), - } - } - - pub(crate) fn path_to_const(&mut self, path: &Path) -> Option> { - match self.resolver.resolve_path_in_value_ns_fully(self.db, path, HygieneId::ROOT) { - Some(ValueNs::GenericParam(p)) => { - let args = self.generics(); - match args.type_or_const_param_idx(p.into()) { - Some(idx) => Some(self.const_param(p, idx as u32)), - None => { - never!( - "Generic list doesn't contain this param: {:?}, {:?}, {:?}", - args, - path, - p - ); - None - } - } - } - Some(ValueNs::ConstId(c)) => { - let args = GenericArgs::new_from_iter(self.interner, []); - Some(Const::new( - self.interner, - rustc_type_ir::ConstKind::Unevaluated(UnevaluatedConst::new( - SolverDefId::ConstId(c), - args, - )), - )) - } - _ => None, - } - } - - pub(crate) fn lower_path_as_const(&mut self, path: &Path, const_type: Ty<'db>) -> Const<'db> { - self.path_to_const(path).unwrap_or_else(|| unknown_const(const_type)) - } - - fn generics(&self) -> &Generics { - self.generics.get_or_init(|| generics(self.db, self.def)) - } - - fn param_index_is_disallowed(&self, index: u32) -> bool { - self.lowering_param_default - .is_some_and(|disallow_params_after| index >= disallow_params_after) - } - - fn type_param(&mut self, id: TypeParamId, index: u32) -> Ty<'db> { - if self.param_index_is_disallowed(index) { - // FIXME: Report an error. - Ty::new_error(self.interner, ErrorGuaranteed) - } else { - Ty::new_param(self.interner, id, index) - } - } - - fn const_param(&mut self, id: ConstParamId, index: u32) -> Const<'db> { - if self.param_index_is_disallowed(index) { - // FIXME: Report an error. 
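// Illustrative sketch only, in plain Rust: the shapes of const arguments that
// `lower_const` and `path_to_const` above distinguish, namely literals,
// negated literals of signed types, and paths to const items or parameters.
struct Buf<const N: usize>([u8; N]);

const LEN: usize = 4;

fn offset<const I: i32>() -> i32 {
    I
}

fn main() {
    let _literal = Buf::<8>([0; 8]); // plain literal
    let _path = Buf::<LEN>([0; LEN]); // path to a const item
    let negated = offset::<{ -3 }>(); // negation is only meaningful for signed ints and floats
    assert_eq!(negated, -3);
}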
- Const::error(self.interner) - } else { - Const::new_param(self.interner, ParamConst { id, index }) - } - } - - fn region_param(&mut self, id: LifetimeParamId, index: u32) -> Region<'db> { - if self.param_index_is_disallowed(index) { - // FIXME: Report an error. - Region::error(self.interner) - } else { - Region::new_early_param(self.interner, EarlyParamRegion { id, index }) - } - } - - #[tracing::instrument(skip(self), ret)] - pub fn lower_ty_ext(&mut self, type_ref_id: TypeRefId) -> (Ty<'db>, Option) { - let interner = self.interner; - let mut res = None; - let type_ref = &self.store[type_ref_id]; - tracing::debug!(?type_ref); - let ty = match type_ref { - TypeRef::Never => Ty::new(interner, TyKind::Never), - TypeRef::Tuple(inner) => { - let inner_tys = inner.iter().map(|&tr| self.lower_ty(tr)); - Ty::new_tup_from_iter(interner, inner_tys) - } - TypeRef::Path(path) => { - let (ty, res_) = - self.lower_path(path, PathId::from_type_ref_unchecked(type_ref_id)); - res = res_; - ty - } - &TypeRef::TypeParam(type_param_id) => { - res = Some(TypeNs::GenericParam(type_param_id)); - - let generics = self.generics(); - let (idx, _data) = - generics.type_or_const_param(type_param_id.into()).expect("matching generics"); - self.type_param(type_param_id, idx as u32) - } - &TypeRef::RawPtr(inner, mutability) => { - let inner_ty = self.lower_ty(inner); - Ty::new(interner, TyKind::RawPtr(inner_ty, lower_mutability(mutability))) - } - TypeRef::Array(array) => { - let inner_ty = self.lower_ty(array.ty); - let const_len = self.lower_const(array.len, Ty::new_usize(interner)); - Ty::new_array_with_const_len(interner, inner_ty, const_len) - } - &TypeRef::Slice(inner) => { - let inner_ty = self.lower_ty(inner); - Ty::new_slice(interner, inner_ty) - } - TypeRef::Reference(ref_) => { - let inner_ty = self.lower_ty(ref_.ty); - // FIXME: It should infer the eldided lifetimes instead of stubbing with error - let lifetime = ref_ - .lifetime - .map_or_else(|| Region::error(interner), |lr| self.lower_lifetime(lr)); - Ty::new_ref(interner, lifetime, inner_ty, lower_mutability(ref_.mutability)) - } - TypeRef::Placeholder => Ty::new_error(interner, ErrorGuaranteed), - TypeRef::Fn(fn_) => { - let substs = self.with_shifted_in( - DebruijnIndex::from_u32(1), - |ctx: &mut TyLoweringContext<'_, '_>| { - Tys::new_from_iter( - interner, - fn_.params.iter().map(|&(_, tr)| ctx.lower_ty(tr)), - ) - }, - ); - Ty::new_fn_ptr( - interner, - Binder::dummy(FnSig { - abi: fn_.abi.as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol), - safety: if fn_.is_unsafe { Safety::Unsafe } else { Safety::Safe }, - c_variadic: fn_.is_varargs, - inputs_and_output: substs, - }), - ) - } - TypeRef::DynTrait(bounds) => self.lower_dyn_trait(bounds), - TypeRef::ImplTrait(bounds) => { - match self.impl_trait_mode.mode { - ImplTraitLoweringMode::Opaque => { - let origin = match self.resolver.generic_def() { - Some(GenericDefId::FunctionId(it)) => Either::Left(it), - Some(GenericDefId::TypeAliasId(it)) => Either::Right(it), - _ => panic!( - "opaque impl trait lowering must be in function or type alias" - ), - }; - - // this dance is to make sure the data is in the right - // place even if we encounter more opaque types while - // lowering the bounds - let idx = self - .impl_trait_mode - .opaque_type_data - .alloc(ImplTrait { predicates: Vec::default() }); - - // FIXME(next-solver): this from_raw/into_raw dance isn't nice, but it's minimal - let impl_trait_id = origin.either( - |f| ImplTraitId::ReturnTypeImplTrait(f, Idx::from_raw(idx.into_raw())), - |a| 
ImplTraitId::TypeAliasImplTrait(a, Idx::from_raw(idx.into_raw())), - ); - let opaque_ty_id: SolverDefId = - self.db.intern_impl_trait_id(impl_trait_id).into(); - - // We don't want to lower the bounds inside the binders - // we're currently in, because they don't end up inside - // those binders. E.g. when we have `impl Trait>`, the `impl OtherTrait` can't refer - // to the self parameter from `impl Trait`, and the - // bounds aren't actually stored nested within each - // other, but separately. So if the `T` refers to a type - // parameter of the outer function, it's just one binder - // away instead of two. - let actual_opaque_type_data = self - .with_debruijn(DebruijnIndex::ZERO, |ctx| { - ctx.lower_impl_trait(opaque_ty_id, bounds, self.resolver.krate()) - }); - self.impl_trait_mode.opaque_type_data[idx] = actual_opaque_type_data; - - let args = GenericArgs::identity_for_item(self.interner, opaque_ty_id); - Ty::new_alias( - self.interner, - AliasTyKind::Opaque, - AliasTy::new_from_args(self.interner, opaque_ty_id, args), - ) - } - ImplTraitLoweringMode::Disallowed => { - // FIXME: report error - Ty::new_error(self.interner, ErrorGuaranteed) - } - } - } - TypeRef::Error => Ty::new_error(self.interner, ErrorGuaranteed), - }; - (ty, res) - } - - /// This is only for `generic_predicates_for_param`, where we can't just - /// lower the self types of the predicates since that could lead to cycles. - /// So we just check here if the `type_ref` resolves to a generic param, and which. - fn lower_ty_only_param(&self, type_ref: TypeRefId) -> Option { - let type_ref = &self.store[type_ref]; - let path = match type_ref { - TypeRef::Path(path) => path, - &TypeRef::TypeParam(idx) => return Some(idx.into()), - _ => return None, - }; - if path.type_anchor().is_some() { - return None; - } - if path.segments().len() > 1 { - return None; - } - let resolution = match self.resolver.resolve_path_in_type_ns(self.db, path) { - Some((it, None, _)) => it, - _ => return None, - }; - match resolution { - TypeNs::GenericParam(param_id) => Some(param_id.into()), - _ => None, - } - } - - #[inline] - fn on_path_diagnostic_callback<'b>(type_ref: TypeRefId) -> PathDiagnosticCallback<'b, 'db> { - PathDiagnosticCallback { - data: Either::Left(PathDiagnosticCallbackData(type_ref)), - callback: |data, this, diag| { - let type_ref = data.as_ref().left().unwrap().0; - this.push_diagnostic(type_ref, TyLoweringDiagnosticKind::PathDiagnostic(diag)) - }, - } - } - - #[inline] - fn at_path(&mut self, path_id: PathId) -> PathLoweringContext<'_, 'a, 'db> { - PathLoweringContext::new( - self, - Self::on_path_diagnostic_callback(path_id.type_ref()), - &self.store[path_id], - ) - } - - pub(crate) fn lower_path(&mut self, path: &Path, path_id: PathId) -> (Ty<'db>, Option) { - // Resolve the path (in type namespace) - if let Some(type_ref) = path.type_anchor() { - let (ty, res) = self.lower_ty_ext(type_ref); - let mut ctx = self.at_path(path_id); - return ctx.lower_ty_relative_path(ty, res, false); - } - - let mut ctx = self.at_path(path_id); - let (resolution, remaining_index) = match ctx.resolve_path_in_type_ns() { - Some(it) => it, - None => return (Ty::new_error(self.interner, ErrorGuaranteed), None), - }; - - if matches!(resolution, TypeNs::TraitId(_)) && remaining_index.is_none() { - // trait object type without dyn - let bound = TypeBound::Path(path_id, TraitBoundModifier::None); - let ty = self.lower_dyn_trait(&[bound]); - return (ty, None); - } - - ctx.lower_partly_resolved_path(resolution, false) - } - - fn 
lower_trait_ref_from_path( - &mut self, - path_id: PathId, - explicit_self_ty: Ty<'db>, - ) -> Option<(TraitRef<'db>, PathLoweringContext<'_, 'a, 'db>)> { - let mut ctx = self.at_path(path_id); - let resolved = match ctx.resolve_path_in_type_ns_fully()? { - // FIXME(trait_alias): We need to handle trait alias here. - TypeNs::TraitId(tr) => tr, - _ => return None, - }; - Some((ctx.lower_trait_ref_from_resolved_path(resolved, explicit_self_ty, false), ctx)) - } - - fn lower_trait_ref( - &mut self, - trait_ref: &HirTraitRef, - explicit_self_ty: Ty<'db>, - ) -> Option> { - self.lower_trait_ref_from_path(trait_ref.path, explicit_self_ty).map(|it| it.0) - } - - pub(crate) fn lower_where_predicate<'b>( - &'b mut self, - where_predicate: &'b WherePredicate, - ignore_bindings: bool, - generics: &Generics, - predicate_filter: PredicateFilter, - ) -> impl Iterator> + use<'a, 'b, 'db> { - match where_predicate { - WherePredicate::ForLifetime { target, bound, .. } - | WherePredicate::TypeBound { target, bound } => { - if let PredicateFilter::SelfTrait = predicate_filter { - let target_type = &self.store[*target]; - let self_type = 'is_self: { - if let TypeRef::Path(path) = target_type - && path.is_self_type() - { - break 'is_self true; - } - if let TypeRef::TypeParam(param) = target_type - && generics[param.local_id()].is_trait_self() - { - break 'is_self true; - } - false - }; - if !self_type { - return Either::Left(Either::Left(iter::empty())); - } - } - let self_ty = self.lower_ty(*target); - Either::Left(Either::Right(self.lower_type_bound(bound, self_ty, ignore_bindings))) - } - &WherePredicate::Lifetime { bound, target } => { - Either::Right(iter::once(Clause(Predicate::new( - self.interner, - Binder::dummy(rustc_type_ir::PredicateKind::Clause( - rustc_type_ir::ClauseKind::RegionOutlives(OutlivesPredicate( - self.lower_lifetime(bound), - self.lower_lifetime(target), - )), - )), - )))) - } - } - .into_iter() - } - - pub(crate) fn lower_type_bound<'b>( - &'b mut self, - bound: &'b TypeBound, - self_ty: Ty<'db>, - ignore_bindings: bool, - ) -> impl Iterator> + use<'b, 'a, 'db> { - let interner = self.interner; - let mut assoc_bounds = None; - let mut clause = None; - match bound { - &TypeBound::Path(path, TraitBoundModifier::None) | &TypeBound::ForLifetime(_, path) => { - // FIXME Don't silently drop the hrtb lifetimes here - if let Some((trait_ref, mut ctx)) = self.lower_trait_ref_from_path(path, self_ty) { - // FIXME(sized-hierarchy): Remove this bound modifications once we have implemented - // sized-hierarchy correctly. 
- let meta_sized = LangItem::MetaSized - .resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate()); - let pointee_sized = LangItem::PointeeSized - .resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate()); - if meta_sized.is_some_and(|it| it == trait_ref.def_id.0) { - // Ignore this bound - } else if pointee_sized.is_some_and(|it| it == trait_ref.def_id.0) { - // Regard this as `?Sized` bound - ctx.ty_ctx().unsized_types.insert(self_ty); - } else { - if !ignore_bindings { - assoc_bounds = ctx.assoc_type_bindings_from_type_bound(trait_ref); - } - clause = Some(Clause(Predicate::new( - interner, - Binder::dummy(rustc_type_ir::PredicateKind::Clause( - rustc_type_ir::ClauseKind::Trait(TraitPredicate { - trait_ref, - polarity: rustc_type_ir::PredicatePolarity::Positive, - }), - )), - ))); - } - } - } - &TypeBound::Path(path, TraitBoundModifier::Maybe) => { - let sized_trait = LangItem::Sized.resolve_trait(self.db, self.resolver.krate()); - // Don't lower associated type bindings as the only possible relaxed trait bound - // `?Sized` has no of them. - // If we got another trait here ignore the bound completely. - let trait_id = self - .lower_trait_ref_from_path(path, self_ty) - .map(|(trait_ref, _)| trait_ref.def_id.0); - if trait_id == sized_trait { - self.unsized_types.insert(self_ty); - } - } - &TypeBound::Lifetime(l) => { - let lifetime = self.lower_lifetime(l); - clause = Some(Clause(Predicate::new( - self.interner, - Binder::dummy(rustc_type_ir::PredicateKind::Clause( - rustc_type_ir::ClauseKind::TypeOutlives(OutlivesPredicate( - self_ty, lifetime, - )), - )), - ))); - } - TypeBound::Use(_) | TypeBound::Error => {} - } - clause.into_iter().chain(assoc_bounds.into_iter().flatten()) - } - - fn lower_dyn_trait(&mut self, bounds: &[TypeBound]) -> Ty<'db> { - let interner = self.interner; - // FIXME: we should never create non-existential predicates in the first place - // For now, use an error type so we don't run into dummy binder issues - let self_ty = Ty::new_error(interner, ErrorGuaranteed); - // INVARIANT: The principal trait bound, if present, must come first. Others may be in any - // order but should be in the same order for the same set but possibly different order of - // bounds in the input. - // INVARIANT: If this function returns `DynTy`, there should be at least one trait bound. - // These invariants are utilized by `TyExt::dyn_trait()` and chalk. 
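// Illustrative sketch only, in plain Rust: the surface rules behind these
// invariants. A trait object has at most one principal (non-auto) trait, any
// number of auto traits, and a region bound that defaults to `'static` here.
use std::fmt::Debug;

fn main() {
    // Principal trait `Debug` plus the auto traits `Send` and `Sync`.
    let obj: Box<dyn Debug + Send + Sync> = Box::new(42_u32);
    // Elided region: `Box<dyn Debug>` is `Box<dyn Debug + 'static>` here.
    let static_obj: Box<dyn Debug> = Box::new("hi");
    println!("{obj:?} {static_obj:?}");
    // A second non-auto trait, e.g. `dyn Debug + Clone`, is rejected; that is
    // the case the `multiple_regular_traits` flag below bails out on.
}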
- let mut lifetime = None; - let bounds = self.with_shifted_in(DebruijnIndex::from_u32(1), |ctx| { - let mut lowered_bounds: Vec< - rustc_type_ir::Binder, ExistentialPredicate>>, - > = Vec::new(); - for b in bounds { - let db = ctx.db; - ctx.lower_type_bound(b, self_ty, false).for_each(|b| { - if let Some(bound) = b - .kind() - .map_bound(|c| match c { - rustc_type_ir::ClauseKind::Trait(t) => { - let id = t.def_id(); - let is_auto = - db.trait_signature(id.0).flags.contains(TraitFlags::AUTO); - if is_auto { - Some(ExistentialPredicate::AutoTrait(t.def_id())) - } else { - Some(ExistentialPredicate::Trait( - ExistentialTraitRef::new_from_args( - interner, - t.def_id(), - GenericArgs::new_from_iter( - interner, - t.trait_ref.args.iter().skip(1), - ), - ), - )) - } - } - rustc_type_ir::ClauseKind::Projection(p) => { - Some(ExistentialPredicate::Projection( - ExistentialProjection::new_from_args( - interner, - p.def_id(), - GenericArgs::new_from_iter( - interner, - p.projection_term.args.iter().skip(1), - ), - p.term, - ), - )) - } - rustc_type_ir::ClauseKind::TypeOutlives(outlives_predicate) => { - lifetime = Some(outlives_predicate.1); - None - } - rustc_type_ir::ClauseKind::RegionOutlives(_) - | rustc_type_ir::ClauseKind::ConstArgHasType(_, _) - | rustc_type_ir::ClauseKind::WellFormed(_) - | rustc_type_ir::ClauseKind::ConstEvaluatable(_) - | rustc_type_ir::ClauseKind::HostEffect(_) - | rustc_type_ir::ClauseKind::UnstableFeature(_) => unreachable!(), - }) - .transpose() - { - lowered_bounds.push(bound); - } - }) - } - - let mut multiple_regular_traits = false; - let mut multiple_same_projection = false; - lowered_bounds.sort_unstable_by(|lhs, rhs| { - use std::cmp::Ordering; - match ((*lhs).skip_binder(), (*rhs).skip_binder()) { - (ExistentialPredicate::Trait(_), ExistentialPredicate::Trait(_)) => { - multiple_regular_traits = true; - // Order doesn't matter - we error - Ordering::Equal - } - ( - ExistentialPredicate::AutoTrait(lhs_id), - ExistentialPredicate::AutoTrait(rhs_id), - ) => lhs_id.0.cmp(&rhs_id.0), - (ExistentialPredicate::Trait(_), _) => Ordering::Less, - (_, ExistentialPredicate::Trait(_)) => Ordering::Greater, - (ExistentialPredicate::AutoTrait(_), _) => Ordering::Less, - (_, ExistentialPredicate::AutoTrait(_)) => Ordering::Greater, - ( - ExistentialPredicate::Projection(lhs), - ExistentialPredicate::Projection(rhs), - ) => { - let lhs_id = match lhs.def_id { - SolverDefId::TypeAliasId(id) => id, - _ => unreachable!(), - }; - let rhs_id = match rhs.def_id { - SolverDefId::TypeAliasId(id) => id, - _ => unreachable!(), - }; - // We only compare the `associated_ty_id`s. We shouldn't have - // multiple bounds for an associated type in the correct Rust code, - // and if we do, we error out. - if lhs_id == rhs_id { - multiple_same_projection = true; - } - lhs_id.as_id().index().cmp(&rhs_id.as_id().index()) - } - } - }); - - if multiple_regular_traits || multiple_same_projection { - return None; - } - - if !lowered_bounds.first().map_or(false, |b| { - matches!( - b.as_ref().skip_binder(), - ExistentialPredicate::Trait(_) | ExistentialPredicate::AutoTrait(_) - ) - }) { - return None; - } - - // As multiple occurrences of the same auto traits *are* permitted, we deduplicate the - // bounds. We shouldn't have repeated elements besides auto traits at this point. 
- lowered_bounds.dedup(); - - Some(BoundExistentialPredicates::new_from_iter(interner, lowered_bounds)) - }); - - if let Some(bounds) = bounds { - let region = match lifetime { - Some(it) => match it.kind() { - rustc_type_ir::RegionKind::ReBound(db, var) => Region::new_bound( - self.interner, - db.shifted_out_to_binder(DebruijnIndex::from_u32(2)), - var, - ), - _ => it, - }, - None => Region::new_static(self.interner), - }; - Ty::new_dynamic(self.interner, bounds, region) - } else { - // FIXME: report error - // (additional non-auto traits, associated type rebound, or no resolved trait) - Ty::new_error(self.interner, ErrorGuaranteed) - } - } - - fn lower_impl_trait( - &mut self, - def_id: SolverDefId, - bounds: &[TypeBound], - krate: Crate, - ) -> ImplTrait<'db> { - let interner = self.interner; - cov_mark::hit!(lower_rpit); - let args = GenericArgs::identity_for_item(interner, def_id); - let self_ty = Ty::new_alias( - self.interner, - rustc_type_ir::AliasTyKind::Opaque, - AliasTy::new_from_args(interner, def_id, args), - ); - let predicates = self.with_shifted_in(DebruijnIndex::from_u32(1), |ctx| { - let mut predicates = Vec::new(); - for b in bounds { - predicates.extend(ctx.lower_type_bound(b, self_ty, false)); - } - - if !ctx.unsized_types.contains(&self_ty) { - let sized_trait = LangItem::Sized.resolve_trait(self.db, krate); - let sized_clause = sized_trait.map(|trait_id| { - let trait_ref = TraitRef::new_from_args( - interner, - trait_id.into(), - GenericArgs::new_from_iter(interner, [self_ty.into()]), - ); - Clause(Predicate::new( - interner, - Binder::dummy(rustc_type_ir::PredicateKind::Clause( - rustc_type_ir::ClauseKind::Trait(TraitPredicate { - trait_ref, - polarity: rustc_type_ir::PredicatePolarity::Positive, - }), - )), - )) - }); - predicates.extend(sized_clause); - } - predicates.shrink_to_fit(); - predicates - }); - ImplTrait { predicates } - } - - pub(crate) fn lower_lifetime(&mut self, lifetime: LifetimeRefId) -> Region<'db> { - match self.resolver.resolve_lifetime(&self.store[lifetime]) { - Some(resolution) => match resolution { - LifetimeNs::Static => Region::new_static(self.interner), - LifetimeNs::LifetimeParam(id) => { - let idx = match self.generics().lifetime_idx(id) { - None => return Region::error(self.interner), - Some(idx) => idx, - }; - self.region_param(id, idx as u32) - } - }, - None => Region::error(self.interner), - } - } -} - -pub(crate) fn lower_mutability(m: hir_def::type_ref::Mutability) -> Mutability { - match m { - hir_def::type_ref::Mutability::Shared => Mutability::Not, - hir_def::type_ref::Mutability::Mut => Mutability::Mut, - } -} - -fn unknown_const(_ty: Ty<'_>) -> Const<'_> { - Const::new(DbInterner::conjure(), ConstKind::Error(ErrorGuaranteed)) -} - -pub(crate) fn impl_trait_query<'db>( - db: &'db dyn HirDatabase, - impl_id: ImplId, -) -> Option>> { - db.impl_trait_with_diagnostics(impl_id).map(|it| it.0) -} - -pub(crate) fn impl_trait_with_diagnostics_query<'db>( - db: &'db dyn HirDatabase, - impl_id: ImplId, -) -> Option<(EarlyBinder<'db, TraitRef<'db>>, Diagnostics)> { - let impl_data = db.impl_signature(impl_id); - let resolver = impl_id.resolver(db); - let mut ctx = TyLoweringContext::new( - db, - &resolver, - &impl_data.store, - impl_id.into(), - LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true }, - ); - let self_ty = db.impl_self_ty(impl_id).skip_binder(); - let target_trait = impl_data.target_trait.as_ref()?; - let trait_ref = EarlyBinder::bind(ctx.lower_trait_ref(target_trait, self_ty)?); - Some((trait_ref, 
create_diagnostics(ctx.diagnostics))) -} - -pub(crate) fn return_type_impl_traits<'db>( - db: &'db dyn HirDatabase, - def: hir_def::FunctionId, -) -> Option>>> { - // FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe - let data = db.function_signature(def); - let resolver = def.resolver(db); - let mut ctx_ret = - TyLoweringContext::new(db, &resolver, &data.store, def.into(), LifetimeElisionKind::Infer) - .with_impl_trait_mode(ImplTraitLoweringMode::Opaque); - if let Some(ret_type) = data.ret_type { - let _ret = ctx_ret.lower_ty(ret_type); - } - let return_type_impl_traits = - ImplTraits { impl_traits: ctx_ret.impl_trait_mode.opaque_type_data }; - if return_type_impl_traits.impl_traits.is_empty() { - None - } else { - Some(Arc::new(EarlyBinder::bind(return_type_impl_traits))) - } -} - -pub(crate) fn type_alias_impl_traits<'db>( - db: &'db dyn HirDatabase, - def: hir_def::TypeAliasId, -) -> Option>>> { - let data = db.type_alias_signature(def); - let resolver = def.resolver(db); - let mut ctx = TyLoweringContext::new( - db, - &resolver, - &data.store, - def.into(), - LifetimeElisionKind::AnonymousReportError, - ) - .with_impl_trait_mode(ImplTraitLoweringMode::Opaque); - if let Some(type_ref) = data.ty { - let _ty = ctx.lower_ty(type_ref); - } - let type_alias_impl_traits = ImplTraits { impl_traits: ctx.impl_trait_mode.opaque_type_data }; - if type_alias_impl_traits.impl_traits.is_empty() { - None - } else { - Some(Arc::new(EarlyBinder::bind(type_alias_impl_traits))) - } -} - -/// Build the declared type of an item. This depends on the namespace; e.g. for -/// `struct Foo(usize)`, we have two types: The type of the struct itself, and -/// the constructor function `(usize) -> Foo` which lives in the values -/// namespace. -pub(crate) fn ty_query<'db>(db: &'db dyn HirDatabase, def: TyDefId) -> EarlyBinder<'db, Ty<'db>> { - let interner = DbInterner::new_with(db, None, None); - match def { - TyDefId::BuiltinType(it) => EarlyBinder::bind(Ty::from_builtin_type(interner, it)), - TyDefId::AdtId(it) => EarlyBinder::bind(Ty::new_adt( - interner, - it, - GenericArgs::identity_for_item(interner, it.into()), - )), - TyDefId::TypeAliasId(it) => db.type_for_type_alias_with_diagnostics(it).0, - } -} - -/// Build the declared type of a function. This should not need to look at the -/// function body. -fn type_for_fn<'db>(db: &'db dyn HirDatabase, def: FunctionId) -> EarlyBinder<'db, Ty<'db>> { - let interner = DbInterner::new_with(db, None, None); - EarlyBinder::bind(Ty::new_fn_def( - interner, - CallableDefId::FunctionId(def).into(), - GenericArgs::identity_for_item(interner, def.into()), - )) -} - -/// Build the declared type of a const. -fn type_for_const<'db>(db: &'db dyn HirDatabase, def: ConstId) -> EarlyBinder<'db, Ty<'db>> { - let resolver = def.resolver(db); - let data = db.const_signature(def); - let parent = def.loc(db).container; - let mut ctx = TyLoweringContext::new( - db, - &resolver, - &data.store, - def.into(), - LifetimeElisionKind::AnonymousReportError, - ); - ctx.set_lifetime_elision(LifetimeElisionKind::for_const(ctx.interner, parent)); - EarlyBinder::bind(ctx.lower_ty(data.type_ref)) -} - -/// Build the declared type of a static. 
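// Illustrative sketch only, in plain Rust: the two namespaces described above
// for `struct Foo(usize)`, i.e. the type `Foo` and a constructor in the value
// namespace with the signature `fn(usize) -> Foo`.
#[derive(Debug)]
struct Foo(usize);

fn main() {
    let ctor: fn(usize) -> Foo = Foo; // the constructor as an ordinary function value
    let built: Vec<Foo> = (0..3usize).map(Foo).collect(); // or passed by name
    println!("{:?} {:?}", ctor(7), built);
}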
-fn type_for_static<'db>(db: &'db dyn HirDatabase, def: StaticId) -> EarlyBinder<'db, Ty<'db>> { - let resolver = def.resolver(db); - let data = db.static_signature(def); - let mut ctx = TyLoweringContext::new( - db, - &resolver, - &data.store, - def.into(), - LifetimeElisionKind::AnonymousReportError, - ); - ctx.set_lifetime_elision(LifetimeElisionKind::Elided(Region::new_static(ctx.interner))); - EarlyBinder::bind(ctx.lower_ty(data.type_ref)) -} - -/// Build the type of a tuple struct constructor. -fn type_for_struct_constructor<'db>( - db: &'db dyn HirDatabase, - def: StructId, -) -> Option>> { - let struct_data = def.fields(db); - match struct_data.shape { - FieldsShape::Record => None, - FieldsShape::Unit => Some(type_for_adt(db, def.into())), - FieldsShape::Tuple => { - let interner = DbInterner::new_with(db, None, None); - Some(EarlyBinder::bind(Ty::new_fn_def( - interner, - CallableDefId::StructId(def).into(), - GenericArgs::identity_for_item(interner, def.into()), - ))) - } - } -} - -/// Build the type of a tuple enum variant constructor. -fn type_for_enum_variant_constructor<'db>( - db: &'db dyn HirDatabase, - def: EnumVariantId, -) -> Option>> { - let struct_data = def.fields(db); - match struct_data.shape { - FieldsShape::Record => None, - FieldsShape::Unit => Some(type_for_adt(db, def.loc(db).parent.into())), - FieldsShape::Tuple => { - let interner = DbInterner::new_with(db, None, None); - Some(EarlyBinder::bind(Ty::new_fn_def( - interner, - CallableDefId::EnumVariantId(def).into(), - GenericArgs::identity_for_item(interner, def.loc(db).parent.into()), - ))) - } - } -} - -pub(crate) fn value_ty_query<'db>( - db: &'db dyn HirDatabase, - def: ValueTyDefId, -) -> Option>> { - match def { - ValueTyDefId::FunctionId(it) => Some(type_for_fn(db, it)), - ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it), - ValueTyDefId::UnionId(it) => Some(type_for_adt(db, it.into())), - ValueTyDefId::EnumVariantId(it) => type_for_enum_variant_constructor(db, it), - ValueTyDefId::ConstId(it) => Some(type_for_const(db, it)), - ValueTyDefId::StaticId(it) => Some(type_for_static(db, it)), - } -} - -pub(crate) fn type_for_type_alias_with_diagnostics_query<'db>( - db: &'db dyn HirDatabase, - t: TypeAliasId, -) -> (EarlyBinder<'db, Ty<'db>>, Diagnostics) { - let type_alias_data = db.type_alias_signature(t); - let mut diags = None; - let resolver = t.resolver(db); - let interner = DbInterner::new_with(db, Some(resolver.krate()), None); - let inner = if type_alias_data.flags.contains(TypeAliasFlags::IS_EXTERN) { - EarlyBinder::bind(Ty::new_foreign(interner, t.into())) - } else { - let mut ctx = TyLoweringContext::new( - db, - &resolver, - &type_alias_data.store, - t.into(), - LifetimeElisionKind::AnonymousReportError, - ) - .with_impl_trait_mode(ImplTraitLoweringMode::Opaque); - let res = EarlyBinder::bind( - type_alias_data - .ty - .map(|type_ref| ctx.lower_ty(type_ref)) - .unwrap_or_else(|| Ty::new_error(interner, ErrorGuaranteed)), - ); - diags = create_diagnostics(ctx.diagnostics); - res - }; - (inner, diags) -} - -pub(crate) fn type_for_type_alias_with_diagnostics_cycle_result<'db>( - db: &'db dyn HirDatabase, - _adt: TypeAliasId, -) -> (EarlyBinder<'db, Ty<'db>>, Diagnostics) { - (EarlyBinder::bind(Ty::new_error(DbInterner::new_with(db, None, None), ErrorGuaranteed)), None) -} - -pub(crate) fn impl_self_ty_query<'db>( - db: &'db dyn HirDatabase, - impl_id: ImplId, -) -> EarlyBinder<'db, Ty<'db>> { - db.impl_self_ty_with_diagnostics(impl_id).0 -} - -pub(crate) fn 
impl_self_ty_with_diagnostics_query<'db>( - db: &'db dyn HirDatabase, - impl_id: ImplId, -) -> (EarlyBinder<'db, Ty<'db>>, Diagnostics) { - let resolver = impl_id.resolver(db); - - let impl_data = db.impl_signature(impl_id); - let mut ctx = TyLoweringContext::new( - db, - &resolver, - &impl_data.store, - impl_id.into(), - LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true }, - ); - let ty = ctx.lower_ty(impl_data.self_ty); - assert!(!ty.has_escaping_bound_vars()); - (EarlyBinder::bind(ty), create_diagnostics(ctx.diagnostics)) -} - -pub(crate) fn impl_self_ty_with_diagnostics_cycle_result( - db: &dyn HirDatabase, - _impl_id: ImplId, -) -> (EarlyBinder<'_, Ty<'_>>, Diagnostics) { - (EarlyBinder::bind(Ty::new_error(DbInterner::new_with(db, None, None), ErrorGuaranteed)), None) -} - -pub(crate) fn const_param_ty_query<'db>(db: &'db dyn HirDatabase, def: ConstParamId) -> Ty<'db> { - db.const_param_ty_with_diagnostics(def).0 -} - -// returns None if def is a type arg -pub(crate) fn const_param_ty_with_diagnostics_query<'db>( - db: &'db dyn HirDatabase, - def: ConstParamId, -) -> (Ty<'db>, Diagnostics) { - let (parent_data, store) = db.generic_params_and_store(def.parent()); - let data = &parent_data[def.local_id()]; - let resolver = def.parent().resolver(db); - let interner = DbInterner::new_with(db, Some(resolver.krate()), None); - let mut ctx = TyLoweringContext::new( - db, - &resolver, - &store, - def.parent(), - LifetimeElisionKind::AnonymousReportError, - ); - let ty = match data { - TypeOrConstParamData::TypeParamData(_) => { - never!(); - Ty::new_error(interner, ErrorGuaranteed) - } - TypeOrConstParamData::ConstParamData(d) => ctx.lower_ty(d.ty), - }; - (ty, create_diagnostics(ctx.diagnostics)) -} - -pub(crate) fn const_param_ty_with_diagnostics_cycle_result<'db>( - db: &'db dyn HirDatabase, - _: crate::db::HirDatabaseData, - def: ConstParamId, -) -> (Ty<'db>, Diagnostics) { - let resolver = def.parent().resolver(db); - let interner = DbInterner::new_with(db, Some(resolver.krate()), None); - (Ty::new_error(interner, ErrorGuaranteed), None) -} - -pub(crate) fn field_types_query<'db>( - db: &'db dyn HirDatabase, - variant_id: VariantId, -) -> Arc>>> { - db.field_types_with_diagnostics(variant_id).0 -} - -/// Build the type of all specific fields of a struct or enum variant. -pub(crate) fn field_types_with_diagnostics_query<'db>( - db: &'db dyn HirDatabase, - variant_id: VariantId, -) -> (Arc>>>, Diagnostics) { - let var_data = variant_id.fields(db); - let fields = var_data.fields(); - if fields.is_empty() { - return (Arc::new(ArenaMap::default()), None); - } - - let (resolver, def): (_, GenericDefId) = match variant_id { - VariantId::StructId(it) => (it.resolver(db), it.into()), - VariantId::UnionId(it) => (it.resolver(db), it.into()), - VariantId::EnumVariantId(it) => (it.resolver(db), it.lookup(db).parent.into()), - }; - let mut res = ArenaMap::default(); - let mut ctx = TyLoweringContext::new( - db, - &resolver, - &var_data.store, - def, - LifetimeElisionKind::AnonymousReportError, - ); - for (field_id, field_data) in var_data.fields().iter() { - res.insert(field_id, EarlyBinder::bind(ctx.lower_ty(field_data.type_ref))); - } - (Arc::new(res), create_diagnostics(ctx.diagnostics)) -} - -/// This query exists only to be used when resolving short-hand associated types -/// like `T::Item`. -/// -/// See the analogous query in rustc and its comment: -/// -/// This is a query mostly to handle cycles somewhat gracefully; e.g. 
the -/// following bounds are disallowed: `T: Foo, U: Foo`, but -/// these are fine: `T: Foo, U: Foo<()>`. -#[tracing::instrument(skip(db), ret)] -pub(crate) fn generic_predicates_for_param_query<'db>( - db: &'db dyn HirDatabase, - def: GenericDefId, - param_id: TypeOrConstParamId, - assoc_name: Option, -) -> GenericPredicates<'db> { - let generics = generics(db, def); - let interner = DbInterner::new_with(db, None, None); - let resolver = def.resolver(db); - let mut ctx = TyLoweringContext::new( - db, - &resolver, - generics.store(), - def, - LifetimeElisionKind::AnonymousReportError, - ); - - // we have to filter out all other predicates *first*, before attempting to lower them - let predicate = |pred: &_, ctx: &mut TyLoweringContext<'_, '_>| match pred { - WherePredicate::ForLifetime { target, bound, .. } - | WherePredicate::TypeBound { target, bound, .. } => { - let invalid_target = { ctx.lower_ty_only_param(*target) != Some(param_id) }; - if invalid_target { - // FIXME(sized-hierarchy): Revisit and adjust this properly once we have implemented - // sized-hierarchy correctly. - // If this is filtered out without lowering, `?Sized` or `PointeeSized` is not gathered into - // `ctx.unsized_types` - let lower = || -> bool { - match bound { - TypeBound::Path(_, TraitBoundModifier::Maybe) => true, - TypeBound::Path(path, _) | TypeBound::ForLifetime(_, path) => { - let TypeRef::Path(path) = &ctx.store[path.type_ref()] else { - return false; - }; - let Some(pointee_sized) = - LangItem::PointeeSized.resolve_trait(ctx.db, ctx.resolver.krate()) - else { - return false; - }; - // Lower the path directly with `Resolver` instead of PathLoweringContext` - // to prevent diagnostics duplications. - ctx.resolver.resolve_path_in_type_ns_fully(ctx.db, path).is_some_and( - |it| matches!(it, TypeNs::TraitId(tr) if tr == pointee_sized), - ) - } - _ => false, - } - }(); - if lower { - ctx.lower_where_predicate(pred, true, &generics, PredicateFilter::All) - .for_each(drop); - } - return false; - } - - match bound { - &TypeBound::ForLifetime(_, path) | &TypeBound::Path(path, _) => { - // Only lower the bound if the trait could possibly define the associated - // type we're looking for. - let path = &ctx.store[path]; - - let Some(assoc_name) = &assoc_name else { return true }; - let Some(TypeNs::TraitId(tr)) = - resolver.resolve_path_in_type_ns_fully(db, path) - else { - return false; - }; - - rustc_type_ir::elaborate::supertrait_def_ids(interner, tr.into()).any(|tr| { - tr.0.trait_items(db).items.iter().any(|(name, item)| { - matches!(item, AssocItemId::TypeAliasId(_)) && name == assoc_name - }) - }) - } - TypeBound::Use(_) | TypeBound::Lifetime(_) | TypeBound::Error => false, - } - } - WherePredicate::Lifetime { .. 
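// Illustrative sketch only, in plain Rust: the shorthand associated types this
// query exists for. `T::Item` below resolves through the `T: Iterator` bound,
// which is why the filter above only lowers bounds targeting that parameter.
fn split_first<T: Iterator>(mut iter: T) -> (Option<T::Item>, T) {
    let first = iter.next();
    (first, iter)
}

fn main() {
    let (first, rest) = split_first(1..4);
    assert_eq!(first, Some(1));
    assert_eq!(rest.collect::<Vec<_>>(), vec![2, 3]);
}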
} => false, - }; - let mut predicates = Vec::new(); - for maybe_parent_generics in - std::iter::successors(Some(&generics), |generics| generics.parent_generics()) - { - ctx.store = maybe_parent_generics.store(); - for pred in maybe_parent_generics.where_predicates() { - if predicate(pred, &mut ctx) { - predicates.extend(ctx.lower_where_predicate( - pred, - true, - maybe_parent_generics, - PredicateFilter::All, - )); - } - } - } - - let args = GenericArgs::identity_for_item(interner, def.into()); - if !args.is_empty() { - let explicitly_unsized_tys = ctx.unsized_types; - if let Some(implicitly_sized_predicates) = - implicitly_sized_clauses(db, param_id.parent, &explicitly_unsized_tys, &args, &resolver) - { - predicates.extend(implicitly_sized_predicates); - }; - } - GenericPredicates(predicates.is_empty().not().then(|| predicates.into())) -} - -pub(crate) fn generic_predicates_for_param_cycle_result( - _db: &dyn HirDatabase, - _def: GenericDefId, - _param_id: TypeOrConstParamId, - _assoc_name: Option, -) -> GenericPredicates<'_> { - GenericPredicates(None) -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct GenericPredicates<'db>(Option]>>); - -impl<'db> GenericPredicates<'db> { - #[inline] - pub fn instantiate( - &self, - interner: DbInterner<'db>, - args: GenericArgs<'db>, - ) -> Option>> { - self.0 - .as_ref() - .map(|it| EarlyBinder::bind(it.iter().copied()).iter_instantiated(interner, args)) - } - - #[inline] - pub fn instantiate_identity(&self) -> Option>> { - self.0.as_ref().map(|it| it.iter().copied()) - } -} - -impl<'db> ops::Deref for GenericPredicates<'db> { - type Target = [Clause<'db>]; - - fn deref(&self) -> &Self::Target { - self.0.as_deref().unwrap_or(&[]) - } -} - -pub(crate) fn trait_environment_for_body_query( - db: &dyn HirDatabase, - def: DefWithBodyId, -) -> Arc> { - let Some(def) = def.as_generic_def_id(db) else { - let krate = def.module(db).krate(); - return TraitEnvironment::empty(krate); - }; - db.trait_environment(def) -} - -pub(crate) fn trait_environment_query<'db>( - db: &'db dyn HirDatabase, - def: GenericDefId, -) -> Arc> { - let generics = generics(db, def); - if generics.has_no_predicates() && generics.is_empty() { - return TraitEnvironment::empty(def.krate(db)); - } - - let resolver = def.resolver(db); - let mut ctx = TyLoweringContext::new( - db, - &resolver, - generics.store(), - def, - LifetimeElisionKind::AnonymousReportError, - ); - let mut traits_in_scope = Vec::new(); - let mut clauses = Vec::new(); - for maybe_parent_generics in - std::iter::successors(Some(&generics), |generics| generics.parent_generics()) - { - ctx.store = maybe_parent_generics.store(); - for pred in maybe_parent_generics.where_predicates() { - for pred in ctx.lower_where_predicate(pred, false, &generics, PredicateFilter::All) { - if let rustc_type_ir::ClauseKind::Trait(tr) = pred.kind().skip_binder() { - traits_in_scope.push((tr.self_ty(), tr.def_id().0)); - } - clauses.push(pred); - } - } - } - - if let Some(trait_id) = def.assoc_trait_container(db) { - // add `Self: Trait` to the environment in trait - // function default implementations (and speculative code - // inside consts or type aliases) - cov_mark::hit!(trait_self_implements_self); - let trait_ref = TraitRef::identity(ctx.interner, trait_id.into()); - let clause = Clause(Predicate::new( - ctx.interner, - Binder::dummy(rustc_type_ir::PredicateKind::Clause(rustc_type_ir::ClauseKind::Trait( - TraitPredicate { trait_ref, polarity: rustc_type_ir::PredicatePolarity::Positive }, - ))), - )); - 
clauses.push(clause); - } - - let explicitly_unsized_tys = ctx.unsized_types; - - let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate()); - if let Some(sized_trait) = sized_trait { - let (mut generics, mut def_id) = - (crate::next_solver::generics::generics(db, def.into()), def); - loop { - let self_idx = trait_self_param_idx(db, def_id); - for (idx, p) in generics.own_params.iter().enumerate() { - if let Some(self_idx) = self_idx - && p.index() as usize == self_idx - { - continue; - } - let GenericParamId::TypeParamId(param_id) = p.id else { - continue; - }; - let idx = idx as u32 + generics.parent_count as u32; - let param_ty = Ty::new_param(ctx.interner, param_id, idx); - if explicitly_unsized_tys.contains(¶m_ty) { - continue; - } - let trait_ref = TraitRef::new_from_args( - ctx.interner, - sized_trait.into(), - GenericArgs::new_from_iter(ctx.interner, [param_ty.into()]), - ); - let clause = Clause(Predicate::new( - ctx.interner, - Binder::dummy(rustc_type_ir::PredicateKind::Clause( - rustc_type_ir::ClauseKind::Trait(TraitPredicate { - trait_ref, - polarity: rustc_type_ir::PredicatePolarity::Positive, - }), - )), - )); - clauses.push(clause); - } - - if let Some(g) = generics.parent { - generics = crate::next_solver::generics::generics(db, g.into()); - def_id = g; - } else { - break; - } - } - } - - let clauses = rustc_type_ir::elaborate::elaborate(ctx.interner, clauses); - let clauses = Clauses::new_from_iter(ctx.interner, clauses); - let env = ParamEnv { clauses }; - - TraitEnvironment::new(resolver.krate(), None, traits_in_scope.into_boxed_slice(), env) -} - -#[derive(Copy, Clone, Debug)] -pub(crate) enum PredicateFilter { - SelfTrait, - All, -} - -/// Resolve the where clause(s) of an item with generics. -#[tracing::instrument(skip(db))] -pub(crate) fn generic_predicates_query<'db>( - db: &'db dyn HirDatabase, - def: GenericDefId, -) -> GenericPredicates<'db> { - generic_predicates_filtered_by(db, def, PredicateFilter::All, |_| true).0 -} - -pub(crate) fn generic_predicates_without_parent_query<'db>( - db: &'db dyn HirDatabase, - def: GenericDefId, -) -> GenericPredicates<'db> { - generic_predicates_filtered_by(db, def, PredicateFilter::All, |d| d == def).0 -} - -/// Resolve the where clause(s) of an item with generics, -/// except the ones inherited from the parent -pub(crate) fn generic_predicates_without_parent_with_diagnostics_query<'db>( - db: &'db dyn HirDatabase, - def: GenericDefId, -) -> (GenericPredicates<'db>, Diagnostics) { - generic_predicates_filtered_by(db, def, PredicateFilter::All, |d| d == def) -} - -/// Resolve the where clause(s) of an item with generics, -/// with a given filter -#[tracing::instrument(skip(db, filter), ret)] -pub(crate) fn generic_predicates_filtered_by<'db, F>( - db: &'db dyn HirDatabase, - def: GenericDefId, - predicate_filter: PredicateFilter, - filter: F, -) -> (GenericPredicates<'db>, Diagnostics) -where - F: Fn(GenericDefId) -> bool, -{ - let generics = generics(db, def); - let resolver = def.resolver(db); - let interner = DbInterner::new_with(db, Some(resolver.krate()), None); - let mut ctx = TyLoweringContext::new( - db, - &resolver, - generics.store(), - def, - LifetimeElisionKind::AnonymousReportError, - ); - - let mut predicates = Vec::new(); - for maybe_parent_generics in - std::iter::successors(Some(&generics), |generics| generics.parent_generics()) - { - ctx.store = maybe_parent_generics.store(); - for pred in maybe_parent_generics.where_predicates() { - tracing::debug!(?pred); - if 
filter(maybe_parent_generics.def()) { - predicates.extend(ctx.lower_where_predicate( - pred, - false, - maybe_parent_generics, - predicate_filter, - )); - } - } - } - - let explicitly_unsized_tys = ctx.unsized_types; - - let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate()); - if let Some(sized_trait) = sized_trait { - let mut add_sized_clause = |param_idx, param_id, param_data| { - let ( - GenericParamId::TypeParamId(param_id), - GenericParamDataRef::TypeParamData(param_data), - ) = (param_id, param_data) - else { - return; - }; - - if param_data.provenance == TypeParamProvenance::TraitSelf { - return; - } - - let param_ty = Ty::new_param(interner, param_id, param_idx); - if explicitly_unsized_tys.contains(¶m_ty) { - return; - } - let trait_ref = TraitRef::new_from_args( - interner, - sized_trait.into(), - GenericArgs::new_from_iter(interner, [param_ty.into()]), - ); - let clause = Clause(Predicate::new( - interner, - Binder::dummy(rustc_type_ir::PredicateKind::Clause( - rustc_type_ir::ClauseKind::Trait(TraitPredicate { - trait_ref, - polarity: rustc_type_ir::PredicatePolarity::Positive, - }), - )), - )); - predicates.push(clause); - }; - if generics.parent_generics().is_some_and(|parent| filter(parent.def())) { - generics.iter_parent().enumerate().for_each(|(param_idx, (param_id, param_data))| { - add_sized_clause(param_idx as u32, param_id, param_data); - }); - } - if filter(def) { - let parent_params_len = generics.len_parent(); - generics.iter_self().enumerate().for_each(|(param_idx, (param_id, param_data))| { - add_sized_clause((param_idx + parent_params_len) as u32, param_id, param_data); - }); - } - } - - // FIXME: rustc gathers more predicates by recursing through resulting trait predicates. - // See https://github.com/rust-lang/rust/blob/76c5ed2847cdb26ef2822a3a165d710f6b772217/compiler/rustc_hir_analysis/src/collect/predicates_of.rs#L689-L715 - - ( - GenericPredicates(predicates.is_empty().not().then(|| predicates.into())), - create_diagnostics(ctx.diagnostics), - ) -} - -/// Generate implicit `: Sized` predicates for all generics that has no `?Sized` bound. -/// Exception is Self of a trait def. 
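// Illustrative sketch only, in plain Rust: the implicit `Sized` predicate the
// code above adds for every type parameter, and the `?Sized` opt-out recorded
// in `explicitly_unsized_tys`.
fn by_value<T>(value: T) -> T {
    value // `T: Sized` is implicit here
}

fn by_ref<T: ?Sized>(value: &T) -> &T {
    value // `?Sized` removes the implicit bound, so `T = str` is fine
}

fn main() {
    let s = by_ref::<str>("unsized types work behind a reference");
    let n = by_value(3);
    println!("{s} {n}");
}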
-fn implicitly_sized_clauses<'a, 'subst, 'db>( - db: &'db dyn HirDatabase, - def: GenericDefId, - explicitly_unsized_tys: &'a FxHashSet>, - args: &'subst GenericArgs<'db>, - resolver: &Resolver<'db>, -) -> Option> + Captures<'a> + Captures<'subst>> { - let interner = DbInterner::new_with(db, Some(resolver.krate()), None); - let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate())?; - - let trait_self_idx = trait_self_param_idx(db, def); - - Some( - args.iter() - .enumerate() - .filter_map( - move |(idx, generic_arg)| { - if Some(idx) == trait_self_idx { None } else { Some(generic_arg) } - }, - ) - .filter_map(|generic_arg| generic_arg.as_type()) - .filter(move |self_ty| !explicitly_unsized_tys.contains(self_ty)) - .map(move |self_ty| { - let trait_ref = TraitRef::new_from_args( - interner, - sized_trait.into(), - GenericArgs::new_from_iter(interner, [self_ty.into()]), - ); - Clause(Predicate::new( - interner, - Binder::dummy(rustc_type_ir::PredicateKind::Clause( - rustc_type_ir::ClauseKind::Trait(TraitPredicate { - trait_ref, - polarity: rustc_type_ir::PredicatePolarity::Positive, - }), - )), - )) - }), - ) -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct GenericDefaults<'db>(Option>>]>>); - -impl<'db> GenericDefaults<'db> { - #[inline] - pub fn get(&self, idx: usize) -> Option>> { - self.0.as_ref()?[idx] - } -} - -pub(crate) fn generic_defaults_query( - db: &dyn HirDatabase, - def: GenericDefId, -) -> GenericDefaults<'_> { - db.generic_defaults_ns_with_diagnostics(def).0 -} - -/// Resolve the default type params from generics. -/// -/// Diagnostics are only returned for this `GenericDefId` (returned defaults include parents). -pub(crate) fn generic_defaults_with_diagnostics_query( - db: &dyn HirDatabase, - def: GenericDefId, -) -> (GenericDefaults<'_>, Diagnostics) { - let generic_params = generics(db, def); - if generic_params.is_empty() { - return (GenericDefaults(None), None); - } - let resolver = def.resolver(db); - - let mut ctx = TyLoweringContext::new( - db, - &resolver, - generic_params.store(), - def, - LifetimeElisionKind::AnonymousReportError, - ) - .with_impl_trait_mode(ImplTraitLoweringMode::Disallowed); - let mut idx = 0; - let mut has_any_default = false; - let mut defaults = generic_params - .iter_parents_with_store() - .map(|((_id, p), store)| { - ctx.store = store; - let (result, has_default) = handle_generic_param(&mut ctx, idx, p); - has_any_default |= has_default; - idx += 1; - result - }) - .collect::>(); - ctx.diagnostics.clear(); // Don't include diagnostics from the parent. 
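// Illustrative sketch only, in plain Rust: the parameter defaults being
// lowered above. A default may mention earlier parameters but not later ones
// or the ADT's `Self` type, which is what the per-parameter index passed to
// `handle_generic_param` below helps enforce.
struct Pair<A, B = A> {
    first: A,
    second: B,
}

fn main() {
    // `B` defaults to `A`, so `Pair<u8>` means `Pair<u8, u8>`.
    let p: Pair<u8> = Pair { first: 1, second: 2 };
    println!("{} {}", p.first, p.second);
}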
- defaults.extend(generic_params.iter_self().map(|(_id, p)| { - let (result, has_default) = handle_generic_param(&mut ctx, idx, p); - has_any_default |= has_default; - idx += 1; - result - })); - let diagnostics = create_diagnostics(mem::take(&mut ctx.diagnostics)); - let defaults = if has_any_default { - GenericDefaults(Some(Arc::from_iter(defaults))) - } else { - GenericDefaults(None) - }; - return (defaults, diagnostics); - - fn handle_generic_param<'db>( - ctx: &mut TyLoweringContext<'db, '_>, - idx: usize, - p: GenericParamDataRef<'_>, - ) -> (Option>>, bool) { - ctx.lowering_param_default(idx as u32); - match p { - GenericParamDataRef::TypeParamData(p) => { - let ty = p.default.map(|ty| ctx.lower_ty(ty)); - (ty.map(|ty| EarlyBinder::bind(ty.into())), p.default.is_some()) - } - GenericParamDataRef::ConstParamData(p) => { - let val = p.default.map(|c| { - let param_ty = ctx.lower_ty(p.ty); - let c = ctx.lower_const(c, param_ty); - c.into() - }); - (val.map(EarlyBinder::bind), p.default.is_some()) - } - GenericParamDataRef::LifetimeParamData(_) => (None, false), - } - } -} - -pub(crate) fn generic_defaults_with_diagnostics_cycle_result( - _db: &dyn HirDatabase, - _def: GenericDefId, -) -> (GenericDefaults<'_>, Diagnostics) { - (GenericDefaults(None), None) -} - -/// Build the signature of a callable item (function, struct or enum variant). -pub(crate) fn callable_item_signature_query<'db>( - db: &'db dyn HirDatabase, - def: CallableDefId, -) -> EarlyBinder<'db, PolyFnSig<'db>> { - match def { - CallableDefId::FunctionId(f) => fn_sig_for_fn(db, f), - CallableDefId::StructId(s) => fn_sig_for_struct_constructor(db, s), - CallableDefId::EnumVariantId(e) => fn_sig_for_enum_variant_constructor(db, e), - } -} - -fn fn_sig_for_fn<'db>( - db: &'db dyn HirDatabase, - def: FunctionId, -) -> EarlyBinder<'db, PolyFnSig<'db>> { - let data = db.function_signature(def); - let resolver = def.resolver(db); - let interner = DbInterner::new_with(db, Some(resolver.krate()), None); - let mut ctx_params = TyLoweringContext::new( - db, - &resolver, - &data.store, - def.into(), - LifetimeElisionKind::for_fn_params(&data), - ); - let params = data.params.iter().map(|&tr| ctx_params.lower_ty(tr)); - - let ret = match data.ret_type { - Some(ret_type) => { - let mut ctx_ret = TyLoweringContext::new( - db, - &resolver, - &data.store, - def.into(), - LifetimeElisionKind::for_fn_ret(interner), - ) - .with_impl_trait_mode(ImplTraitLoweringMode::Opaque); - ctx_ret.lower_ty(ret_type) - } - None => Ty::new_tup(interner, &[]), - }; - - let inputs_and_output = Tys::new_from_iter(interner, params.chain(Some(ret))); - // If/when we track late bound vars, we need to switch this to not be `dummy` - EarlyBinder::bind(rustc_type_ir::Binder::dummy(FnSig { - abi: data.abi.as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol), - c_variadic: data.is_varargs(), - safety: if data.is_unsafe() { Safety::Unsafe } else { Safety::Safe }, - inputs_and_output, - })) -} - -fn type_for_adt<'db>(db: &'db dyn HirDatabase, adt: AdtId) -> EarlyBinder<'db, Ty<'db>> { - let interner = DbInterner::new_with(db, None, None); - let args = GenericArgs::identity_for_item(interner, adt.into()); - let ty = Ty::new_adt(interner, adt, args); - EarlyBinder::bind(ty) -} - -fn fn_sig_for_struct_constructor<'db>( - db: &'db dyn HirDatabase, - def: StructId, -) -> EarlyBinder<'db, PolyFnSig<'db>> { - let field_tys = db.field_types_ns(def.into()); - let params = field_tys.iter().map(|(_, ty)| ty.skip_binder()); - let ret = type_for_adt(db, def.into()).skip_binder(); - - 
let inputs_and_output = - Tys::new_from_iter(DbInterner::new_with(db, None, None), params.chain(Some(ret))); - EarlyBinder::bind(Binder::dummy(FnSig { - abi: FnAbi::RustCall, - c_variadic: false, - safety: Safety::Safe, - inputs_and_output, - })) -} - -fn fn_sig_for_enum_variant_constructor<'db>( - db: &'db dyn HirDatabase, - def: EnumVariantId, -) -> EarlyBinder<'db, PolyFnSig<'db>> { - let field_tys = db.field_types_ns(def.into()); - let params = field_tys.iter().map(|(_, ty)| ty.skip_binder()); - let parent = def.lookup(db).parent; - let ret = type_for_adt(db, parent.into()).skip_binder(); - - let inputs_and_output = - Tys::new_from_iter(DbInterner::new_with(db, None, None), params.chain(Some(ret))); - EarlyBinder::bind(Binder::dummy(FnSig { - abi: FnAbi::RustCall, - c_variadic: false, - safety: Safety::Safe, - inputs_and_output, - })) -} - -// FIXME(next-solver): should merge this with `explicit_item_bounds` in some way -pub(crate) fn associated_ty_item_bounds<'db>( - db: &'db dyn HirDatabase, - type_alias: TypeAliasId, -) -> EarlyBinder<'db, BoundExistentialPredicates<'db>> { - let type_alias_data = db.type_alias_signature(type_alias); - let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db); - let interner = DbInterner::new_with(db, Some(resolver.krate()), None); - let mut ctx = TyLoweringContext::new( - db, - &resolver, - &type_alias_data.store, - type_alias.into(), - LifetimeElisionKind::AnonymousReportError, - ); - // FIXME: we should never create non-existential predicates in the first place - // For now, use an error type so we don't run into dummy binder issues - let self_ty = Ty::new_error(interner, ErrorGuaranteed); - - let mut bounds = Vec::new(); - for bound in &type_alias_data.bounds { - ctx.lower_type_bound(bound, self_ty, false).for_each(|pred| { - if let Some(bound) = pred - .kind() - .map_bound(|c| match c { - rustc_type_ir::ClauseKind::Trait(t) => { - let id = t.def_id(); - let is_auto = db.trait_signature(id.0).flags.contains(TraitFlags::AUTO); - if is_auto { - Some(ExistentialPredicate::AutoTrait(t.def_id())) - } else { - Some(ExistentialPredicate::Trait(ExistentialTraitRef::new_from_args( - interner, - t.def_id(), - GenericArgs::new_from_iter( - interner, - t.trait_ref.args.iter().skip(1), - ), - ))) - } - } - rustc_type_ir::ClauseKind::Projection(p) => Some( - ExistentialPredicate::Projection(ExistentialProjection::new_from_args( - interner, - p.def_id(), - GenericArgs::new_from_iter( - interner, - p.projection_term.args.iter().skip(1), - ), - p.term, - )), - ), - rustc_type_ir::ClauseKind::TypeOutlives(_) => None, - rustc_type_ir::ClauseKind::RegionOutlives(_) - | rustc_type_ir::ClauseKind::ConstArgHasType(_, _) - | rustc_type_ir::ClauseKind::WellFormed(_) - | rustc_type_ir::ClauseKind::ConstEvaluatable(_) - | rustc_type_ir::ClauseKind::HostEffect(_) - | rustc_type_ir::ClauseKind::UnstableFeature(_) => unreachable!(), - }) - .transpose() - { - bounds.push(bound); - } - }); - } - - if !ctx.unsized_types.contains(&self_ty) - && let Some(sized_trait) = LangItem::Sized.resolve_trait(db, resolver.krate()) - { - let sized_clause = Binder::dummy(ExistentialPredicate::Trait(ExistentialTraitRef::new( - interner, - sized_trait.into(), - [] as [GenericArg<'_>; 0], - ))); - bounds.push(sized_clause); - } - - EarlyBinder::bind(BoundExistentialPredicates::new_from_iter(interner, bounds)) -} - -pub(crate) fn associated_type_by_name_including_super_traits<'db>( - db: &'db dyn HirDatabase, - trait_ref: TraitRef<'db>, - name: &Name, -) -> Option<(TraitRef<'db>, 
TypeAliasId)> { - let interner = DbInterner::new_with(db, None, None); - rustc_type_ir::elaborate::supertraits(interner, Binder::dummy(trait_ref)).find_map(|t| { - let trait_id = t.as_ref().skip_binder().def_id.0; - let assoc_type = trait_id.trait_items(db).associated_type_by_name(name)?; - Some((t.skip_binder(), assoc_type)) - }) -} - -pub fn associated_type_shorthand_candidates( - db: &dyn HirDatabase, - def: GenericDefId, - res: TypeNs, - mut cb: impl FnMut(&Name, TypeAliasId) -> bool, -) -> Option { - let interner = DbInterner::new_with(db, None, None); - named_associated_type_shorthand_candidates(interner, def, res, None, |name, _, id| { - cb(name, id).then_some(id) - }) -} - -#[tracing::instrument(skip(interner, check_alias))] -fn named_associated_type_shorthand_candidates<'db, R>( - interner: DbInterner<'db>, - // If the type parameter is defined in an impl and we're in a method, there - // might be additional where clauses to consider - def: GenericDefId, - res: TypeNs, - assoc_name: Option, - mut check_alias: impl FnMut(&Name, TraitRef<'db>, TypeAliasId) -> Option, -) -> Option { - let db = interner.db; - let mut search = |t: TraitRef<'db>| -> Option { - let mut checked_traits = FxHashSet::default(); - let mut check_trait = |trait_ref: TraitRef<'db>| { - let trait_id = trait_ref.def_id.0; - let name = &db.trait_signature(trait_id).name; - tracing::debug!(?trait_id, ?name); - if !checked_traits.insert(trait_id) { - return None; - } - let data = trait_id.trait_items(db); - - tracing::debug!(?data.items); - for (name, assoc_id) in &data.items { - if let &AssocItemId::TypeAliasId(alias) = assoc_id - && let Some(ty) = check_alias(name, trait_ref, alias) - { - return Some(ty); - } - } - None - }; - let mut stack: SmallVec<[_; 4]> = smallvec![t]; - while let Some(trait_ref) = stack.pop() { - if let Some(alias) = check_trait(trait_ref) { - return Some(alias); - } - for pred in generic_predicates_filtered_by( - db, - GenericDefId::TraitId(trait_ref.def_id.0), - PredicateFilter::SelfTrait, - // We are likely in the midst of lowering generic predicates of `def`. - // So, if we allow `pred == def` we might fall into an infinite recursion. - // Actually, we have already checked for the case `pred == def` above as we started - // with a stack including `trait_id` - |pred| pred != def && pred == GenericDefId::TraitId(trait_ref.def_id.0), - ) - .0 - .deref() - { - tracing::debug!(?pred); - let sup_trait_ref = match pred.kind().skip_binder() { - rustc_type_ir::ClauseKind::Trait(pred) => pred.trait_ref, - _ => continue, - }; - let sup_trait_ref = - EarlyBinder::bind(sup_trait_ref).instantiate(interner, trait_ref.args); - stack.push(sup_trait_ref); - } - tracing::debug!(?stack); - } - - None - }; - - match res { - TypeNs::SelfType(impl_id) => { - let trait_ref = db.impl_trait(impl_id)?; - - // FIXME(next-solver): same method in `lower` checks for impl or not - // Is that needed here? - - // we're _in_ the impl -- the binders get added back later. 
Correct, - // but it would be nice to make this more explicit - search(trait_ref.skip_binder()) - } - TypeNs::GenericParam(param_id) => { - // Handle `Self::Type` referring to own associated type in trait definitions - // This *must* be done first to avoid cycles with - // `generic_predicates_for_param`, but not sure that it's sufficient, - if let GenericDefId::TraitId(trait_id) = param_id.parent() { - let trait_name = &db.trait_signature(trait_id).name; - tracing::debug!(?trait_name); - let trait_generics = generics(db, trait_id.into()); - tracing::debug!(?trait_generics); - if trait_generics[param_id.local_id()].is_trait_self() { - let args = GenericArgs::identity_for_item(interner, trait_id.into()); - let trait_ref = TraitRef::new_from_args(interner, trait_id.into(), args); - tracing::debug!(?args, ?trait_ref); - return search(trait_ref); - } - } - - let predicates = - db.generic_predicates_for_param_ns(def, param_id.into(), assoc_name.clone()); - predicates - .iter() - .find_map(|pred| match (*pred).kind().skip_binder() { - rustc_type_ir::ClauseKind::Trait(trait_predicate) => Some(trait_predicate), - _ => None, - }) - .and_then(|trait_predicate| { - let trait_ref = trait_predicate.trait_ref; - assert!( - !trait_ref.has_escaping_bound_vars(), - "FIXME unexpected higher-ranked trait bound" - ); - search(trait_ref) - }) - } - _ => None, - } -} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver/path.rs deleted file mode 100644 index a4ff47e3892a6..0000000000000 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower_nextsolver/path.rs +++ /dev/null @@ -1,1327 +0,0 @@ -//! A wrapper around [`TyLoweringContext`] specifically for lowering paths. - -use either::Either; -use hir_def::{ - GenericDefId, GenericParamId, Lookup, TraitId, TypeAliasId, - expr_store::{ - ExpressionStore, HygieneId, - path::{GenericArg, GenericArgs, GenericArgsParentheses, Path, PathSegment, PathSegments}, - }, - hir::generics::{ - GenericParamDataRef, TypeOrConstParamData, TypeParamData, TypeParamProvenance, - }, - resolver::{ResolveValueResult, TypeNs, ValueNs}, - signatures::TraitFlags, - type_ref::{TypeRef, TypeRefId}, -}; -use hir_expand::name::Name; -use rustc_type_ir::{ - AliasTerm, AliasTy, AliasTyKind, - inherent::{GenericArgs as _, Region as _, SliceLike, Ty as _}, -}; -use smallvec::SmallVec; -use stdx::never; - -use crate::{ - GenericArgsProhibitedReason, IncorrectGenericsLenKind, PathGenericsSource, - PathLoweringDiagnostic, TyDefId, ValueTyDefId, - consteval::{unknown_const, unknown_const_as_generic}, - db::HirDatabase, - generics::{Generics, generics}, - lower::PathDiagnosticCallbackData, - lower_nextsolver::{LifetimeElisionKind, named_associated_type_shorthand_candidates}, - next_solver::{ - Binder, Clause, Const, DbInterner, ErrorGuaranteed, Predicate, ProjectionPredicate, Region, - TraitRef, Ty, - mapping::{ChalkToNextSolver, convert_binder_to_early_binder}, - }, -}; - -use super::{ - ImplTraitLoweringMode, TyLoweringContext, associated_type_by_name_including_super_traits, - const_param_ty_query, ty_query, -}; - -type CallbackData<'a, 'db> = Either< - PathDiagnosticCallbackData, - crate::infer::diagnostics::PathDiagnosticCallbackData<'a, 'db>, ->; - -// We cannot use `&mut dyn FnMut()` because of lifetime issues, and we don't want to use `Box` -// because of the allocation, so we create a lifetime-less callback, tailored for our needs. 
-pub(crate) struct PathDiagnosticCallback<'a, 'db> { - pub(crate) data: CallbackData<'a, 'db>, - pub(crate) callback: - fn(&CallbackData<'_, 'db>, &mut TyLoweringContext<'db, '_>, PathLoweringDiagnostic), -} - -pub(crate) struct PathLoweringContext<'a, 'b, 'db> { - ctx: &'a mut TyLoweringContext<'db, 'b>, - on_diagnostic: PathDiagnosticCallback<'a, 'db>, - path: &'a Path, - segments: PathSegments<'a>, - current_segment_idx: usize, - /// Contains the previous segment if `current_segment_idx == segments.len()` - current_or_prev_segment: PathSegment<'a>, -} - -impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> { - #[inline] - pub(crate) fn new( - ctx: &'a mut TyLoweringContext<'db, 'b>, - on_diagnostic: PathDiagnosticCallback<'a, 'db>, - path: &'a Path, - ) -> Self { - let segments = path.segments(); - let first_segment = segments.first().unwrap_or(PathSegment::MISSING); - Self { - ctx, - on_diagnostic, - path, - segments, - current_segment_idx: 0, - current_or_prev_segment: first_segment, - } - } - - #[inline] - #[cold] - fn on_diagnostic(&mut self, diag: PathLoweringDiagnostic) { - (self.on_diagnostic.callback)(&self.on_diagnostic.data, self.ctx, diag); - } - - #[inline] - pub(crate) fn ty_ctx(&mut self) -> &mut TyLoweringContext<'db, 'b> { - self.ctx - } - - #[inline] - fn current_segment_u32(&self) -> u32 { - self.current_segment_idx as u32 - } - - #[inline] - fn skip_resolved_segment(&mut self) { - if !matches!(self.path, Path::LangItem(..)) { - // In lang items, the resolved "segment" is not one of the segments. Perhaps we should've put it - // point at -1, but I don't feel this is clearer. - self.current_segment_idx += 1; - } - self.update_current_segment(); - } - - #[inline] - fn update_current_segment(&mut self) { - self.current_or_prev_segment = - self.segments.get(self.current_segment_idx).unwrap_or(self.current_or_prev_segment); - } - - #[inline] - pub(crate) fn ignore_last_segment(&mut self) { - self.segments = self.segments.strip_last(); - } - - #[inline] - pub(crate) fn set_current_segment(&mut self, segment: usize) { - self.current_segment_idx = segment; - self.current_or_prev_segment = self - .segments - .get(segment) - .expect("invalid segment passed to PathLoweringContext::set_current_segment()"); - } - - #[inline] - fn with_lifetime_elision( - &mut self, - lifetime_elision: LifetimeElisionKind<'db>, - f: impl FnOnce(&mut PathLoweringContext<'_, '_, 'db>) -> T, - ) -> T { - let old_lifetime_elision = - std::mem::replace(&mut self.ctx.lifetime_elision, lifetime_elision); - let result = f(self); - self.ctx.lifetime_elision = old_lifetime_elision; - result - } - - pub(crate) fn lower_ty_relative_path( - &mut self, - ty: Ty<'db>, - // We need the original resolution to lower `Self::AssocTy` correctly - res: Option, - infer_args: bool, - ) -> (Ty<'db>, Option) { - let remaining_segments = self.segments.len() - self.current_segment_idx; - match remaining_segments { - 0 => (ty, res), - 1 => { - // resolve unselected assoc types - (self.select_associated_type(res, infer_args), None) - } - _ => { - // FIXME report error (ambiguous associated type) - (Ty::new_error(self.ctx.interner, ErrorGuaranteed), None) - } - } - } - - // When calling this, the current segment is the resolved segment (we don't advance it yet). 
- pub(crate) fn lower_partly_resolved_path( - &mut self, - resolution: TypeNs, - infer_args: bool, - ) -> (Ty<'db>, Option) { - let remaining_segments = self.segments.skip(self.current_segment_idx + 1); - tracing::debug!(?remaining_segments); - let rem_seg_len = remaining_segments.len(); - tracing::debug!(?rem_seg_len); - - let ty = match resolution { - TypeNs::TraitId(trait_) => { - let ty = match remaining_segments.len() { - 1 => { - let trait_ref = self.lower_trait_ref_from_resolved_path( - trait_, - Ty::new_error(self.ctx.interner, ErrorGuaranteed), - false, - ); - tracing::debug!(?trait_ref); - self.skip_resolved_segment(); - let segment = self.current_or_prev_segment; - let trait_id = trait_ref.def_id.0; - let found = - trait_id.trait_items(self.ctx.db).associated_type_by_name(segment.name); - - tracing::debug!(?found); - match found { - Some(associated_ty) => { - // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent - // generic params. It's inefficient to splice the `Substitution`s, so we may want - // that method to optionally take parent `Substitution` as we already know them at - // this point (`trait_ref.substitution`). - let substitution = self.substs_from_path_segment( - associated_ty.into(), - false, - None, - true, - ); - let args = crate::next_solver::GenericArgs::new_from_iter( - self.ctx.interner, - trait_ref - .args - .iter() - .chain(substitution.iter().skip(trait_ref.args.len())), - ); - Ty::new_alias( - self.ctx.interner, - AliasTyKind::Projection, - AliasTy::new_from_args( - self.ctx.interner, - associated_ty.into(), - args, - ), - ) - } - None => { - // FIXME: report error (associated type not found) - Ty::new_error(self.ctx.interner, ErrorGuaranteed) - } - } - } - 0 => { - // Trait object type without dyn; this should be handled in upstream. See - // `lower_path()`. - stdx::never!("unexpected fully resolved trait path"); - Ty::new_error(self.ctx.interner, ErrorGuaranteed) - } - _ => { - // FIXME report error (ambiguous associated type) - Ty::new_error(self.ctx.interner, ErrorGuaranteed) - } - }; - return (ty, None); - } - TypeNs::GenericParam(param_id) => { - let generics = self.ctx.generics(); - let idx = generics.type_or_const_param_idx(param_id.into()); - match idx { - None => { - never!("no matching generics"); - Ty::new_error(self.ctx.interner, ErrorGuaranteed) - } - Some(idx) => { - let (pidx, _param) = generics.iter().nth(idx).unwrap(); - assert_eq!(pidx, param_id.into()); - self.ctx.type_param(param_id, idx as u32) - } - } - } - TypeNs::SelfType(impl_id) => self.ctx.db.impl_self_ty(impl_id).skip_binder(), - TypeNs::AdtSelfType(adt) => { - let args = crate::next_solver::GenericArgs::identity_for_item( - self.ctx.interner, - adt.into(), - ); - Ty::new_adt(self.ctx.interner, adt, args) - } - - TypeNs::AdtId(it) => self.lower_path_inner(it.into(), infer_args), - TypeNs::BuiltinType(it) => self.lower_path_inner(it.into(), infer_args), - TypeNs::TypeAliasId(it) => self.lower_path_inner(it.into(), infer_args), - // FIXME: report error - TypeNs::EnumVariantId(_) | TypeNs::ModuleId(_) => { - return (Ty::new_error(self.ctx.interner, ErrorGuaranteed), None); - } - }; - - tracing::debug!(?ty); - - self.skip_resolved_segment(); - self.lower_ty_relative_path(ty, Some(resolution), infer_args) - } - - /// This returns whether to keep the resolution (`true`) of throw it (`false`). 
- #[must_use] - fn handle_type_ns_resolution(&mut self, resolution: &TypeNs) -> bool { - let mut prohibit_generics_on_resolved = |reason| { - if self.current_or_prev_segment.args_and_bindings.is_some() { - let segment = self.current_segment_u32(); - self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { - segment, - reason, - }); - } - }; - - match resolution { - TypeNs::SelfType(_) => { - prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy) - } - TypeNs::GenericParam(_) => { - prohibit_generics_on_resolved(GenericArgsProhibitedReason::TyParam) - } - TypeNs::AdtSelfType(_) => { - prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy); - - if self.ctx.lowering_param_default.is_some() { - // Generic defaults are not allowed to refer to `Self`. - // FIXME: Emit an error. - return false; - } - } - TypeNs::BuiltinType(_) => { - prohibit_generics_on_resolved(GenericArgsProhibitedReason::PrimitiveTy) - } - TypeNs::ModuleId(_) => { - prohibit_generics_on_resolved(GenericArgsProhibitedReason::Module) - } - TypeNs::AdtId(_) - | TypeNs::EnumVariantId(_) - | TypeNs::TypeAliasId(_) - | TypeNs::TraitId(_) => {} - } - - true - } - - pub(crate) fn resolve_path_in_type_ns_fully(&mut self) -> Option { - let (res, unresolved) = self.resolve_path_in_type_ns()?; - if unresolved.is_some() { - return None; - } - Some(res) - } - - #[tracing::instrument(skip(self), ret)] - pub(crate) fn resolve_path_in_type_ns(&mut self) -> Option<(TypeNs, Option)> { - let (resolution, remaining_index, _, prefix_info) = - self.ctx.resolver.resolve_path_in_type_ns_with_prefix_info(self.ctx.db, self.path)?; - - let segments = self.segments; - if segments.is_empty() || matches!(self.path, Path::LangItem(..)) { - // `segments.is_empty()` can occur with `self`. - return Some((resolution, remaining_index)); - } - - let (module_segments, resolved_segment_idx, enum_segment) = match remaining_index { - None if prefix_info.enum_variant => { - (segments.strip_last_two(), segments.len() - 1, Some(segments.len() - 2)) - } - None => (segments.strip_last(), segments.len() - 1, None), - Some(i) => (segments.take(i - 1), i - 1, None), - }; - - self.current_segment_idx = resolved_segment_idx; - self.current_or_prev_segment = - segments.get(resolved_segment_idx).expect("should have resolved segment"); - - for (i, mod_segment) in module_segments.iter().enumerate() { - if mod_segment.args_and_bindings.is_some() { - self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { - segment: i as u32, - reason: GenericArgsProhibitedReason::Module, - }); - } - } - - if let Some(enum_segment) = enum_segment - && segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some()) - && segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some()) - { - self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { - segment: (enum_segment + 1) as u32, - reason: GenericArgsProhibitedReason::EnumVariant, - }); - } - - if !self.handle_type_ns_resolution(&resolution) { - return None; - } - - Some((resolution, remaining_index)) - } - - pub(crate) fn resolve_path_in_value_ns( - &mut self, - hygiene_id: HygieneId, - ) -> Option { - let (res, prefix_info) = self.ctx.resolver.resolve_path_in_value_ns_with_prefix_info( - self.ctx.db, - self.path, - hygiene_id, - )?; - - let segments = self.segments; - if segments.is_empty() || matches!(self.path, Path::LangItem(..)) { - // `segments.is_empty()` can occur with `self`. 
- return Some(res); - } - - let (mod_segments, enum_segment, resolved_segment_idx) = match res { - ResolveValueResult::Partial(_, unresolved_segment, _) => { - (segments.take(unresolved_segment - 1), None, unresolved_segment - 1) - } - ResolveValueResult::ValueNs(ValueNs::EnumVariantId(_), _) - if prefix_info.enum_variant => - { - (segments.strip_last_two(), segments.len().checked_sub(2), segments.len() - 1) - } - ResolveValueResult::ValueNs(..) => (segments.strip_last(), None, segments.len() - 1), - }; - - self.current_segment_idx = resolved_segment_idx; - self.current_or_prev_segment = - segments.get(resolved_segment_idx).expect("should have resolved segment"); - - for (i, mod_segment) in mod_segments.iter().enumerate() { - if mod_segment.args_and_bindings.is_some() { - self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { - segment: i as u32, - reason: GenericArgsProhibitedReason::Module, - }); - } - } - - if let Some(enum_segment) = enum_segment - && segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some()) - && segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some()) - { - self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { - segment: (enum_segment + 1) as u32, - reason: GenericArgsProhibitedReason::EnumVariant, - }); - } - - match &res { - ResolveValueResult::ValueNs(resolution, _) => { - let resolved_segment_idx = self.current_segment_u32(); - let resolved_segment = self.current_or_prev_segment; - - let mut prohibit_generics_on_resolved = |reason| { - if resolved_segment.args_and_bindings.is_some() { - self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited { - segment: resolved_segment_idx, - reason, - }); - } - }; - - match resolution { - ValueNs::ImplSelf(_) => { - prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy); - } - // FIXME: rustc generates E0107 (incorrect number of generic arguments) and not - // E0109 (generic arguments provided for a type that doesn't accept them) for - // consts and statics, presumably as a defense against future in which consts - // and statics can be generic, or just because it was easier for rustc implementors. - // That means we'll show the wrong error code. Because of us it's easier to do it - // this way :) - ValueNs::GenericParam(_) => { - prohibit_generics_on_resolved(GenericArgsProhibitedReason::Const) - } - ValueNs::StaticId(_) => { - prohibit_generics_on_resolved(GenericArgsProhibitedReason::Static) - } - ValueNs::LocalBinding(_) => { - prohibit_generics_on_resolved(GenericArgsProhibitedReason::LocalVariable) - } - ValueNs::FunctionId(_) - | ValueNs::StructId(_) - | ValueNs::EnumVariantId(_) - | ValueNs::ConstId(_) => {} - } - } - ResolveValueResult::Partial(resolution, _, _) => { - if !self.handle_type_ns_resolution(resolution) { - return None; - } - } - }; - Some(res) - } - - #[tracing::instrument(skip(self), ret)] - fn select_associated_type(&mut self, res: Option, infer_args: bool) -> Ty<'db> { - let interner = self.ctx.interner; - let Some(res) = res else { - return Ty::new_error(self.ctx.interner, ErrorGuaranteed); - }; - let def = self.ctx.def; - let segment = self.current_or_prev_segment; - let assoc_name = segment.name; - let check_alias = |name: &Name, t: TraitRef<'db>, associated_ty: TypeAliasId| { - if name != assoc_name { - return None; - } - - // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent - // generic params. 
It's inefficient to splice the `Substitution`s, so we may want - // that method to optionally take parent `Substitution` as we already know them at - // this point (`t.substitution`). - let substs = - self.substs_from_path_segment(associated_ty.into(), infer_args, None, true); - - let substs = crate::next_solver::GenericArgs::new_from_iter( - interner, - t.args.iter().chain(substs.iter().skip(t.args.len())), - ); - - Some(Ty::new_alias( - interner, - AliasTyKind::Projection, - AliasTy::new(interner, associated_ty.into(), substs), - )) - }; - named_associated_type_shorthand_candidates( - interner, - def, - res, - Some(assoc_name.clone()), - check_alias, - ) - .unwrap_or_else(|| Ty::new_error(interner, ErrorGuaranteed)) - } - - fn lower_path_inner(&mut self, typeable: TyDefId, infer_args: bool) -> Ty<'db> { - let generic_def = match typeable { - TyDefId::BuiltinType(builtinty) => { - return Ty::from_builtin_type(self.ctx.interner, builtinty); - } - TyDefId::AdtId(it) => it.into(), - TyDefId::TypeAliasId(it) => it.into(), - }; - let args = self.substs_from_path_segment(generic_def, infer_args, None, false); - let ty = ty_query(self.ctx.db, typeable); - ty.instantiate(self.ctx.interner, args) - } - - /// Collect generic arguments from a path into a `Substs`. See also - /// `create_substs_for_ast_path` and `def_to_ty` in rustc. - pub(crate) fn substs_from_path( - &mut self, - // Note that we don't call `db.value_type(resolved)` here, - // `ValueTyDefId` is just a convenient way to pass generics and - // special-case enum variants - resolved: ValueTyDefId, - infer_args: bool, - lowering_assoc_type_generics: bool, - ) -> crate::next_solver::GenericArgs<'db> { - let interner = self.ctx.interner; - let prev_current_segment_idx = self.current_segment_idx; - let prev_current_segment = self.current_or_prev_segment; - - let generic_def = match resolved { - ValueTyDefId::FunctionId(it) => it.into(), - ValueTyDefId::StructId(it) => it.into(), - ValueTyDefId::UnionId(it) => it.into(), - ValueTyDefId::ConstId(it) => it.into(), - ValueTyDefId::StaticId(_) => { - return crate::next_solver::GenericArgs::new_from_iter(interner, []); - } - ValueTyDefId::EnumVariantId(var) => { - // the generic args for an enum variant may be either specified - // on the segment referring to the enum, or on the segment - // referring to the variant. So `Option::::None` and - // `Option::None::` are both allowed (though the former is - // FIXME: This isn't strictly correct, enum variants may be used not through the enum - // (via `use Enum::Variant`). The resolver returns whether they were, but we don't have its result - // available here. The worst that can happen is that we will show some confusing diagnostics to the user, - // if generics exist on the module and they don't match with the variant. - // preferred). See also `def_ids_for_path_segments` in rustc. - // - // `wrapping_sub(1)` will return a number which `get` will return None for if current_segment_idx<2. - // This simplifies the code a bit. 
- let penultimate_idx = self.current_segment_idx.wrapping_sub(1); - let penultimate = self.segments.get(penultimate_idx); - if let Some(penultimate) = penultimate - && self.current_or_prev_segment.args_and_bindings.is_none() - && penultimate.args_and_bindings.is_some() - { - self.current_segment_idx = penultimate_idx; - self.current_or_prev_segment = penultimate; - } - var.lookup(self.ctx.db).parent.into() - } - }; - let result = self.substs_from_path_segment( - generic_def, - infer_args, - None, - lowering_assoc_type_generics, - ); - self.current_segment_idx = prev_current_segment_idx; - self.current_or_prev_segment = prev_current_segment; - result - } - - pub(crate) fn substs_from_path_segment( - &mut self, - def: GenericDefId, - infer_args: bool, - explicit_self_ty: Option>, - lowering_assoc_type_generics: bool, - ) -> crate::next_solver::GenericArgs<'db> { - let old_lifetime_elision = self.ctx.lifetime_elision.clone(); - - if let Some(args) = self.current_or_prev_segment.args_and_bindings - && args.parenthesized != GenericArgsParentheses::No - { - let prohibit_parens = match def { - GenericDefId::TraitId(trait_) => { - // RTN is prohibited anyways if we got here. - let is_rtn = args.parenthesized == GenericArgsParentheses::ReturnTypeNotation; - let is_fn_trait = self - .ctx - .db - .trait_signature(trait_) - .flags - .contains(TraitFlags::RUSTC_PAREN_SUGAR); - is_rtn || !is_fn_trait - } - _ => true, - }; - - if prohibit_parens { - let segment = self.current_segment_u32(); - self.on_diagnostic( - PathLoweringDiagnostic::ParenthesizedGenericArgsWithoutFnTrait { segment }, - ); - - return unknown_subst(self.ctx.interner, def); - } - - // `Fn()`-style generics are treated like functions for the purpose of lifetime elision. - self.ctx.lifetime_elision = - LifetimeElisionKind::AnonymousCreateParameter { report_in_path: false }; - } - - let result = self.substs_from_args_and_bindings( - self.current_or_prev_segment.args_and_bindings, - def, - infer_args, - explicit_self_ty, - PathGenericsSource::Segment(self.current_segment_u32()), - lowering_assoc_type_generics, - self.ctx.lifetime_elision.clone(), - ); - self.ctx.lifetime_elision = old_lifetime_elision; - result - } - - pub(super) fn substs_from_args_and_bindings( - &mut self, - args_and_bindings: Option<&GenericArgs>, - def: GenericDefId, - infer_args: bool, - explicit_self_ty: Option>, - generics_source: PathGenericsSource, - lowering_assoc_type_generics: bool, - lifetime_elision: LifetimeElisionKind<'db>, - ) -> crate::next_solver::GenericArgs<'db> { - struct LowererCtx<'a, 'b, 'c, 'db> { - ctx: &'a mut PathLoweringContext<'b, 'c, 'db>, - generics_source: PathGenericsSource, - } - - impl<'db> GenericArgsLowerer<'db> for LowererCtx<'_, '_, '_, 'db> { - fn report_len_mismatch( - &mut self, - def: GenericDefId, - provided_count: u32, - expected_count: u32, - kind: IncorrectGenericsLenKind, - ) { - self.ctx.on_diagnostic(PathLoweringDiagnostic::IncorrectGenericsLen { - generics_source: self.generics_source, - provided_count, - expected_count, - kind, - def, - }); - } - - fn report_arg_mismatch( - &mut self, - param_id: GenericParamId, - arg_idx: u32, - has_self_arg: bool, - ) { - self.ctx.on_diagnostic(PathLoweringDiagnostic::IncorrectGenericsOrder { - generics_source: self.generics_source, - param_id, - arg_idx, - has_self_arg, - }); - } - - fn provided_kind( - &mut self, - param_id: GenericParamId, - param: GenericParamDataRef<'_>, - arg: &GenericArg, - ) -> crate::next_solver::GenericArg<'db> { - match (param, *arg) { - 
(GenericParamDataRef::LifetimeParamData(_), GenericArg::Lifetime(lifetime)) => { - self.ctx.ctx.lower_lifetime(lifetime).into() - } - (GenericParamDataRef::TypeParamData(_), GenericArg::Type(type_ref)) => { - self.ctx.ctx.lower_ty(type_ref).into() - } - (GenericParamDataRef::ConstParamData(_), GenericArg::Const(konst)) => { - let GenericParamId::ConstParamId(const_id) = param_id else { - unreachable!("non-const param ID for const param"); - }; - self.ctx - .ctx - .lower_const(konst, const_param_ty_query(self.ctx.ctx.db, const_id)) - .into() - } - _ => unreachable!("unmatching param kinds were passed to `provided_kind()`"), - } - } - - fn provided_type_like_const( - &mut self, - const_ty: Ty<'db>, - arg: TypeLikeConst<'_>, - ) -> crate::next_solver::Const<'db> { - match arg { - TypeLikeConst::Path(path) => self.ctx.ctx.lower_path_as_const(path, const_ty), - TypeLikeConst::Infer => unknown_const(const_ty), - } - } - - fn inferred_kind( - &mut self, - def: GenericDefId, - param_id: GenericParamId, - param: GenericParamDataRef<'_>, - infer_args: bool, - preceding_args: &[crate::next_solver::GenericArg<'db>], - ) -> crate::next_solver::GenericArg<'db> { - let default = || { - self.ctx.ctx.db.generic_defaults(def).get(preceding_args.len()).map(|default| { - convert_binder_to_early_binder( - self.ctx.ctx.interner, - def, - default.to_nextsolver(self.ctx.ctx.interner), - ) - .instantiate(self.ctx.ctx.interner, preceding_args) - }) - }; - match param { - GenericParamDataRef::LifetimeParamData(_) => { - Region::new(self.ctx.ctx.interner, rustc_type_ir::ReError(ErrorGuaranteed)) - .into() - } - GenericParamDataRef::TypeParamData(param) => { - if !infer_args - && param.default.is_some() - && let Some(default) = default() - { - return default; - } - Ty::new_error(self.ctx.ctx.interner, ErrorGuaranteed).into() - } - GenericParamDataRef::ConstParamData(param) => { - if !infer_args - && param.default.is_some() - && let Some(default) = default() - { - return default; - } - let GenericParamId::ConstParamId(const_id) = param_id else { - unreachable!("non-const param ID for const param"); - }; - unknown_const_as_generic(const_param_ty_query(self.ctx.ctx.db, const_id)) - } - } - } - - fn parent_arg( - &mut self, - param_id: GenericParamId, - ) -> crate::next_solver::GenericArg<'db> { - match param_id { - GenericParamId::TypeParamId(_) => { - Ty::new_error(self.ctx.ctx.interner, ErrorGuaranteed).into() - } - GenericParamId::ConstParamId(const_id) => { - unknown_const_as_generic(const_param_ty_query(self.ctx.ctx.db, const_id)) - } - GenericParamId::LifetimeParamId(_) => { - Region::new(self.ctx.ctx.interner, rustc_type_ir::ReError(ErrorGuaranteed)) - .into() - } - } - } - - fn report_elided_lifetimes_in_path( - &mut self, - def: GenericDefId, - expected_count: u32, - hard_error: bool, - ) { - self.ctx.on_diagnostic(PathLoweringDiagnostic::ElidedLifetimesInPath { - generics_source: self.generics_source, - def, - expected_count, - hard_error, - }); - } - - fn report_elision_failure(&mut self, def: GenericDefId, expected_count: u32) { - self.ctx.on_diagnostic(PathLoweringDiagnostic::ElisionFailure { - generics_source: self.generics_source, - def, - expected_count, - }); - } - - fn report_missing_lifetime(&mut self, def: GenericDefId, expected_count: u32) { - self.ctx.on_diagnostic(PathLoweringDiagnostic::MissingLifetime { - generics_source: self.generics_source, - def, - expected_count, - }); - } - } - - substs_from_args_and_bindings( - self.ctx.db, - self.ctx.store, - args_and_bindings, - def, - infer_args, - 
lifetime_elision, - lowering_assoc_type_generics, - explicit_self_ty, - &mut LowererCtx { ctx: self, generics_source }, - ) - } - - pub(crate) fn lower_trait_ref_from_resolved_path( - &mut self, - resolved: TraitId, - explicit_self_ty: Ty<'db>, - infer_args: bool, - ) -> TraitRef<'db> { - let args = self.trait_ref_substs_from_path(resolved, explicit_self_ty, infer_args); - TraitRef::new_from_args(self.ctx.interner, resolved.into(), args) - } - - fn trait_ref_substs_from_path( - &mut self, - resolved: TraitId, - explicit_self_ty: Ty<'db>, - infer_args: bool, - ) -> crate::next_solver::GenericArgs<'db> { - self.substs_from_path_segment(resolved.into(), infer_args, Some(explicit_self_ty), false) - } - - pub(super) fn assoc_type_bindings_from_type_bound<'c>( - mut self, - trait_ref: TraitRef<'db>, - ) -> Option> + use<'a, 'b, 'c, 'db>> { - let interner = self.ctx.interner; - self.current_or_prev_segment.args_and_bindings.map(|args_and_bindings| { - args_and_bindings.bindings.iter().enumerate().flat_map(move |(binding_idx, binding)| { - let found = associated_type_by_name_including_super_traits( - self.ctx.db, - trait_ref, - &binding.name, - ); - let (super_trait_ref, associated_ty) = match found { - None => return SmallVec::new(), - Some(t) => t, - }; - let args = - self.with_lifetime_elision(LifetimeElisionKind::AnonymousReportError, |this| { - // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent - // generic params. It's inefficient to splice the `Substitution`s, so we may want - // that method to optionally take parent `Substitution` as we already know them at - // this point (`super_trait_ref.substitution`). - this.substs_from_args_and_bindings( - binding.args.as_ref(), - associated_ty.into(), - false, // this is not relevant - Some(super_trait_ref.self_ty()), - PathGenericsSource::AssocType { - segment: this.current_segment_u32(), - assoc_type: binding_idx as u32, - }, - false, - this.ctx.lifetime_elision.clone(), - ) - }); - let args = crate::next_solver::GenericArgs::new_from_iter( - interner, - super_trait_ref.args.iter().chain(args.iter().skip(super_trait_ref.args.len())), - ); - let projection_term = - AliasTerm::new_from_args(interner, associated_ty.into(), args); - let mut predicates: SmallVec<[_; 1]> = SmallVec::with_capacity( - binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(), - ); - if let Some(type_ref) = binding.type_ref { - let lifetime_elision = - if args_and_bindings.parenthesized == GenericArgsParentheses::ParenSugar { - // `Fn()`-style generics are elided like functions. This is `Output` (we lower to it in hir-def). 
- LifetimeElisionKind::for_fn_ret(self.ctx.interner) - } else { - self.ctx.lifetime_elision.clone() - }; - self.with_lifetime_elision(lifetime_elision, |this| { - match (&this.ctx.store[type_ref], this.ctx.impl_trait_mode.mode) { - (TypeRef::ImplTrait(_), ImplTraitLoweringMode::Disallowed) => (), - ( - _, - ImplTraitLoweringMode::Disallowed | ImplTraitLoweringMode::Opaque, - ) => { - let ty = this.ctx.lower_ty(type_ref); - let pred = Clause(Predicate::new( - interner, - Binder::dummy(rustc_type_ir::PredicateKind::Clause( - rustc_type_ir::ClauseKind::Projection( - ProjectionPredicate { - projection_term, - term: ty.into(), - }, - ), - )), - )); - predicates.push(pred); - } - } - }) - } - for bound in binding.bounds.iter() { - predicates.extend(self.ctx.lower_type_bound( - bound, - Ty::new_alias( - self.ctx.interner, - AliasTyKind::Projection, - AliasTy::new_from_args(self.ctx.interner, associated_ty.into(), args), - ), - false, - )); - } - predicates - }) - }) - } -} - -/// A const that were parsed like a type. -pub(crate) enum TypeLikeConst<'a> { - Infer, - Path(&'a Path), -} - -pub(crate) trait GenericArgsLowerer<'db> { - fn report_elided_lifetimes_in_path( - &mut self, - def: GenericDefId, - expected_count: u32, - hard_error: bool, - ); - - fn report_elision_failure(&mut self, def: GenericDefId, expected_count: u32); - - fn report_missing_lifetime(&mut self, def: GenericDefId, expected_count: u32); - - fn report_len_mismatch( - &mut self, - def: GenericDefId, - provided_count: u32, - expected_count: u32, - kind: IncorrectGenericsLenKind, - ); - - fn report_arg_mismatch(&mut self, param_id: GenericParamId, arg_idx: u32, has_self_arg: bool); - - fn provided_kind( - &mut self, - param_id: GenericParamId, - param: GenericParamDataRef<'_>, - arg: &GenericArg, - ) -> crate::next_solver::GenericArg<'db>; - - fn provided_type_like_const(&mut self, const_ty: Ty<'db>, arg: TypeLikeConst<'_>) - -> Const<'db>; - - fn inferred_kind( - &mut self, - def: GenericDefId, - param_id: GenericParamId, - param: GenericParamDataRef<'_>, - infer_args: bool, - preceding_args: &[crate::next_solver::GenericArg<'db>], - ) -> crate::next_solver::GenericArg<'db>; - - fn parent_arg(&mut self, param_id: GenericParamId) -> crate::next_solver::GenericArg<'db>; -} - -/// Returns true if there was an error. -fn check_generic_args_len<'db>( - args_and_bindings: Option<&GenericArgs>, - def: GenericDefId, - def_generics: &Generics, - infer_args: bool, - lifetime_elision: &LifetimeElisionKind<'db>, - lowering_assoc_type_generics: bool, - ctx: &mut impl GenericArgsLowerer<'db>, -) -> bool { - let mut had_error = false; - - let (mut provided_lifetimes_count, mut provided_types_and_consts_count) = (0usize, 0usize); - if let Some(args_and_bindings) = args_and_bindings { - let args_no_self = &args_and_bindings.args[usize::from(args_and_bindings.has_self_type)..]; - for arg in args_no_self { - match arg { - GenericArg::Lifetime(_) => provided_lifetimes_count += 1, - GenericArg::Type(_) | GenericArg::Const(_) => provided_types_and_consts_count += 1, - } - } - } - - let lifetime_args_len = def_generics.len_lifetimes_self(); - if provided_lifetimes_count == 0 - && lifetime_args_len > 0 - && (!lowering_assoc_type_generics || infer_args) - { - // In generic associated types, we never allow inferring the lifetimes, but only in type context, that is - // when `infer_args == false`. In expression/pattern context we always allow inferring them, even for GATs. 
- match lifetime_elision { - &LifetimeElisionKind::AnonymousCreateParameter { report_in_path } => { - ctx.report_elided_lifetimes_in_path(def, lifetime_args_len as u32, report_in_path); - had_error |= report_in_path; - } - LifetimeElisionKind::AnonymousReportError => { - ctx.report_missing_lifetime(def, lifetime_args_len as u32); - had_error = true - } - LifetimeElisionKind::ElisionFailure => { - ctx.report_elision_failure(def, lifetime_args_len as u32); - had_error = true; - } - LifetimeElisionKind::StaticIfNoLifetimeInScope { only_lint: _ } => { - // FIXME: Check there are other lifetimes in scope, and error/lint. - } - LifetimeElisionKind::Elided(_) => { - ctx.report_elided_lifetimes_in_path(def, lifetime_args_len as u32, false); - } - LifetimeElisionKind::Infer => { - // Allow eliding lifetimes. - } - } - } else if lifetime_args_len != provided_lifetimes_count { - ctx.report_len_mismatch( - def, - provided_lifetimes_count as u32, - lifetime_args_len as u32, - IncorrectGenericsLenKind::Lifetimes, - ); - had_error = true; - } - - let defaults_count = - def_generics.iter_self_type_or_consts().filter(|(_, param)| param.has_default()).count(); - let named_type_and_const_params_count = def_generics - .iter_self_type_or_consts() - .filter(|(_, param)| match param { - TypeOrConstParamData::TypeParamData(param) => { - param.provenance == TypeParamProvenance::TypeParamList - } - TypeOrConstParamData::ConstParamData(_) => true, - }) - .count(); - let expected_max = named_type_and_const_params_count; - let expected_min = - if infer_args { 0 } else { named_type_and_const_params_count - defaults_count }; - if provided_types_and_consts_count < expected_min - || expected_max < provided_types_and_consts_count - { - ctx.report_len_mismatch( - def, - provided_types_and_consts_count as u32, - named_type_and_const_params_count as u32, - IncorrectGenericsLenKind::TypesAndConsts, - ); - had_error = true; - } - - had_error -} - -pub(crate) fn substs_from_args_and_bindings<'db>( - db: &'db dyn HirDatabase, - store: &ExpressionStore, - args_and_bindings: Option<&GenericArgs>, - def: GenericDefId, - mut infer_args: bool, - lifetime_elision: LifetimeElisionKind<'db>, - lowering_assoc_type_generics: bool, - explicit_self_ty: Option>, - ctx: &mut impl GenericArgsLowerer<'db>, -) -> crate::next_solver::GenericArgs<'db> { - let interner = DbInterner::new_with(db, None, None); - - tracing::debug!(?args_and_bindings); - - // Order is - // - Parent parameters - // - Optional Self parameter - // - Lifetime parameters - // - Type or Const parameters - let def_generics = generics(db, def); - let args_slice = args_and_bindings.map(|it| &*it.args).unwrap_or_default(); - - // We do not allow inference if there are specified args, i.e. we do not allow partial inference. - let has_non_lifetime_args = - args_slice.iter().any(|arg| !matches!(arg, GenericArg::Lifetime(_))); - infer_args &= !has_non_lifetime_args; - - let had_count_error = check_generic_args_len( - args_and_bindings, - def, - &def_generics, - infer_args, - &lifetime_elision, - lowering_assoc_type_generics, - ctx, - ); - - let mut substs = Vec::with_capacity(def_generics.len()); - - substs.extend(def_generics.iter_parent_id().map(|id| ctx.parent_arg(id))); - - let mut args = args_slice.iter().enumerate().peekable(); - let mut params = def_generics.iter_self().peekable(); - - // If we encounter a type or const when we expect a lifetime, we infer the lifetimes. - // If we later encounter a lifetime, we know that the arguments were provided in the - // wrong order. 
`force_infer_lt` records the type or const that forced lifetimes to be - // inferred, so we can use it for diagnostics later. - let mut force_infer_lt = None; - - let has_self_arg = args_and_bindings.is_some_and(|it| it.has_self_type); - // First, handle `Self` parameter. Consume it from the args if provided, otherwise from `explicit_self_ty`, - // and lastly infer it. - if let Some(&( - self_param_id, - self_param @ GenericParamDataRef::TypeParamData(TypeParamData { - provenance: TypeParamProvenance::TraitSelf, - .. - }), - )) = params.peek() - { - let self_ty = if has_self_arg { - let (_, self_ty) = args.next().expect("has_self_type=true, should have Self type"); - ctx.provided_kind(self_param_id, self_param, self_ty) - } else { - explicit_self_ty.map(|it| it.into()).unwrap_or_else(|| { - ctx.inferred_kind(def, self_param_id, self_param, infer_args, &substs) - }) - }; - params.next(); - substs.push(self_ty); - } - - loop { - // We're going to iterate through the generic arguments that the user - // provided, matching them with the generic parameters we expect. - // Mismatches can occur as a result of elided lifetimes, or for malformed - // input. We try to handle both sensibly. - match (args.peek(), params.peek()) { - (Some(&(arg_idx, arg)), Some(&(param_id, param))) => match (arg, param) { - (GenericArg::Type(_), GenericParamDataRef::TypeParamData(type_param)) - if type_param.provenance == TypeParamProvenance::ArgumentImplTrait => - { - // Do not allow specifying `impl Trait` explicitly. We already err at that, but if we won't handle it here - // we will handle it as if it was specified, instead of inferring it. - substs.push(ctx.inferred_kind(def, param_id, param, infer_args, &substs)); - params.next(); - } - (GenericArg::Lifetime(_), GenericParamDataRef::LifetimeParamData(_)) - | (GenericArg::Type(_), GenericParamDataRef::TypeParamData(_)) - | (GenericArg::Const(_), GenericParamDataRef::ConstParamData(_)) => { - substs.push(ctx.provided_kind(param_id, param, arg)); - args.next(); - params.next(); - } - ( - GenericArg::Type(_) | GenericArg::Const(_), - GenericParamDataRef::LifetimeParamData(_), - ) => { - // We expected a lifetime argument, but got a type or const - // argument. That means we're inferring the lifetime. - substs.push(ctx.inferred_kind(def, param_id, param, infer_args, &substs)); - params.next(); - force_infer_lt = Some((arg_idx as u32, param_id)); - } - (GenericArg::Type(type_ref), GenericParamDataRef::ConstParamData(_)) => { - if let Some(konst) = type_looks_like_const(store, *type_ref) { - let GenericParamId::ConstParamId(param_id) = param_id else { - panic!("unmatching param kinds"); - }; - let const_ty = const_param_ty_query(db, param_id); - substs.push(ctx.provided_type_like_const(const_ty, konst).into()); - args.next(); - params.next(); - } else { - // See the `_ => { ... }` branch. - if !had_count_error { - ctx.report_arg_mismatch(param_id, arg_idx as u32, has_self_arg); - } - while args.next().is_some() {} - } - } - _ => { - // We expected one kind of parameter, but the user provided - // another. This is an error. However, if we already know that - // the arguments don't match up with the parameters, we won't issue - // an additional error, as the user already knows what's wrong. - if !had_count_error { - ctx.report_arg_mismatch(param_id, arg_idx as u32, has_self_arg); - } - - // We've reported the error, but we want to make sure that this - // problem doesn't bubble down and create additional, irrelevant - // errors. 
In this case, we're simply going to ignore the argument - // and any following arguments. The rest of the parameters will be - // inferred. - while args.next().is_some() {} - } - }, - - (Some(&(_, arg)), None) => { - // We should never be able to reach this point with well-formed input. - // There are two situations in which we can encounter this issue. - // - // 1. The number of arguments is incorrect. In this case, an error - // will already have been emitted, and we can ignore it. - // 2. We've inferred some lifetimes, which have been provided later (i.e. - // after a type or const). We want to throw an error in this case. - if !had_count_error { - assert!( - matches!(arg, GenericArg::Lifetime(_)), - "the only possible situation here is incorrect lifetime order" - ); - let (provided_arg_idx, param_id) = - force_infer_lt.expect("lifetimes ought to have been inferred"); - ctx.report_arg_mismatch(param_id, provided_arg_idx, has_self_arg); - } - - break; - } - - (None, Some(&(param_id, param))) => { - // If there are fewer arguments than parameters, it means we're inferring the remaining arguments. - let param = if let GenericParamId::LifetimeParamId(_) = param_id { - match &lifetime_elision { - LifetimeElisionKind::ElisionFailure - | LifetimeElisionKind::AnonymousCreateParameter { report_in_path: true } - | LifetimeElisionKind::AnonymousReportError => { - assert!(had_count_error); - ctx.inferred_kind(def, param_id, param, infer_args, &substs) - } - LifetimeElisionKind::StaticIfNoLifetimeInScope { only_lint: _ } => { - Region::new_static(interner).into() - } - LifetimeElisionKind::Elided(lifetime) => (*lifetime).into(), - LifetimeElisionKind::AnonymousCreateParameter { report_in_path: false } - | LifetimeElisionKind::Infer => { - // FIXME: With `AnonymousCreateParameter`, we need to create a new lifetime parameter here - // (but this will probably be done in hir-def lowering instead). - ctx.inferred_kind(def, param_id, param, infer_args, &substs) - } - } - } else { - ctx.inferred_kind(def, param_id, param, infer_args, &substs) - }; - substs.push(param); - params.next(); - } - - (None, None) => break, - } - } - - crate::next_solver::GenericArgs::new_from_iter(interner, substs) -} - -fn type_looks_like_const( - store: &ExpressionStore, - type_ref: TypeRefId, -) -> Option> { - // A path/`_` const will be parsed as a type, instead of a const, because when parsing/lowering - // in hir-def we don't yet know the expected argument kind. rustc does this a bit differently, - // when lowering to HIR it resolves the path, and if it doesn't resolve to the type namespace - // it is lowered as a const. Our behavior could deviate from rustc when the value is resolvable - // in both the type and value namespaces, but I believe we only allow more code. 
- let type_ref = &store[type_ref]; - match type_ref { - TypeRef::Path(path) => Some(TypeLikeConst::Path(path)), - TypeRef::Placeholder => Some(TypeLikeConst::Infer), - _ => None, - } -} - -fn unknown_subst<'db>( - interner: DbInterner<'db>, - def: impl Into, -) -> crate::next_solver::GenericArgs<'db> { - let params = generics(interner.db(), def.into()); - crate::next_solver::GenericArgs::new_from_iter( - interner, - params.iter_id().map(|id| match id { - GenericParamId::TypeParamId(_) => Ty::new_error(interner, ErrorGuaranteed).into(), - GenericParamId::ConstParamId(id) => { - unknown_const_as_generic(const_param_ty_query(interner.db(), id)) - } - GenericParamId::LifetimeParamId(_) => { - crate::next_solver::Region::error(interner).into() - } - }), - ) -} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs deleted file mode 100644 index 5125a38825cb8..0000000000000 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs +++ /dev/null @@ -1,169 +0,0 @@ -//! This module contains the implementations of the `ToChalk` trait, which -//! handles conversion between our data types and their corresponding types in -//! Chalk (in both directions); plus some helper functions for more specialized -//! conversions. - -use hir_def::{LifetimeParamId, TraitId, TypeAliasId, TypeOrConstParamId}; -use salsa::{ - Id, - plumbing::{AsId, FromId}, -}; - -use crate::{ - AssocTypeId, CallableDefId, ChalkTraitId, FnDefId, ForeignDefId, Interner, OpaqueTyId, - PlaceholderIndex, chalk_db, - db::{HirDatabase, InternedLifetimeParamId, InternedTypeOrConstParamId}, -}; - -pub trait ToChalk { - type Chalk; - fn to_chalk(self, db: &dyn HirDatabase) -> Self::Chalk; - fn from_chalk(db: &dyn HirDatabase, chalk: Self::Chalk) -> Self; -} - -pub(crate) fn from_chalk(db: &dyn HirDatabase, chalk: ChalkT) -> T -where - T: ToChalk, -{ - T::from_chalk(db, chalk) -} - -impl ToChalk for hir_def::ImplId { - type Chalk = chalk_db::ImplId; - - fn to_chalk(self, _db: &dyn HirDatabase) -> chalk_db::ImplId { - chalk_ir::ImplId(self.as_id()) - } - - fn from_chalk(_db: &dyn HirDatabase, impl_id: chalk_db::ImplId) -> hir_def::ImplId { - FromId::from_id(impl_id.0.as_id()) - } -} - -impl ToChalk for CallableDefId { - type Chalk = FnDefId; - - fn to_chalk(self, _db: &dyn HirDatabase) -> FnDefId { - chalk_ir::FnDefId(salsa::plumbing::AsId::as_id(&self)) - } - - fn from_chalk(db: &dyn HirDatabase, fn_def_id: FnDefId) -> CallableDefId { - salsa::plumbing::FromIdWithDb::from_id(fn_def_id.0, db.zalsa()) - } -} - -impl From for crate::db::InternedOpaqueTyId { - fn from(id: OpaqueTyId) -> Self { - FromId::from_id(id.0) - } -} - -impl From for OpaqueTyId { - fn from(id: crate::db::InternedOpaqueTyId) -> Self { - chalk_ir::OpaqueTyId(id.as_id()) - } -} - -impl From> for crate::db::InternedClosureId { - fn from(id: chalk_ir::ClosureId) -> Self { - FromId::from_id(id.0) - } -} - -impl From for chalk_ir::ClosureId { - fn from(id: crate::db::InternedClosureId) -> Self { - chalk_ir::ClosureId(id.as_id()) - } -} - -impl From> for crate::db::InternedCoroutineId { - fn from(id: chalk_ir::CoroutineId) -> Self { - Self::from_id(id.0) - } -} - -impl From for chalk_ir::CoroutineId { - fn from(id: crate::db::InternedCoroutineId) -> Self { - chalk_ir::CoroutineId(id.as_id()) - } -} - -pub fn to_foreign_def_id(id: TypeAliasId) -> ForeignDefId { - chalk_ir::ForeignDefId(id.as_id()) -} - -pub fn from_foreign_def_id(id: ForeignDefId) -> TypeAliasId { - FromId::from_id(id.0) -} - -pub fn 
to_assoc_type_id(id: TypeAliasId) -> AssocTypeId { - chalk_ir::AssocTypeId(id.as_id()) -} - -pub fn from_assoc_type_id(id: AssocTypeId) -> TypeAliasId { - FromId::from_id(id.0) -} - -pub fn from_placeholder_idx( - db: &dyn HirDatabase, - idx: PlaceholderIndex, -) -> (TypeOrConstParamId, u32) { - assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT); - // SAFETY: We cannot really encapsulate this unfortunately, so just hope this is sound. - let interned_id = - InternedTypeOrConstParamId::from_id(unsafe { Id::from_index(idx.idx.try_into().unwrap()) }); - interned_id.loc(db) -} - -pub fn to_placeholder_idx( - db: &dyn HirDatabase, - id: TypeOrConstParamId, - idx: u32, -) -> PlaceholderIndex { - let interned_id = InternedTypeOrConstParamId::new(db, (id, idx)); - PlaceholderIndex { - ui: chalk_ir::UniverseIndex::ROOT, - idx: interned_id.as_id().index() as usize, - } -} - -pub fn to_placeholder_idx_no_index( - db: &dyn HirDatabase, - id: TypeOrConstParamId, -) -> PlaceholderIndex { - let index = crate::generics::generics(db, id.parent) - .type_or_const_param_idx(id) - .expect("param not found"); - to_placeholder_idx(db, id, index as u32) -} - -pub fn lt_from_placeholder_idx( - db: &dyn HirDatabase, - idx: PlaceholderIndex, -) -> (LifetimeParamId, u32) { - assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT); - // SAFETY: We cannot really encapsulate this unfortunately, so just hope this is sound. - let interned_id = - InternedLifetimeParamId::from_id(unsafe { Id::from_index(idx.idx.try_into().unwrap()) }); - interned_id.loc(db) -} - -pub fn lt_to_placeholder_idx( - db: &dyn HirDatabase, - id: LifetimeParamId, - idx: u32, -) -> PlaceholderIndex { - let interned_id = InternedLifetimeParamId::new(db, (id, idx)); - PlaceholderIndex { - ui: chalk_ir::UniverseIndex::ROOT, - idx: interned_id.as_id().index() as usize, - } -} - -pub fn to_chalk_trait_id(id: TraitId) -> ChalkTraitId { - chalk_ir::TraitId(id.as_id()) -} - -pub fn from_chalk_trait_id(id: ChalkTraitId) -> TraitId { - FromId::from_id(id.0) -} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs index bce17905037cc..cec63566338f1 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs @@ -25,10 +25,8 @@ use smallvec::{SmallVec, smallvec}; use stdx::never; use triomphe::Arc; -use crate::next_solver::infer::InferCtxt; -use crate::next_solver::infer::select::ImplSource; use crate::{ - TraitEnvironment, TyBuilder, + TraitEnvironment, autoderef::{self, AutoderefKind}, db::HirDatabase, infer::{Adjust, Adjustment, OverloadedDeref, PointerCast, unify::InferenceTable}, @@ -37,7 +35,8 @@ use crate::{ Canonical, DbInterner, ErrorGuaranteed, GenericArgs, Goal, Predicate, Region, SolverDefId, TraitRef, Ty, TyKind, TypingMode, infer::{ - DbInternerInferExt, + DbInternerInferExt, InferCtxt, + select::ImplSource, traits::{Obligation, ObligationCause, PredicateObligation}, }, obligation_ctxt::ObligationCtxt, @@ -1597,9 +1596,9 @@ fn is_valid_impl_method_candidate<'db>( return IsValidCandidate::NotVisible; } let self_ty_matches = table.run_in_snapshot(|table| { - let expected_self_ty = TyBuilder::impl_self_ty(db, impl_id) - .fill_with_inference_vars(table) - .build(table.interner()); + let impl_args = table.fresh_args_for_item(impl_id.into()); + let expected_self_ty = + db.impl_self_ty(impl_id).instantiate(table.interner(), impl_args); table.unify(expected_self_ty, self_ty) }); if 
!self_ty_matches { @@ -1727,7 +1726,7 @@ fn is_valid_impl_fn_candidate<'db>( // We need to consider the bounds on the impl to distinguish functions of the same name // for a type. - let predicates = db.generic_predicates_ns(impl_id.into()); + let predicates = db.generic_predicates(impl_id.into()); let Some(predicates) = predicates.instantiate(table.interner(), impl_subst) else { return IsValidCandidate::Yes; }; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs index 936895fb7fd3b..7aebe17e5b4ee 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs @@ -196,7 +196,7 @@ impl ProjectionElem { }, ProjectionElem::Field(Either::Left(f)) => match base.kind() { TyKind::Adt(_, subst) => { - db.field_types_ns(f.parent)[f.local_id].instantiate(interner, subst) + db.field_types(f.parent)[f.local_id].instantiate(interner, subst) } ty => { never!("Only adt has field, found {:?}", ty); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs index 444336ca3f9b2..6e62bcbbddefd 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs @@ -1696,7 +1696,7 @@ impl<'db> Evaluator<'db> { if let TyKind::Adt(adt_ef, subst) = kind && let AdtId::StructId(struct_id) = adt_ef.def_id().0 { - let field_types = self.db.field_types_ns(struct_id.into()); + let field_types = self.db.field_types(struct_id.into()); if let Some(ty) = field_types.iter().last().map(|it| it.1.instantiate(self.interner(), subst)) { @@ -1775,9 +1775,9 @@ impl<'db> Evaluator<'db> { else { not_supported!("unsizing struct without field"); }; - let target_last_field = self.db.field_types_ns(id.into())[last_field] + let target_last_field = self.db.field_types(id.into())[last_field] .instantiate(self.interner(), target_subst); - let current_last_field = self.db.field_types_ns(id.into())[last_field] + let current_last_field = self.db.field_types(id.into())[last_field] .instantiate(self.interner(), current_subst); return self.unsizing_ptr_from_addr( target_last_field, @@ -2268,7 +2268,7 @@ impl<'db> Evaluator<'db> { AdtId::StructId(s) => { let data = s.fields(this.db); let layout = this.layout(ty)?; - let field_types = this.db.field_types_ns(s.into()); + let field_types = this.db.field_types(s.into()); for (f, _) in data.fields().iter() { let offset = layout .fields @@ -2296,7 +2296,7 @@ impl<'db> Evaluator<'db> { e, ) { let data = v.fields(this.db); - let field_types = this.db.field_types_ns(v.into()); + let field_types = this.db.field_types(v.into()); for (f, _) in data.fields().iter() { let offset = l.fields.offset(u32::from(f.into_raw()) as usize).bytes_usize(); @@ -2373,7 +2373,7 @@ impl<'db> Evaluator<'db> { } TyKind::Adt(id, args) => match id.def_id().0 { AdtId::StructId(s) => { - for (i, (_, ty)) in self.db.field_types_ns(s.into()).iter().enumerate() { + for (i, (_, ty)) in self.db.field_types(s.into()).iter().enumerate() { let offset = layout.fields.offset(i).bytes_usize(); let ty = ty.instantiate(self.interner(), args); self.patch_addresses( @@ -2394,7 +2394,7 @@ impl<'db> Evaluator<'db> { self.read_memory(addr, layout.size.bytes_usize())?, e, ) { - for (i, (_, ty)) in self.db.field_types_ns(ev.into()).iter().enumerate() { + for (i, (_, ty)) in self.db.field_types(ev.into()).iter().enumerate() { let offset = layout.fields.offset(i).bytes_usize(); let ty = ty.instantiate(self.interner(), 
args); self.patch_addresses( @@ -2895,7 +2895,7 @@ impl<'db> Evaluator<'db> { let variant_fields = s.fields(self.db); match variant_fields.shape { FieldsShape::Record | FieldsShape::Tuple => { - let field_types = self.db.field_types_ns(s.into()); + let field_types = self.db.field_types(s.into()); for (field, _) in variant_fields.fields().iter() { let offset = layout .fields diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs index c45ae9dcc3d3e..4b1adecf8c87d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs @@ -1383,7 +1383,7 @@ impl<'db> Evaluator<'db> { AdtId::StructId(s) => s, _ => not_supported!("unsized enum or union"), }; - let field_types = self.db.field_types_ns(id.into()); + let field_types = self.db.field_types(id.into()); let last_field_ty = field_types.iter().next_back().unwrap().1.instantiate(self.interner(), subst); let sized_part_size = diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs index ade94b94c0ee5..4c64a70a7a624 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs @@ -34,7 +34,7 @@ impl<'db> Evaluator<'db> { let Some((first_field, _)) = fields.iter().next() else { not_supported!("simd type with no field"); }; - let field_ty = self.db.field_types_ns(id.into())[first_field] + let field_ty = self.db.field_types(id.into())[first_field] .instantiate(self.interner(), subst); return Ok((fields.len(), field_ty)); } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver.rs index f5b4fa1e2a004..1fb9a82ac9e0f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver.rs @@ -32,7 +32,7 @@ pub use region::*; pub use solver::*; pub use ty::*; -pub use crate::lower_nextsolver::ImplTraitIdx; +pub use crate::lower::ImplTraitIdx; pub use rustc_ast_ir::Mutability; pub type Binder<'db, T> = rustc_type_ir::Binder, T>; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/consts.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/consts.rs index 8d81a382c362a..c28af948bfc83 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/consts.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/consts.rs @@ -14,8 +14,7 @@ use rustc_type_ir::{ use crate::{ MemoryMap, - interner::InternedWrapperNoDebug, - next_solver::{ClauseKind, ParamEnv}, + next_solver::{ClauseKind, ParamEnv, interner::InternedWrapperNoDebug}, }; use super::{BoundVarKind, DbInterner, ErrorGuaranteed, GenericArgs, Placeholder, Ty}; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generic_arg.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generic_arg.rs index b2632ba63709d..24f22bcb0c3e2 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generic_arg.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generic_arg.rs @@ -207,7 +207,7 @@ impl<'db> GenericArgs<'db> { where F: FnMut(u32, GenericParamId, &[GenericArg<'db>]) -> GenericArg<'db>, { - let defaults = interner.db.generic_defaults_ns(def_id); + let defaults = interner.db.generic_defaults(def_id); Self::for_item(interner, def_id.into(), |idx, id, prev| match defaults.get(idx as usize) { 
Some(default) => default.instantiate(interner, prev), None => fallback(idx, id, prev), @@ -240,7 +240,7 @@ impl<'db> GenericArgs<'db> { where F: FnMut(u32, GenericParamId, &[GenericArg<'db>]) -> GenericArg<'db>, { - let defaults = interner.db.generic_defaults_ns(def_id); + let defaults = interner.db.generic_defaults(def_id); Self::fill_rest(interner, def_id.into(), first, |idx, id, prev| { defaults .get(idx as usize) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs index 331bcdcb26d38..42f1d926d7db3 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs @@ -32,8 +32,8 @@ use crate::{ method_resolution::{ALL_FLOAT_FPS, ALL_INT_FPS, TyFingerprint}, next_solver::{ AdtIdWrapper, BoundConst, CallableIdWrapper, CanonicalVarKind, ClosureIdWrapper, - CoroutineIdWrapper, Ctor, FnSig, FxIndexMap, ImplIdWrapper, InternedWrapperNoDebug, - RegionAssumptions, SolverContext, SolverDefIds, TraitIdWrapper, TypeAliasIdWrapper, + CoroutineIdWrapper, Ctor, FnSig, FxIndexMap, ImplIdWrapper, RegionAssumptions, + SolverContext, SolverDefIds, TraitIdWrapper, TypeAliasIdWrapper, util::{ContainsTypeErrors, explicit_item_bounds, for_trait_impls}, }, }; @@ -53,6 +53,9 @@ use super::{ util::sizedness_constraint_for_ty, }; +#[derive(PartialEq, Eq, Hash, PartialOrd, Ord, Clone)] +pub struct InternedWrapperNoDebug(pub(crate) T); + #[macro_export] #[doc(hidden)] macro_rules! _interned_vec_nolifetime_salsa { @@ -611,7 +614,7 @@ impl<'db> inherent::AdtDef> for AdtDef { return None; }; let id: VariantId = struct_id.into(); - let field_types = interner.db().field_types_ns(id); + let field_types = interner.db().field_types(id); field_types.iter().last().map(|f| *f.1) } @@ -623,7 +626,7 @@ impl<'db> inherent::AdtDef> for AdtDef { let db = interner.db(); // FIXME: this is disabled just to match the behavior with chalk right now let _field_tys = |id: VariantId| { - db.field_types_ns(id).iter().map(|(_, ty)| ty.skip_binder()).collect::>() + db.field_types(id).iter().map(|(_, ty)| ty.skip_binder()).collect::>() }; let field_tys = |_id: VariantId| vec![]; let tys: Vec<_> = match self.inner().id { @@ -1284,7 +1287,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { self, def_id: Self::DefId, ) -> EarlyBinder> { - let predicates = self.db().generic_predicates_ns(def_id.try_into().unwrap()); + let predicates = self.db().generic_predicates(def_id.try_into().unwrap()); let predicates: Vec<_> = predicates.iter().cloned().collect(); EarlyBinder::bind(predicates.into_iter()) } @@ -1311,7 +1314,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { let predicates: Vec<(Clause<'db>, Span)> = self .db() - .generic_predicates_ns(def_id.0.into()) + .generic_predicates(def_id.0.into()) .iter() .filter(|p| match p.kind().skip_binder() { // rustc has the following assertion: @@ -1345,7 +1348,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { let predicates: Vec<(Clause<'db>, Span)> = self .db() - .generic_predicates_ns(def_id.try_into().unwrap()) + .generic_predicates(def_id.try_into().unwrap()) .iter() .filter(|p| match p.kind().skip_binder() { rustc_type_ir::ClauseKind::Trait(it) => is_self_or_assoc(it.self_ty()), @@ -1765,7 +1768,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> { return UnsizingParams(DenseBitSet::new_empty(num_params)); }; - let field_types = self.db().field_types_ns(variant.id()); + let field_types = 
self.db().field_types(variant.id()); let mut unsizing_params = DenseBitSet::new_empty(num_params); let ty = field_types[tail_field.0]; for arg in ty.instantiate_identity().walk() { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/mapping.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/mapping.rs index 671f06f1b88a6..2b29561393ee7 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/mapping.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/mapping.rs @@ -1,1736 +1,13 @@ //! Things useful for mapping to/from Chalk and next-trait-solver types. -use chalk_ir::{ - InferenceVar, Substitution, TyVariableKind, WellFormed, cast::Cast, fold::Shift, - interner::HasInterner, -}; -use hir_def::{CallableDefId, ConstParamId, GeneralConstId, TypeParamId, signatures::TraitFlags}; -use hir_def::{GenericDefId, GenericParamId}; -use rustc_type_ir::{ - AliasTerm, BoundVar, DebruijnIndex, ExistentialProjection, ExistentialTraitRef, - OutlivesPredicate, ProjectionPredicate, TypeFoldable, TypeSuperFoldable, TypeVisitable, - UniverseIndex, elaborate, - inherent::{BoundVarLike, IntoKind, SliceLike, Ty as _}, - shift_vars, - solve::Goal, -}; +use crate::next_solver::interner::DbInterner; -use crate::next_solver::BoundConst; -use crate::{ - ConstScalar, Interner, MemoryMap, - db::{InternedClosureId, InternedCoroutineId, InternedOpaqueTyId}, - from_assoc_type_id, from_chalk_trait_id, - mapping::ToChalk, - next_solver::{ - Binder, ClauseKind, ConstBytes, TraitPredicate, UnevaluatedConst, - interner::{AdtDef, BoundVarKind, BoundVarKinds, DbInterner}, - }, - to_assoc_type_id, to_chalk_trait_id, to_foreign_def_id, -}; -use crate::{ - from_placeholder_idx, lt_from_placeholder_idx, lt_to_placeholder_idx, to_placeholder_idx, -}; - -use super::{ - BoundExistentialPredicates, BoundRegion, BoundRegionKind, BoundTy, BoundTyKind, Canonical, - CanonicalVars, Clause, Clauses, Const, EarlyParamRegion, ErrorGuaranteed, ExistentialPredicate, - GenericArg, GenericArgs, ParamConst, ParamEnv, ParamTy, Predicate, PredicateKind, Region, - SolverDefId, SubtypePredicate, Term, TraitRef, Ty, Tys, ValueConst, -}; - -// FIXME: This should urgently go (as soon as we finish the migration off Chalk, that is). 
-pub fn convert_binder_to_early_binder<'db, T: rustc_type_ir::TypeFoldable>>( - interner: DbInterner<'db>, - def: GenericDefId, - binder: rustc_type_ir::Binder, T>, -) -> rustc_type_ir::EarlyBinder, T> { - let mut folder = BinderToEarlyBinder { - interner, - debruijn: rustc_type_ir::DebruijnIndex::ZERO, - params: crate::generics::generics(interner.db, def).iter_id().collect(), - }; - rustc_type_ir::EarlyBinder::bind(binder.skip_binder().fold_with(&mut folder)) -} - -struct BinderToEarlyBinder<'db> { - interner: DbInterner<'db>, - debruijn: rustc_type_ir::DebruijnIndex, - params: Vec, -} - -impl<'db> rustc_type_ir::TypeFolder> for BinderToEarlyBinder<'db> { - fn cx(&self) -> DbInterner<'db> { - self.interner - } - - fn fold_binder( - &mut self, - t: rustc_type_ir::Binder, T>, - ) -> rustc_type_ir::Binder, T> - where - T: TypeFoldable>, - { - self.debruijn.shift_in(1); - let result = t.super_fold_with(self); - self.debruijn.shift_out(1); - result - } - - fn fold_ty(&mut self, t: Ty<'db>) -> Ty<'db> { - match t.kind() { - rustc_type_ir::TyKind::Bound(debruijn, bound_ty) if self.debruijn == debruijn => { - let var: rustc_type_ir::BoundVar = bound_ty.var(); - let GenericParamId::TypeParamId(id) = self.params[bound_ty.var.as_usize()] else { - unreachable!() - }; - Ty::new( - self.cx(), - rustc_type_ir::TyKind::Param(ParamTy { index: var.as_u32(), id }), - ) - } - _ => t.super_fold_with(self), - } - } - - fn fold_region(&mut self, r: Region<'db>) -> Region<'db> { - match r.kind() { - rustc_type_ir::ReBound(debruijn, bound_region) if self.debruijn == debruijn => { - let var: rustc_type_ir::BoundVar = bound_region.var(); - let GenericParamId::LifetimeParamId(id) = self.params[bound_region.var.as_usize()] - else { - unreachable!() - }; - Region::new( - self.cx(), - rustc_type_ir::RegionKind::ReEarlyParam(EarlyParamRegion { - index: var.as_u32(), - id, - }), - ) - } - _ => r, - } - } - - fn fold_const(&mut self, c: Const<'db>) -> Const<'db> { - match c.kind() { - rustc_type_ir::ConstKind::Bound(debruijn, var) if self.debruijn == debruijn => { - let GenericParamId::ConstParamId(id) = self.params[var.var.as_usize()] else { - unreachable!() - }; - Const::new( - self.cx(), - rustc_type_ir::ConstKind::Param(ParamConst { index: var.var.as_u32(), id }), - ) - } - _ => c.super_fold_with(self), - } - } -} - -pub trait ChalkToNextSolver<'db, Out> { +pub(crate) trait ChalkToNextSolver<'db, Out> { fn to_nextsolver(&self, interner: DbInterner<'db>) -> Out; } -impl<'db, A, OutA, B, OutB> ChalkToNextSolver<'db, (OutA, OutB)> for (A, B) -where - A: ChalkToNextSolver<'db, OutA>, - B: ChalkToNextSolver<'db, OutB>, -{ - fn to_nextsolver(&self, interner: DbInterner<'db>) -> (OutA, OutB) { - (self.0.to_nextsolver(interner), self.1.to_nextsolver(interner)) - } -} - -pub trait NextSolverToChalk<'db, Out> { - fn to_chalk(self, interner: DbInterner<'db>) -> Out; -} - -impl<'db, T, Out> NextSolverToChalk<'db, Option> for Option -where - T: NextSolverToChalk<'db, Out>, -{ - fn to_chalk(self, interner: DbInterner<'db>) -> Option { - self.map(|it| it.to_chalk(interner)) - } -} - -impl NextSolverToChalk<'_, chalk_ir::Mutability> for rustc_ast_ir::Mutability { - fn to_chalk(self, _interner: DbInterner<'_>) -> chalk_ir::Mutability { - match self { - rustc_ast_ir::Mutability::Not => chalk_ir::Mutability::Not, - rustc_ast_ir::Mutability::Mut => chalk_ir::Mutability::Mut, - } - } -} - -impl NextSolverToChalk<'_, chalk_ir::Safety> for crate::next_solver::abi::Safety { - fn to_chalk(self, _interner: DbInterner<'_>) -> chalk_ir::Safety { 
- match self { - crate::next_solver::abi::Safety::Unsafe => chalk_ir::Safety::Unsafe, - crate::next_solver::abi::Safety::Safe => chalk_ir::Safety::Safe, - } - } -} - -impl<'db> ChalkToNextSolver<'db, Ty<'db>> for chalk_ir::Ty { - fn to_nextsolver(&self, interner: DbInterner<'db>) -> Ty<'db> { - Ty::new( - interner, - match self.kind(Interner) { - chalk_ir::TyKind::Adt(adt_id, substitution) => { - let def = AdtDef::new(adt_id.0, interner); - let args = substitution.to_nextsolver(interner); - rustc_type_ir::TyKind::Adt(def, args) - } - chalk_ir::TyKind::AssociatedType(assoc_type_id, substitution) => { - let def_id = SolverDefId::TypeAliasId(from_assoc_type_id(*assoc_type_id)); - let args: GenericArgs<'db> = substitution.to_nextsolver(interner); - let alias_ty = rustc_type_ir::AliasTy::new(interner, def_id, args.iter()); - rustc_type_ir::TyKind::Alias(rustc_type_ir::AliasTyKind::Projection, alias_ty) - } - chalk_ir::TyKind::Scalar(scalar) => match scalar { - chalk_ir::Scalar::Bool => rustc_type_ir::TyKind::Bool, - chalk_ir::Scalar::Char => rustc_type_ir::TyKind::Char, - chalk_ir::Scalar::Int(chalk_ir::IntTy::Isize) => { - rustc_type_ir::TyKind::Int(rustc_type_ir::IntTy::Isize) - } - chalk_ir::Scalar::Int(chalk_ir::IntTy::I8) => { - rustc_type_ir::TyKind::Int(rustc_type_ir::IntTy::I8) - } - chalk_ir::Scalar::Int(chalk_ir::IntTy::I16) => { - rustc_type_ir::TyKind::Int(rustc_type_ir::IntTy::I16) - } - chalk_ir::Scalar::Int(chalk_ir::IntTy::I32) => { - rustc_type_ir::TyKind::Int(rustc_type_ir::IntTy::I32) - } - chalk_ir::Scalar::Int(chalk_ir::IntTy::I64) => { - rustc_type_ir::TyKind::Int(rustc_type_ir::IntTy::I64) - } - chalk_ir::Scalar::Int(chalk_ir::IntTy::I128) => { - rustc_type_ir::TyKind::Int(rustc_type_ir::IntTy::I128) - } - chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize) => { - rustc_type_ir::TyKind::Uint(rustc_type_ir::UintTy::Usize) - } - chalk_ir::Scalar::Uint(chalk_ir::UintTy::U8) => { - rustc_type_ir::TyKind::Uint(rustc_type_ir::UintTy::U8) - } - chalk_ir::Scalar::Uint(chalk_ir::UintTy::U16) => { - rustc_type_ir::TyKind::Uint(rustc_type_ir::UintTy::U16) - } - chalk_ir::Scalar::Uint(chalk_ir::UintTy::U32) => { - rustc_type_ir::TyKind::Uint(rustc_type_ir::UintTy::U32) - } - chalk_ir::Scalar::Uint(chalk_ir::UintTy::U64) => { - rustc_type_ir::TyKind::Uint(rustc_type_ir::UintTy::U64) - } - chalk_ir::Scalar::Uint(chalk_ir::UintTy::U128) => { - rustc_type_ir::TyKind::Uint(rustc_type_ir::UintTy::U128) - } - chalk_ir::Scalar::Float(chalk_ir::FloatTy::F16) => { - rustc_type_ir::TyKind::Float(rustc_type_ir::FloatTy::F16) - } - chalk_ir::Scalar::Float(chalk_ir::FloatTy::F32) => { - rustc_type_ir::TyKind::Float(rustc_type_ir::FloatTy::F32) - } - chalk_ir::Scalar::Float(chalk_ir::FloatTy::F64) => { - rustc_type_ir::TyKind::Float(rustc_type_ir::FloatTy::F64) - } - chalk_ir::Scalar::Float(chalk_ir::FloatTy::F128) => { - rustc_type_ir::TyKind::Float(rustc_type_ir::FloatTy::F128) - } - }, - chalk_ir::TyKind::Tuple(_, substitution) => { - let args = substitution.to_nextsolver(interner); - rustc_type_ir::TyKind::Tuple(args) - } - chalk_ir::TyKind::Array(ty, len) => rustc_type_ir::TyKind::Array( - ty.to_nextsolver(interner), - len.to_nextsolver(interner), - ), - chalk_ir::TyKind::Slice(ty) => { - rustc_type_ir::TyKind::Slice(ty.to_nextsolver(interner)) - } - chalk_ir::TyKind::Raw(mutability, ty) => rustc_type_ir::RawPtr( - ty.to_nextsolver(interner), - mutability.to_nextsolver(interner), - ), - chalk_ir::TyKind::Ref(mutability, lifetime, ty) => rustc_type_ir::TyKind::Ref( - 
lifetime.to_nextsolver(interner), - ty.to_nextsolver(interner), - mutability.to_nextsolver(interner), - ), - chalk_ir::TyKind::OpaqueType(def_id, substitution) => { - let id: InternedOpaqueTyId = (*def_id).into(); - let args: GenericArgs<'db> = substitution.to_nextsolver(interner); - let alias_ty = rustc_type_ir::AliasTy::new(interner, id.into(), args); - rustc_type_ir::TyKind::Alias(rustc_type_ir::AliasTyKind::Opaque, alias_ty) - } - chalk_ir::TyKind::FnDef(fn_def_id, substitution) => { - let def_id = CallableDefId::from_chalk(interner.db(), *fn_def_id); - rustc_type_ir::TyKind::FnDef( - def_id.into(), - substitution.to_nextsolver(interner), - ) - } - chalk_ir::TyKind::Str => rustc_type_ir::TyKind::Str, - chalk_ir::TyKind::Never => rustc_type_ir::TyKind::Never, - chalk_ir::TyKind::Closure(closure_id, substitution) => { - let id: InternedClosureId = (*closure_id).into(); - rustc_type_ir::TyKind::Closure(id.into(), substitution.to_nextsolver(interner)) - } - chalk_ir::TyKind::Coroutine(coroutine_id, substitution) => { - let id: InternedCoroutineId = (*coroutine_id).into(); - rustc_type_ir::TyKind::Coroutine( - id.into(), - substitution.to_nextsolver(interner), - ) - } - chalk_ir::TyKind::CoroutineWitness(coroutine_id, substitution) => { - let id: InternedCoroutineId = (*coroutine_id).into(); - rustc_type_ir::TyKind::CoroutineWitness( - id.into(), - substitution.to_nextsolver(interner), - ) - } - chalk_ir::TyKind::Foreign(foreign_def_id) => rustc_type_ir::TyKind::Foreign( - crate::from_foreign_def_id(*foreign_def_id).into(), - ), - chalk_ir::TyKind::Error => rustc_type_ir::TyKind::Error(ErrorGuaranteed), - chalk_ir::TyKind::Dyn(dyn_ty) => { - // exists { for<...> ^1.0: ... } - let bounds = BoundExistentialPredicates::new_from_iter( - interner, - dyn_ty.bounds.skip_binders().iter(Interner).filter_map(|pred| { - // for<...> ^1.0: ... 
- let (val, binders) = pred.clone().into_value_and_skipped_binders(); - let bound_vars = binders.to_nextsolver(interner); - let clause = match val { - chalk_ir::WhereClause::Implemented(trait_ref) => { - let trait_id = from_chalk_trait_id(trait_ref.trait_id); - if interner - .db() - .trait_signature(trait_id) - .flags - .contains(TraitFlags::AUTO) - { - ExistentialPredicate::AutoTrait(trait_id.into()) - } else { - let args = GenericArgs::new_from_iter( - interner, - trait_ref - .substitution - .iter(Interner) - .skip(1) - .map(|a| a.clone().shifted_out(Interner).unwrap()) - .map(|a| a.to_nextsolver(interner)), - ); - let trait_ref = ExistentialTraitRef::new_from_args( - interner, trait_id.into(), args, - ); - ExistentialPredicate::Trait(trait_ref) - } - } - chalk_ir::WhereClause::AliasEq(alias_eq) => { - let (def_id, args) = match &alias_eq.alias { - chalk_ir::AliasTy::Projection(projection) => { - let id = - from_assoc_type_id(projection.associated_ty_id); - let def_id = SolverDefId::TypeAliasId(id); - let substs = projection.substitution.iter(Interner).skip(1); - - let args = GenericArgs::new_from_iter( - interner, - substs - .map(|a| { - a.clone().shifted_out(Interner).unwrap() - }) - .map(|a| a.to_nextsolver(interner)), - ); - (def_id, args) - } - chalk_ir::AliasTy::Opaque(_opaque_ty) => { - panic!("Invalid ExistentialPredicate (opaques can't be named)."); - } - }; - let term = alias_eq - .ty - .clone() - .shifted_out(Interner) - .unwrap() - .to_nextsolver(interner) - .into(); - let projection = ExistentialProjection::new_from_args( - interner, def_id, args, term, - ); - ExistentialPredicate::Projection(projection) - } - chalk_ir::WhereClause::LifetimeOutlives(_lifetime_outlives) => { - return None; - } - chalk_ir::WhereClause::TypeOutlives(_type_outlives) => return None, - }; - - Some(Binder::bind_with_vars(clause, bound_vars)) - }), - ); - let region = dyn_ty.lifetime.to_nextsolver(interner); - rustc_type_ir::TyKind::Dynamic(bounds, region) - } - chalk_ir::TyKind::Alias(alias_ty) => match alias_ty { - chalk_ir::AliasTy::Projection(projection_ty) => { - let def_id = SolverDefId::TypeAliasId(from_assoc_type_id( - projection_ty.associated_ty_id, - )); - let alias_ty = rustc_type_ir::AliasTy::new_from_args( - interner, - def_id, - projection_ty.substitution.to_nextsolver(interner), - ); - rustc_type_ir::TyKind::Alias( - rustc_type_ir::AliasTyKind::Projection, - alias_ty, - ) - } - chalk_ir::AliasTy::Opaque(opaque_ty) => { - let id: InternedOpaqueTyId = opaque_ty.opaque_ty_id.into(); - let def_id = SolverDefId::InternedOpaqueTyId(id); - let alias_ty = rustc_type_ir::AliasTy::new_from_args( - interner, - def_id, - opaque_ty.substitution.to_nextsolver(interner), - ); - rustc_type_ir::TyKind::Alias(rustc_type_ir::AliasTyKind::Opaque, alias_ty) - } - }, - chalk_ir::TyKind::Function(fn_pointer) => { - let sig_tys = fn_pointer.clone().into_binders(Interner).to_nextsolver(interner); - let header = rustc_type_ir::FnHeader { - abi: fn_pointer.sig.abi, - c_variadic: fn_pointer.sig.variadic, - safety: match fn_pointer.sig.safety { - chalk_ir::Safety::Safe => super::abi::Safety::Safe, - chalk_ir::Safety::Unsafe => super::abi::Safety::Unsafe, - }, - }; - - rustc_type_ir::TyKind::FnPtr(sig_tys, header) - } - // The schema here is quite confusing. - // The new solver, like rustc, uses `Param` and `EarlyBinder` for generic params. It uses `BoundVar` - // and `Placeholder` together with `Binder` for HRTB, which we mostly don't handle. 
- // Chalk uses `Placeholder` for generic params and `BoundVar` quite liberally, and this is quite a - // problem. `chalk_ir::TyKind::BoundVar` can represent either HRTB or generic params, depending on the - // context. When returned from signature queries, the outer `Binders` represent the generic params. - // But there are also inner `Binders` for HRTB. - // AFAIK there is no way to tell which of the meanings is relevant, so we just use `rustc_type_ir::Bound` - // here, and hope for the best. If you are working with new solver types, therefore, use the new solver - // lower queries. - // Hopefully sooner than later Chalk will be ripped from the codebase and we can avoid that problem. - // For details about the rustc setup, read: https://rustc-dev-guide.rust-lang.org/generic_parameters_summary.html - // and the following chapters. - chalk_ir::TyKind::Placeholder(placeholder_index) => { - let (id, index) = from_placeholder_idx(interner.db, *placeholder_index); - rustc_type_ir::TyKind::Param(ParamTy { - id: TypeParamId::from_unchecked(id), - index, - }) - } - chalk_ir::TyKind::BoundVar(bound_var) => rustc_type_ir::TyKind::Bound( - bound_var.debruijn.to_nextsolver(interner), - BoundTy { - var: rustc_type_ir::BoundVar::from_usize(bound_var.index), - kind: BoundTyKind::Anon, - }, - ), - chalk_ir::TyKind::InferenceVar(inference_var, ty_variable_kind) => { - rustc_type_ir::TyKind::Infer( - (*inference_var, *ty_variable_kind).to_nextsolver(interner), - ) - } - }, - ) - } -} - -impl<'db> NextSolverToChalk<'db, chalk_ir::Ty> for Ty<'db> { - fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::Ty { - convert_ty_for_result(interner, self) - } -} - -impl<'db> ChalkToNextSolver<'db, Region<'db>> for chalk_ir::Lifetime { - fn to_nextsolver(&self, interner: DbInterner<'db>) -> Region<'db> { - Region::new( - interner, - match self.data(Interner) { - chalk_ir::LifetimeData::BoundVar(bound_var) => rustc_type_ir::RegionKind::ReBound( - bound_var.debruijn.to_nextsolver(interner), - BoundRegion { - var: rustc_type_ir::BoundVar::from_u32(bound_var.index as u32), - kind: BoundRegionKind::Anon, - }, - ), - chalk_ir::LifetimeData::InferenceVar(inference_var) => { - rustc_type_ir::RegionKind::ReVar(rustc_type_ir::RegionVid::from_u32( - inference_var.index(), - )) - } - chalk_ir::LifetimeData::Placeholder(placeholder_index) => { - let (id, index) = lt_from_placeholder_idx(interner.db, *placeholder_index); - rustc_type_ir::RegionKind::ReEarlyParam(EarlyParamRegion { id, index }) - } - chalk_ir::LifetimeData::Static => rustc_type_ir::RegionKind::ReStatic, - chalk_ir::LifetimeData::Erased => rustc_type_ir::RegionKind::ReErased, - chalk_ir::LifetimeData::Phantom(_, _) => { - unreachable!() - } - chalk_ir::LifetimeData::Error => { - rustc_type_ir::RegionKind::ReError(ErrorGuaranteed) - } - }, - ) - } -} - -impl<'db> NextSolverToChalk<'db, chalk_ir::Lifetime> for Region<'db> { - fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::Lifetime { - convert_region_for_result(interner, self) - } -} - -impl<'db> ChalkToNextSolver<'db, Const<'db>> for chalk_ir::Const { - fn to_nextsolver(&self, interner: DbInterner<'db>) -> Const<'db> { - let data = self.data(Interner); - Const::new( - interner, - match &data.value { - chalk_ir::ConstValue::BoundVar(bound_var) => rustc_type_ir::ConstKind::Bound( - bound_var.debruijn.to_nextsolver(interner), - BoundConst { var: rustc_type_ir::BoundVar::from_usize(bound_var.index) }, - ), - chalk_ir::ConstValue::InferenceVar(inference_var) => { - 
rustc_type_ir::ConstKind::Infer(rustc_type_ir::InferConst::Var( - rustc_type_ir::ConstVid::from_u32(inference_var.index()), - )) - } - chalk_ir::ConstValue::Placeholder(placeholder_index) => { - let (id, index) = from_placeholder_idx(interner.db, *placeholder_index); - rustc_type_ir::ConstKind::Param(ParamConst { - id: ConstParamId::from_unchecked(id), - index, - }) - } - chalk_ir::ConstValue::Concrete(concrete_const) => match &concrete_const.interned { - ConstScalar::Bytes(bytes, memory) => { - rustc_type_ir::ConstKind::Value(ValueConst::new( - data.ty.to_nextsolver(interner), - ConstBytes { memory: bytes.clone(), memory_map: memory.clone() }, - )) - } - ConstScalar::UnevaluatedConst(c, subst) => { - let def = match *c { - GeneralConstId::ConstId(id) => SolverDefId::ConstId(id), - GeneralConstId::StaticId(id) => SolverDefId::StaticId(id), - }; - let args = subst.to_nextsolver(interner); - rustc_type_ir::ConstKind::Unevaluated(UnevaluatedConst::new(def, args)) - } - ConstScalar::Unknown => rustc_type_ir::ConstKind::Error(ErrorGuaranteed), - }, - }, - ) - } -} - -impl<'db> NextSolverToChalk<'db, chalk_ir::Const> for Const<'db> { - fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::Const { - convert_const_for_result(interner, self) - } -} - -impl<'db> ChalkToNextSolver<'db, rustc_type_ir::FnSigTys>> - for chalk_ir::FnSubst -{ - fn to_nextsolver(&self, interner: DbInterner<'db>) -> rustc_type_ir::FnSigTys> { - rustc_type_ir::FnSigTys { - inputs_and_output: Tys::new_from_iter( - interner, - self.0.iter(Interner).map(|g| g.assert_ty_ref(Interner).to_nextsolver(interner)), - ), - } - } -} - -impl< - 'db, - U: TypeVisitable>, - T: Clone + ChalkToNextSolver<'db, U> + HasInterner, -> ChalkToNextSolver<'db, rustc_type_ir::Binder, U>> for chalk_ir::Binders -{ - fn to_nextsolver( - &self, - interner: DbInterner<'db>, - ) -> rustc_type_ir::Binder, U> { - let (val, binders) = self.clone().into_value_and_skipped_binders(); - rustc_type_ir::Binder::bind_with_vars( - val.to_nextsolver(interner), - binders.to_nextsolver(interner), - ) - } -} - -impl<'db, T: NextSolverToChalk<'db, U>, U: HasInterner> - NextSolverToChalk<'db, chalk_ir::Binders> for rustc_type_ir::Binder, T> -{ - fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::Binders { - chalk_ir::Binders::new( - self.bound_vars().to_chalk(interner), - self.skip_binder().to_chalk(interner), - ) - } -} - -impl<'db> ChalkToNextSolver<'db, BoundVarKinds<'db>> for chalk_ir::VariableKinds { - fn to_nextsolver(&self, interner: DbInterner<'db>) -> BoundVarKinds<'db> { - BoundVarKinds::new_from_iter( - interner, - self.iter(Interner).map(|v| v.to_nextsolver(interner)), - ) - } -} - -impl<'db> NextSolverToChalk<'db, chalk_ir::VariableKinds> for BoundVarKinds<'db> { - fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::VariableKinds { - chalk_ir::VariableKinds::from_iter(Interner, self.iter().map(|v| v.to_chalk(interner))) - } -} - -impl<'db> ChalkToNextSolver<'db, BoundVarKind> for chalk_ir::VariableKind { - fn to_nextsolver(&self, _interner: DbInterner<'db>) -> BoundVarKind { - match self { - chalk_ir::VariableKind::Ty(_ty_variable_kind) => BoundVarKind::Ty(BoundTyKind::Anon), - chalk_ir::VariableKind::Lifetime => BoundVarKind::Region(BoundRegionKind::Anon), - chalk_ir::VariableKind::Const(_ty) => BoundVarKind::Const, - } - } -} - -impl<'db> NextSolverToChalk<'db, chalk_ir::VariableKind> for BoundVarKind { - fn to_chalk(self, _interner: DbInterner<'db>) -> chalk_ir::VariableKind { - match self { - BoundVarKind::Ty(_) => 
chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General), - BoundVarKind::Region(_) => chalk_ir::VariableKind::Lifetime, - BoundVarKind::Const => { - chalk_ir::VariableKind::Const(chalk_ir::TyKind::Error.intern(Interner)) - } - } - } -} - -impl<'db> ChalkToNextSolver<'db, GenericArg<'db>> for chalk_ir::GenericArg { - fn to_nextsolver(&self, interner: DbInterner<'db>) -> GenericArg<'db> { - match self.data(Interner) { - chalk_ir::GenericArgData::Ty(ty) => ty.to_nextsolver(interner).into(), - chalk_ir::GenericArgData::Lifetime(lifetime) => lifetime.to_nextsolver(interner).into(), - chalk_ir::GenericArgData::Const(const_) => const_.to_nextsolver(interner).into(), - } - } -} - -impl<'db> NextSolverToChalk<'db, crate::GenericArg> for GenericArg<'db> { - fn to_chalk(self, interner: DbInterner<'db>) -> crate::GenericArg { - match self { - GenericArg::Ty(ty) => ty.to_chalk(interner).cast(Interner), - GenericArg::Lifetime(region) => region.to_chalk(interner).cast(Interner), - GenericArg::Const(konst) => konst.to_chalk(interner).cast(Interner), - } - } -} - -impl<'db> ChalkToNextSolver<'db, GenericArgs<'db>> for chalk_ir::Substitution { - fn to_nextsolver(&self, interner: DbInterner<'db>) -> GenericArgs<'db> { - GenericArgs::new_from_iter( - interner, - self.iter(Interner).map(|arg| -> GenericArg<'db> { arg.to_nextsolver(interner) }), - ) - } -} - -impl<'db> ChalkToNextSolver<'db, crate::lower_nextsolver::ImplTraitIdx<'db>> - for crate::ImplTraitIdx -{ - fn to_nextsolver( - &self, - _interner: DbInterner<'db>, - ) -> crate::lower_nextsolver::ImplTraitIdx<'db> { - crate::lower_nextsolver::ImplTraitIdx::from_raw(self.into_raw()) - } -} - -impl<'db> NextSolverToChalk<'db, chalk_ir::Substitution> for GenericArgs<'db> { - fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::Substitution { - convert_args_for_result(interner, self.as_slice()) - } -} - -impl<'db> ChalkToNextSolver<'db, Tys<'db>> for chalk_ir::Substitution { - fn to_nextsolver(&self, interner: DbInterner<'db>) -> Tys<'db> { - Tys::new_from_iter( - interner, - self.iter(Interner).map(|arg| -> Ty<'db> { - match arg.data(Interner) { - chalk_ir::GenericArgData::Ty(ty) => ty.to_nextsolver(interner), - chalk_ir::GenericArgData::Lifetime(_) => unreachable!(), - chalk_ir::GenericArgData::Const(_) => unreachable!(), - } - }), - ) - } -} - -impl<'db> NextSolverToChalk<'db, crate::Substitution> for Tys<'db> { - fn to_chalk(self, interner: DbInterner<'db>) -> crate::Substitution { - Substitution::from_iter( - Interner, - self.inner().iter().map(|ty| ty.to_chalk(interner).cast(Interner)), - ) - } -} - -impl<'db> ChalkToNextSolver<'db, rustc_type_ir::DebruijnIndex> for chalk_ir::DebruijnIndex { - fn to_nextsolver(&self, _interner: DbInterner<'db>) -> rustc_type_ir::DebruijnIndex { - rustc_type_ir::DebruijnIndex::from_u32(self.depth()) - } -} - -impl<'db> NextSolverToChalk<'db, chalk_ir::DebruijnIndex> for rustc_type_ir::DebruijnIndex { - fn to_chalk(self, _interner: DbInterner<'db>) -> chalk_ir::DebruijnIndex { - chalk_ir::DebruijnIndex::new(self.index() as u32) - } -} - -impl<'db> ChalkToNextSolver<'db, rustc_type_ir::UniverseIndex> for chalk_ir::UniverseIndex { - fn to_nextsolver(&self, _interner: DbInterner<'db>) -> rustc_type_ir::UniverseIndex { - rustc_type_ir::UniverseIndex::from_u32(self.counter as u32) - } -} - -impl<'db> NextSolverToChalk<'db, chalk_ir::UniverseIndex> for rustc_type_ir::UniverseIndex { - fn to_chalk(self, _interner: DbInterner<'db>) -> chalk_ir::UniverseIndex { - chalk_ir::UniverseIndex { counter: self.index() } - } -} 
- -impl<'db> ChalkToNextSolver<'db, rustc_type_ir::InferTy> - for (chalk_ir::InferenceVar, chalk_ir::TyVariableKind) -{ - fn to_nextsolver(&self, _interner: DbInterner<'db>) -> rustc_type_ir::InferTy { - match self.1 { - chalk_ir::TyVariableKind::General => { - rustc_type_ir::InferTy::TyVar(rustc_type_ir::TyVid::from_u32(self.0.index())) - } - chalk_ir::TyVariableKind::Integer => { - rustc_type_ir::InferTy::IntVar(rustc_type_ir::IntVid::from_u32(self.0.index())) - } - chalk_ir::TyVariableKind::Float => { - rustc_type_ir::InferTy::FloatVar(rustc_type_ir::FloatVid::from_u32(self.0.index())) - } - } - } -} - -impl<'db> ChalkToNextSolver<'db, rustc_ast_ir::Mutability> for chalk_ir::Mutability { - fn to_nextsolver(&self, _interner: DbInterner<'db>) -> rustc_ast_ir::Mutability { - match self { - chalk_ir::Mutability::Mut => rustc_ast_ir::Mutability::Mut, - chalk_ir::Mutability::Not => rustc_ast_ir::Mutability::Not, - } - } -} - -impl<'db> ChalkToNextSolver<'db, Goal, Predicate<'db>>> - for chalk_ir::InEnvironment> -{ - fn to_nextsolver(&self, interner: DbInterner<'db>) -> Goal, Predicate<'db>> { - Goal::new( - interner, - self.environment.to_nextsolver(interner), - self.goal.to_nextsolver(interner), - ) - } -} - -impl<'db> NextSolverToChalk<'db, chalk_ir::InEnvironment>> - for Goal, Predicate<'db>> -{ - fn to_chalk( - self, - interner: DbInterner<'db>, - ) -> chalk_ir::InEnvironment> { - chalk_ir::InEnvironment { - environment: self.param_env.to_chalk(interner), - goal: self.predicate.to_chalk(interner), - } - } -} - -impl<'db, T: HasInterner + ChalkToNextSolver<'db, U>, U> - ChalkToNextSolver<'db, Canonical<'db, U>> for chalk_ir::Canonical -{ - fn to_nextsolver(&self, interner: DbInterner<'db>) -> Canonical<'db, U> { - let variables = CanonicalVars::new_from_iter( - interner, - self.binders.iter(Interner).map(|k| match &k.kind { - chalk_ir::VariableKind::Ty(ty_variable_kind) => match ty_variable_kind { - // FIXME(next-solver): the info is incorrect, but we have no way to store the information in Chalk. 
- TyVariableKind::General => rustc_type_ir::CanonicalVarKind::Ty { - ui: UniverseIndex::ROOT, - sub_root: BoundVar::from_u32(0), - }, - TyVariableKind::Integer => rustc_type_ir::CanonicalVarKind::Int, - TyVariableKind::Float => rustc_type_ir::CanonicalVarKind::Float, - }, - chalk_ir::VariableKind::Lifetime => { - rustc_type_ir::CanonicalVarKind::Region(UniverseIndex::ROOT) - } - chalk_ir::VariableKind::Const(_ty) => { - rustc_type_ir::CanonicalVarKind::Const(UniverseIndex::ROOT) - } - }), - ); - Canonical { - max_universe: UniverseIndex::ROOT, - value: self.value.to_nextsolver(interner), - variables, - } - } -} - -impl<'db, T: NextSolverToChalk<'db, U>, U: HasInterner> - NextSolverToChalk<'db, chalk_ir::Canonical> for Canonical<'db, T> -{ - fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::Canonical { - let binders = chalk_ir::CanonicalVarKinds::from_iter( - Interner, - self.variables.iter().map(|v| match v { - rustc_type_ir::CanonicalVarKind::Ty { ui, sub_root: _ } => { - chalk_ir::CanonicalVarKind::new( - chalk_ir::VariableKind::Ty(TyVariableKind::General), - chalk_ir::UniverseIndex { counter: ui.as_usize() }, - ) - } - rustc_type_ir::CanonicalVarKind::Int => chalk_ir::CanonicalVarKind::new( - chalk_ir::VariableKind::Ty(TyVariableKind::Integer), - chalk_ir::UniverseIndex::root(), - ), - rustc_type_ir::CanonicalVarKind::Float => chalk_ir::CanonicalVarKind::new( - chalk_ir::VariableKind::Ty(TyVariableKind::Float), - chalk_ir::UniverseIndex::root(), - ), - rustc_type_ir::CanonicalVarKind::Region(ui) => chalk_ir::CanonicalVarKind::new( - chalk_ir::VariableKind::Lifetime, - chalk_ir::UniverseIndex { counter: ui.as_usize() }, - ), - rustc_type_ir::CanonicalVarKind::Const(ui) => chalk_ir::CanonicalVarKind::new( - chalk_ir::VariableKind::Const(chalk_ir::TyKind::Error.intern(Interner)), - chalk_ir::UniverseIndex { counter: ui.as_usize() }, - ), - rustc_type_ir::CanonicalVarKind::PlaceholderTy(_) => unimplemented!(), - rustc_type_ir::CanonicalVarKind::PlaceholderRegion(_) => unimplemented!(), - rustc_type_ir::CanonicalVarKind::PlaceholderConst(_) => unimplemented!(), - }), - ); - let value = self.value.to_chalk(interner); - chalk_ir::Canonical { binders, value } - } -} - -impl<'db> ChalkToNextSolver<'db, Predicate<'db>> for chalk_ir::Goal { - fn to_nextsolver(&self, interner: DbInterner<'db>) -> Predicate<'db> { - match self.data(Interner) { - chalk_ir::GoalData::Quantified(_quantifier_kind, binders) => { - if !binders.binders.is_empty(Interner) { - panic!("Should not be constructed."); - } - let (val, _) = binders.clone().into_value_and_skipped_binders(); - val.shifted_out(Interner).unwrap().to_nextsolver(interner) - } - chalk_ir::GoalData::Implies(_program_clauses, _goal) => { - panic!("Should not be constructed.") - } - chalk_ir::GoalData::All(_goals) => panic!("Should not be constructed."), - chalk_ir::GoalData::Not(_goal) => panic!("Should not be constructed."), - chalk_ir::GoalData::EqGoal(eq_goal) => { - let arg_to_term = |g: &chalk_ir::GenericArg| match g.data(Interner) { - chalk_ir::GenericArgData::Ty(ty) => Term::Ty(ty.to_nextsolver(interner)), - chalk_ir::GenericArgData::Const(const_) => { - Term::Const(const_.to_nextsolver(interner)) - } - chalk_ir::GenericArgData::Lifetime(_lifetime) => unreachable!(), - }; - let pred_kind = PredicateKind::AliasRelate( - arg_to_term(&eq_goal.a), - arg_to_term(&eq_goal.b), - rustc_type_ir::AliasRelationDirection::Equate, - ); - let pred_kind = - Binder::bind_with_vars(pred_kind, BoundVarKinds::new_from_iter(interner, [])); - 
Predicate::new(interner, pred_kind) - } - chalk_ir::GoalData::SubtypeGoal(subtype_goal) => { - let subtype_predicate = SubtypePredicate { - a: subtype_goal.a.to_nextsolver(interner), - b: subtype_goal.b.to_nextsolver(interner), - a_is_expected: true, - }; - let pred_kind = PredicateKind::Subtype(subtype_predicate); - let pred_kind = Binder::bind_with_vars( - shift_vars(interner, pred_kind, 1), - BoundVarKinds::new_from_iter(interner, []), - ); - Predicate::new(interner, pred_kind) - } - chalk_ir::GoalData::DomainGoal(domain_goal) => { - let pred_kind = domain_goal.to_nextsolver(interner); - let pred_kind = Binder::bind_with_vars( - shift_vars(interner, pred_kind, 1), - BoundVarKinds::new_from_iter(interner, []), - ); - Predicate::new(interner, pred_kind) - } - chalk_ir::GoalData::CannotProve => panic!("Should not be constructed."), - } - } -} - -impl<'db> NextSolverToChalk<'db, chalk_ir::Goal> for Predicate<'db> { - fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::Goal { - chalk_ir::Goal::new(Interner, self.kind().skip_binder().to_chalk(interner)) - } -} - -impl<'db> NextSolverToChalk<'db, crate::ProjectionTy> for crate::next_solver::AliasTy<'db> { - fn to_chalk(self, interner: DbInterner<'db>) -> crate::ProjectionTy { - let SolverDefId::TypeAliasId(assoc_id) = self.def_id else { unreachable!() }; - crate::ProjectionTy { - associated_ty_id: to_assoc_type_id(assoc_id), - substitution: self.args.to_chalk(interner), - } - } -} - -impl<'db> ChalkToNextSolver<'db, ParamEnv<'db>> for chalk_ir::Environment { - fn to_nextsolver(&self, interner: DbInterner<'db>) -> ParamEnv<'db> { - let clauses = Clauses::new_from_iter( - interner, - self.clauses.iter(Interner).map(|c| c.to_nextsolver(interner)), - ); - let clauses = - Clauses::new_from_iter(interner, elaborate::elaborate(interner, clauses.iter())); - ParamEnv { clauses } - } -} - -impl<'db> NextSolverToChalk<'db, chalk_ir::Environment> for ParamEnv<'db> { - fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::Environment { - let clauses = chalk_ir::ProgramClauses::from_iter( - Interner, - self.clauses.iter().filter_map(|c| -> Option> { - c.to_chalk(interner) - }), - ); - chalk_ir::Environment { clauses } - } -} - -impl<'db> ChalkToNextSolver<'db, Clause<'db>> for chalk_ir::ProgramClause { - fn to_nextsolver(&self, interner: DbInterner<'db>) -> Clause<'db> { - Clause(Predicate::new(interner, self.data(Interner).0.to_nextsolver(interner))) - } -} - -impl<'db> NextSolverToChalk<'db, Option>> for Clause<'db> { - fn to_chalk(self, interner: DbInterner<'db>) -> Option> { - let value: chalk_ir::ProgramClauseImplication = - as NextSolverToChalk< - 'db, - Option>, - >>::to_chalk(self.0.kind().skip_binder(), interner)?; - Some(chalk_ir::ProgramClause::new( - Interner, - chalk_ir::ProgramClauseData(chalk_ir::Binders::empty(Interner, value)), - )) - } -} - -impl<'db> ChalkToNextSolver<'db, PredicateKind<'db>> - for chalk_ir::ProgramClauseImplication -{ - fn to_nextsolver(&self, interner: DbInterner<'db>) -> PredicateKind<'db> { - assert!(self.conditions.is_empty(Interner)); - assert!(self.constraints.is_empty(Interner)); - self.consequence.to_nextsolver(interner) - } -} - -impl<'db> NextSolverToChalk<'db, Option>> - for PredicateKind<'db> -{ - fn to_chalk( - self, - interner: DbInterner<'db>, - ) -> Option> { - let chalk_ir::GoalData::DomainGoal(consequence) = self.to_chalk(interner) else { - return None; - }; - - Some(chalk_ir::ProgramClauseImplication { - consequence, - conditions: chalk_ir::Goals::empty(Interner), - constraints: 
chalk_ir::Constraints::empty(Interner), - priority: chalk_ir::ClausePriority::High, - }) - } -} - -impl<'db> ChalkToNextSolver<'db, PredicateKind<'db>> for chalk_ir::DomainGoal { - fn to_nextsolver(&self, interner: DbInterner<'db>) -> PredicateKind<'db> { - match self { - chalk_ir::DomainGoal::Holds(where_clause) => match where_clause { - chalk_ir::WhereClause::Implemented(trait_ref) => { - let predicate = TraitPredicate { - trait_ref: trait_ref.to_nextsolver(interner), - polarity: rustc_type_ir::PredicatePolarity::Positive, - }; - PredicateKind::Clause(ClauseKind::Trait(predicate)) - } - chalk_ir::WhereClause::AliasEq(alias_eq) => match &alias_eq.alias { - chalk_ir::AliasTy::Projection(p) => { - let def_id = - SolverDefId::TypeAliasId(from_assoc_type_id(p.associated_ty_id)); - let args = p.substitution.to_nextsolver(interner); - let term: Ty<'db> = alias_eq.ty.to_nextsolver(interner); - let term: Term<'db> = term.into(); - let predicate = ProjectionPredicate { - projection_term: AliasTerm::new_from_args(interner, def_id, args), - term, - }; - PredicateKind::Clause(ClauseKind::Projection(predicate)) - } - chalk_ir::AliasTy::Opaque(opaque) => { - let id: InternedOpaqueTyId = opaque.opaque_ty_id.into(); - let def_id = SolverDefId::InternedOpaqueTyId(id); - let args = opaque.substitution.to_nextsolver(interner); - let term: Ty<'db> = alias_eq.ty.to_nextsolver(interner); - let term: Term<'db> = term.into(); - let opaque_ty = Ty::new( - interner, - rustc_type_ir::TyKind::Alias( - rustc_type_ir::AliasTyKind::Opaque, - rustc_type_ir::AliasTy::new_from_args(interner, def_id, args), - ), - ) - .into(); - PredicateKind::AliasRelate( - opaque_ty, - term, - rustc_type_ir::AliasRelationDirection::Equate, - ) - } - }, - chalk_ir::WhereClause::LifetimeOutlives(lifetime_outlives) => { - let predicate = OutlivesPredicate( - lifetime_outlives.a.to_nextsolver(interner), - lifetime_outlives.b.to_nextsolver(interner), - ); - PredicateKind::Clause(ClauseKind::RegionOutlives(predicate)) - } - chalk_ir::WhereClause::TypeOutlives(type_outlives) => { - let predicate = OutlivesPredicate( - type_outlives.ty.to_nextsolver(interner), - type_outlives.lifetime.to_nextsolver(interner), - ); - PredicateKind::Clause(ClauseKind::TypeOutlives(predicate)) - } - }, - chalk_ir::DomainGoal::Normalize(normalize) => { - let proj_ty = match &normalize.alias { - chalk_ir::AliasTy::Projection(proj) => proj, - _ => unimplemented!(), - }; - let args: GenericArgs<'db> = proj_ty.substitution.to_nextsolver(interner); - let alias = Ty::new( - interner, - rustc_type_ir::TyKind::Alias( - rustc_type_ir::AliasTyKind::Projection, - rustc_type_ir::AliasTy::new( - interner, - from_assoc_type_id(proj_ty.associated_ty_id).into(), - args, - ), - ), - ) - .into(); - let term = normalize.ty.to_nextsolver(interner).into(); - PredicateKind::AliasRelate( - alias, - term, - rustc_type_ir::AliasRelationDirection::Equate, - ) - } - chalk_ir::DomainGoal::WellFormed(well_formed) => { - let term = match well_formed { - WellFormed::Trait(_) => panic!("Should not be constructed."), - WellFormed::Ty(ty) => Term::Ty(ty.to_nextsolver(interner)), - }; - PredicateKind::Clause(rustc_type_ir::ClauseKind::WellFormed(term)) - } - chalk_ir::DomainGoal::FromEnv(from_env) => match from_env { - chalk_ir::FromEnv::Trait(trait_ref) => { - let predicate = TraitPredicate { - trait_ref: trait_ref.to_nextsolver(interner), - polarity: rustc_type_ir::PredicatePolarity::Positive, - }; - PredicateKind::Clause(ClauseKind::Trait(predicate)) - } - chalk_ir::FromEnv::Ty(ty) => 
PredicateKind::Clause(ClauseKind::WellFormed( - Term::Ty(ty.to_nextsolver(interner)), - )), - }, - chalk_ir::DomainGoal::IsLocal(_ty) => panic!("Should not be constructed."), - chalk_ir::DomainGoal::IsUpstream(_ty) => panic!("Should not be constructed."), - chalk_ir::DomainGoal::IsFullyVisible(_ty) => panic!("Should not be constructed."), - chalk_ir::DomainGoal::LocalImplAllowed(_trait_ref) => { - panic!("Should not be constructed.") - } - chalk_ir::DomainGoal::Compatible => panic!("Should not be constructed."), - chalk_ir::DomainGoal::DownstreamType(_ty) => panic!("Should not be constructed."), - chalk_ir::DomainGoal::Reveal => panic!("Should not be constructed."), - chalk_ir::DomainGoal::ObjectSafe(_trait_id) => panic!("Should not be constructed."), - } - } -} - -impl<'db> NextSolverToChalk<'db, chalk_ir::GoalData> for PredicateKind<'db> { - fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::GoalData { - match self { - rustc_type_ir::PredicateKind::Clause(rustc_type_ir::ClauseKind::Trait(trait_pred)) => { - let trait_ref = trait_pred.trait_ref.to_chalk(interner); - let where_clause = chalk_ir::WhereClause::Implemented(trait_ref); - chalk_ir::GoalData::DomainGoal(chalk_ir::DomainGoal::Holds(where_clause)) - } - rustc_type_ir::PredicateKind::Clause(rustc_type_ir::ClauseKind::Projection( - proj_predicate, - )) => { - let associated_ty_id = match proj_predicate.def_id() { - SolverDefId::TypeAliasId(id) => to_assoc_type_id(id), - _ => unreachable!(), - }; - let substitution = proj_predicate.projection_term.args.to_chalk(interner); - let alias = chalk_ir::AliasTy::Projection(chalk_ir::ProjectionTy { - associated_ty_id, - substitution, - }); - let ty = match proj_predicate.term.kind() { - rustc_type_ir::TermKind::Ty(ty) => ty, - rustc_type_ir::TermKind::Const(_) => unimplemented!(), - }; - let ty = ty.to_chalk(interner); - let alias_eq = chalk_ir::AliasEq { alias, ty }; - let where_clause = chalk_ir::WhereClause::AliasEq(alias_eq); - chalk_ir::GoalData::DomainGoal(chalk_ir::DomainGoal::Holds(where_clause)) - } - rustc_type_ir::PredicateKind::Clause(rustc_type_ir::ClauseKind::TypeOutlives( - outlives, - )) => { - let lifetime = outlives.1.to_chalk(interner); - let ty = outlives.0.to_chalk(interner); - let where_clause = - chalk_ir::WhereClause::TypeOutlives(chalk_ir::TypeOutlives { lifetime, ty }); - chalk_ir::GoalData::DomainGoal(chalk_ir::DomainGoal::Holds(where_clause)) - } - rustc_type_ir::PredicateKind::Clause(rustc_type_ir::ClauseKind::RegionOutlives( - outlives, - )) => { - let a = outlives.0.to_chalk(interner); - let b = outlives.1.to_chalk(interner); - let where_clause = - chalk_ir::WhereClause::LifetimeOutlives(chalk_ir::LifetimeOutlives { a, b }); - chalk_ir::GoalData::DomainGoal(chalk_ir::DomainGoal::Holds(where_clause)) - } - rustc_type_ir::PredicateKind::AliasRelate( - alias_term, - target_term, - _alias_relation_direction, - ) => { - let term_to_generic_arg = |term: Term<'db>| match term { - Term::Ty(ty) => chalk_ir::GenericArg::new( - Interner, - chalk_ir::GenericArgData::Ty(ty.to_chalk(interner)), - ), - Term::Const(const_) => chalk_ir::GenericArg::new( - Interner, - chalk_ir::GenericArgData::Const(const_.to_chalk(interner)), - ), - }; - - chalk_ir::GoalData::EqGoal(chalk_ir::EqGoal { - a: term_to_generic_arg(alias_term), - b: term_to_generic_arg(target_term), - }) - } - rustc_type_ir::PredicateKind::Clause(_) => unimplemented!(), - rustc_type_ir::PredicateKind::DynCompatible(_) => unimplemented!(), - rustc_type_ir::PredicateKind::Subtype(_) => unimplemented!(), - 
rustc_type_ir::PredicateKind::Coerce(_) => unimplemented!(), - rustc_type_ir::PredicateKind::ConstEquate(_, _) => unimplemented!(), - rustc_type_ir::PredicateKind::Ambiguous => unimplemented!(), - rustc_type_ir::PredicateKind::NormalizesTo(_) => unimplemented!(), - } - } -} - -impl<'db> ChalkToNextSolver<'db, TraitRef<'db>> for chalk_ir::TraitRef { - fn to_nextsolver(&self, interner: DbInterner<'db>) -> TraitRef<'db> { - let args = self.substitution.to_nextsolver(interner); - TraitRef::new_from_args(interner, from_chalk_trait_id(self.trait_id).into(), args) - } -} - -impl<'db> NextSolverToChalk<'db, chalk_ir::TraitRef> for TraitRef<'db> { - fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::TraitRef { - let trait_id = to_chalk_trait_id(self.def_id.0); - let substitution = self.args.to_chalk(interner); - chalk_ir::TraitRef { trait_id, substitution } - } -} - -impl<'db> ChalkToNextSolver<'db, PredicateKind<'db>> for chalk_ir::WhereClause { - fn to_nextsolver(&self, interner: DbInterner<'db>) -> PredicateKind<'db> { - match self { - chalk_ir::WhereClause::Implemented(trait_ref) => { - let predicate = TraitPredicate { - trait_ref: trait_ref.to_nextsolver(interner), - polarity: rustc_type_ir::PredicatePolarity::Positive, - }; - PredicateKind::Clause(ClauseKind::Trait(predicate)) - } - chalk_ir::WhereClause::AliasEq(alias_eq) => { - let projection = match &alias_eq.alias { - chalk_ir::AliasTy::Projection(p) => p, - _ => unimplemented!(), - }; - let def_id = - SolverDefId::TypeAliasId(from_assoc_type_id(projection.associated_ty_id)); - let args = projection.substitution.to_nextsolver(interner); - let term: Ty<'db> = alias_eq.ty.to_nextsolver(interner); - let term: Term<'db> = term.into(); - let predicate = ProjectionPredicate { - projection_term: AliasTerm::new_from_args(interner, def_id, args), - term, - }; - PredicateKind::Clause(ClauseKind::Projection(predicate)) - } - chalk_ir::WhereClause::TypeOutlives(type_outlives) => { - let ty = type_outlives.ty.to_nextsolver(interner); - let r = type_outlives.lifetime.to_nextsolver(interner); - PredicateKind::Clause(ClauseKind::TypeOutlives(OutlivesPredicate(ty, r))) - } - chalk_ir::WhereClause::LifetimeOutlives(lifetime_outlives) => { - let a = lifetime_outlives.a.to_nextsolver(interner); - let b = lifetime_outlives.b.to_nextsolver(interner); - PredicateKind::Clause(ClauseKind::RegionOutlives(OutlivesPredicate(a, b))) - } - } - } -} - -impl<'db, I> NextSolverToChalk<'db, chalk_ir::ConstrainedSubst> for I -where - I: IntoIterator>, -{ - fn to_chalk(self, interner: DbInterner<'db>) -> chalk_ir::ConstrainedSubst { - chalk_ir::ConstrainedSubst { - constraints: chalk_ir::Constraints::empty(Interner), - subst: GenericArgs::new_from_iter(interner, self).to_chalk(interner), - } - } -} - -impl<'db> NextSolverToChalk<'db, crate::CallableSig> for rustc_type_ir::FnSig> { - fn to_chalk(self, interner: DbInterner<'db>) -> crate::CallableSig { - crate::CallableSig { - abi: self.abi, - is_varargs: self.c_variadic, - safety: match self.safety { - super::abi::Safety::Safe => chalk_ir::Safety::Safe, - super::abi::Safety::Unsafe => chalk_ir::Safety::Unsafe, - }, - params_and_return: triomphe::Arc::from_iter( - self.inputs_and_output.iter().map(|ty| convert_ty_for_result(interner, ty)), - ), - } - } -} - -pub fn convert_canonical_args_for_result<'db>( - interner: DbInterner<'db>, - args: Canonical<'db, Vec>>, -) -> chalk_ir::Canonical> { - args.to_chalk(interner) -} - -pub fn convert_args_for_result<'db>( - interner: DbInterner<'db>, - args: &[GenericArg<'db>], -) 
-> crate::Substitution { - let mut substs = Vec::with_capacity(args.len()); - for arg in args { - match (*arg).kind() { - rustc_type_ir::GenericArgKind::Type(ty) => { - let ty = convert_ty_for_result(interner, ty); - substs.push(chalk_ir::GenericArgData::Ty(ty).intern(Interner)); - } - rustc_type_ir::GenericArgKind::Lifetime(region) => { - let lifetime = convert_region_for_result(interner, region); - substs.push(chalk_ir::GenericArgData::Lifetime(lifetime).intern(Interner)); - } - rustc_type_ir::GenericArgKind::Const(const_) => { - substs.push( - chalk_ir::GenericArgData::Const(convert_const_for_result(interner, const_)) - .intern(Interner), - ); - } - } - } - Substitution::from_iter(Interner, substs) -} - -pub fn convert_ty_for_result<'db>(interner: DbInterner<'db>, ty: Ty<'db>) -> crate::Ty { - use crate::{Scalar, TyKind}; - use chalk_ir::{FloatTy, IntTy, UintTy}; - match ty.kind() { - rustc_type_ir::TyKind::Bool => TyKind::Scalar(Scalar::Bool), - rustc_type_ir::TyKind::Char => TyKind::Scalar(Scalar::Char), - rustc_type_ir::TyKind::Int(rustc_type_ir::IntTy::I8) => { - TyKind::Scalar(Scalar::Int(IntTy::I8)) - } - rustc_type_ir::TyKind::Int(rustc_type_ir::IntTy::I16) => { - TyKind::Scalar(Scalar::Int(IntTy::I16)) - } - rustc_type_ir::TyKind::Int(rustc_type_ir::IntTy::I32) => { - TyKind::Scalar(Scalar::Int(IntTy::I32)) - } - rustc_type_ir::TyKind::Int(rustc_type_ir::IntTy::I64) => { - TyKind::Scalar(Scalar::Int(IntTy::I64)) - } - rustc_type_ir::TyKind::Int(rustc_type_ir::IntTy::I128) => { - TyKind::Scalar(Scalar::Int(IntTy::I128)) - } - rustc_type_ir::TyKind::Int(rustc_type_ir::IntTy::Isize) => { - TyKind::Scalar(Scalar::Int(IntTy::Isize)) - } - rustc_type_ir::TyKind::Uint(rustc_type_ir::UintTy::U8) => { - TyKind::Scalar(Scalar::Uint(UintTy::U8)) - } - rustc_type_ir::TyKind::Uint(rustc_type_ir::UintTy::U16) => { - TyKind::Scalar(Scalar::Uint(UintTy::U16)) - } - rustc_type_ir::TyKind::Uint(rustc_type_ir::UintTy::U32) => { - TyKind::Scalar(Scalar::Uint(UintTy::U32)) - } - rustc_type_ir::TyKind::Uint(rustc_type_ir::UintTy::U64) => { - TyKind::Scalar(Scalar::Uint(UintTy::U64)) - } - rustc_type_ir::TyKind::Uint(rustc_type_ir::UintTy::U128) => { - TyKind::Scalar(Scalar::Uint(UintTy::U128)) - } - rustc_type_ir::TyKind::Uint(rustc_type_ir::UintTy::Usize) => { - TyKind::Scalar(Scalar::Uint(UintTy::Usize)) - } - rustc_type_ir::TyKind::Float(rustc_type_ir::FloatTy::F16) => { - TyKind::Scalar(Scalar::Float(FloatTy::F16)) - } - rustc_type_ir::TyKind::Float(rustc_type_ir::FloatTy::F32) => { - TyKind::Scalar(Scalar::Float(FloatTy::F32)) - } - rustc_type_ir::TyKind::Float(rustc_type_ir::FloatTy::F64) => { - TyKind::Scalar(Scalar::Float(FloatTy::F64)) - } - rustc_type_ir::TyKind::Float(rustc_type_ir::FloatTy::F128) => { - TyKind::Scalar(Scalar::Float(FloatTy::F128)) - } - rustc_type_ir::TyKind::Str => TyKind::Str, - rustc_type_ir::TyKind::Error(_) => TyKind::Error, - rustc_type_ir::TyKind::Never => TyKind::Never, - - rustc_type_ir::TyKind::Adt(def, args) => { - let adt_id = def.inner().id; - let subst = convert_args_for_result(interner, args.as_slice()); - TyKind::Adt(chalk_ir::AdtId(adt_id), subst) - } - - rustc_type_ir::TyKind::Infer(infer_ty) => { - let (var, kind) = match infer_ty { - rustc_type_ir::InferTy::TyVar(var) => { - (InferenceVar::from(var.as_u32()), TyVariableKind::General) - } - rustc_type_ir::InferTy::IntVar(var) => { - (InferenceVar::from(var.as_u32()), TyVariableKind::Integer) - } - rustc_type_ir::InferTy::FloatVar(var) => { - (InferenceVar::from(var.as_u32()), TyVariableKind::Float) - } 
- rustc_type_ir::InferTy::FreshFloatTy(..) - | rustc_type_ir::InferTy::FreshIntTy(..) - | rustc_type_ir::InferTy::FreshTy(..) => { - panic!("Freshening shouldn't happen.") - } - }; - TyKind::InferenceVar(var, kind) - } - - rustc_type_ir::TyKind::Ref(r, ty, mutability) => { - let mutability = match mutability { - rustc_ast_ir::Mutability::Mut => chalk_ir::Mutability::Mut, - rustc_ast_ir::Mutability::Not => chalk_ir::Mutability::Not, - }; - let r = convert_region_for_result(interner, r); - let ty = convert_ty_for_result(interner, ty); - TyKind::Ref(mutability, r, ty) - } - - rustc_type_ir::TyKind::Tuple(tys) => { - let size = tys.len(); - let subst = Substitution::from_iter( - Interner, - tys.iter().map(|ty| { - chalk_ir::GenericArgData::Ty(convert_ty_for_result(interner, ty)) - .intern(Interner) - }), - ); - TyKind::Tuple(size, subst) - } - - rustc_type_ir::TyKind::Array(ty, const_) => { - let ty = convert_ty_for_result(interner, ty); - let const_ = convert_const_for_result(interner, const_); - TyKind::Array(ty, const_) - } - - rustc_type_ir::TyKind::Alias(alias_ty_kind, alias_ty) => match alias_ty_kind { - rustc_type_ir::AliasTyKind::Projection => { - let assoc_ty_id = match alias_ty.def_id { - SolverDefId::TypeAliasId(id) => id, - _ => unreachable!(), - }; - let associated_ty_id = to_assoc_type_id(assoc_ty_id); - let substitution = convert_args_for_result(interner, alias_ty.args.as_slice()); - TyKind::Alias(crate::AliasTy::Projection(crate::ProjectionTy { - associated_ty_id, - substitution, - })) - } - rustc_type_ir::AliasTyKind::Opaque => { - let opaque_ty_id = match alias_ty.def_id { - SolverDefId::InternedOpaqueTyId(id) => id, - _ => unreachable!(), - }; - let substitution = convert_args_for_result(interner, alias_ty.args.as_slice()); - TyKind::Alias(chalk_ir::AliasTy::Opaque(chalk_ir::OpaqueTy { - opaque_ty_id: opaque_ty_id.into(), - substitution, - })) - } - rustc_type_ir::AliasTyKind::Inherent => unimplemented!(), - rustc_type_ir::AliasTyKind::Free => unimplemented!(), - }, - - // For `Placeholder`, `Bound` and `Param`, see the comment on the reverse conversion. - rustc_type_ir::TyKind::Placeholder(_placeholder) => { - unimplemented!( - "A `rustc_type_ir::TyKind::Placeholder` doesn't have a direct \ - correspondence in Chalk, as it represents a universally instantiated `Bound`.\n\ - It therefore feels safer to leave it panicking, but if you hit this panic \ - feel free to do the same as in `rustc_type_ir::TyKind::Bound` here." 
- ) - } - rustc_type_ir::TyKind::Bound(debruijn_index, ty) => TyKind::BoundVar(chalk_ir::BoundVar { - debruijn: chalk_ir::DebruijnIndex::new(debruijn_index.as_u32()), - index: ty.var.as_usize(), - }), - rustc_type_ir::TyKind::Param(param) => { - let placeholder = to_placeholder_idx(interner.db, param.id.into(), param.index); - TyKind::Placeholder(placeholder) - } - - rustc_type_ir::TyKind::FnPtr(bound_sig, fn_header) => { - let num_binders = bound_sig.bound_vars().len(); - let sig = chalk_ir::FnSig { - abi: fn_header.abi, - safety: match fn_header.safety { - crate::next_solver::abi::Safety::Safe => chalk_ir::Safety::Safe, - crate::next_solver::abi::Safety::Unsafe => chalk_ir::Safety::Unsafe, - }, - variadic: fn_header.c_variadic, - }; - let args = GenericArgs::new_from_iter( - interner, - bound_sig.skip_binder().inputs_and_output.iter().map(|a| a.into()), - ); - let substitution = convert_args_for_result(interner, args.as_slice()); - let substitution = chalk_ir::FnSubst(substitution); - let fnptr = chalk_ir::FnPointer { num_binders, sig, substitution }; - TyKind::Function(fnptr) - } - - rustc_type_ir::TyKind::Dynamic(preds, region) => { - let self_ty = Ty::new_bound( - interner, - DebruijnIndex::from_u32(1), - BoundTy { kind: BoundTyKind::Anon, var: BoundVar::from_u32(0) }, - ); - let bounds = chalk_ir::QuantifiedWhereClauses::from_iter( - Interner, - preds.iter().map(|p| { - let binders = chalk_ir::VariableKinds::from_iter( - Interner, - p.bound_vars().iter().map(|b| match b { - BoundVarKind::Ty(_kind) => { - chalk_ir::VariableKind::Ty(TyVariableKind::General) - } - BoundVarKind::Region(_kind) => chalk_ir::VariableKind::Lifetime, - BoundVarKind::Const => { - chalk_ir::VariableKind::Const(crate::TyKind::Error.intern(Interner)) - } - }), - ); - - // Rust and chalk have slightly different - // representation for trait objects. - // - // Chalk uses `for for<'a> T0: Trait<'a>` while rustc - // uses `ExistentialPredicate`s, which do not have a self ty. - // We need to shift escaping bound vars by 1 to accommodate - // the newly introduced `for` binder. 
- let p = shift_vars(interner, p, 1); - - let where_clause = match p.skip_binder() { - rustc_type_ir::ExistentialPredicate::Trait(trait_ref) => { - let trait_ref = TraitRef::new( - interner, - trait_ref.def_id, - [self_ty.into()].into_iter().chain(trait_ref.args.iter()), - ); - let trait_id = to_chalk_trait_id(trait_ref.def_id.0); - let substitution = - convert_args_for_result(interner, trait_ref.args.as_slice()); - let trait_ref = chalk_ir::TraitRef { trait_id, substitution }; - chalk_ir::WhereClause::Implemented(trait_ref) - } - rustc_type_ir::ExistentialPredicate::AutoTrait(trait_) => { - let trait_id = to_chalk_trait_id(trait_.0); - let substitution = chalk_ir::Substitution::from1( - Interner, - convert_ty_for_result(interner, self_ty), - ); - let trait_ref = chalk_ir::TraitRef { trait_id, substitution }; - chalk_ir::WhereClause::Implemented(trait_ref) - } - rustc_type_ir::ExistentialPredicate::Projection(existential_projection) => { - let projection = ProjectionPredicate { - projection_term: AliasTerm::new( - interner, - existential_projection.def_id, - [self_ty.into()] - .iter() - .chain(existential_projection.args.iter()), - ), - term: existential_projection.term, - }; - let associated_ty_id = match projection.projection_term.def_id { - SolverDefId::TypeAliasId(id) => to_assoc_type_id(id), - _ => unreachable!(), - }; - let substitution = convert_args_for_result( - interner, - projection.projection_term.args.as_slice(), - ); - let alias = chalk_ir::AliasTy::Projection(chalk_ir::ProjectionTy { - associated_ty_id, - substitution, - }); - let ty = match projection.term { - Term::Ty(ty) => ty, - _ => unreachable!(), - }; - let ty = convert_ty_for_result(interner, ty); - let alias_eq = chalk_ir::AliasEq { alias, ty }; - chalk_ir::WhereClause::AliasEq(alias_eq) - } - }; - chalk_ir::Binders::new(binders, where_clause) - }), - ); - let binders = chalk_ir::VariableKinds::from1( - Interner, - chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General), - ); - let bounds = chalk_ir::Binders::new(binders, bounds); - let dyn_ty = - chalk_ir::DynTy { bounds, lifetime: convert_region_for_result(interner, region) }; - TyKind::Dyn(dyn_ty) - } - - rustc_type_ir::TyKind::Slice(ty) => { - let ty = convert_ty_for_result(interner, ty); - TyKind::Slice(ty) - } - - rustc_type_ir::TyKind::Foreign(foreign) => TyKind::Foreign(to_foreign_def_id(foreign.0)), - rustc_type_ir::TyKind::Pat(_, _) => unimplemented!(), - rustc_type_ir::TyKind::RawPtr(ty, mutability) => { - let mutability = match mutability { - rustc_ast_ir::Mutability::Mut => chalk_ir::Mutability::Mut, - rustc_ast_ir::Mutability::Not => chalk_ir::Mutability::Not, - }; - let ty = convert_ty_for_result(interner, ty); - TyKind::Raw(mutability, ty) - } - rustc_type_ir::TyKind::FnDef(def_id, args) => { - let subst = convert_args_for_result(interner, args.as_slice()); - TyKind::FnDef(def_id.0.to_chalk(interner.db()), subst) - } - - rustc_type_ir::TyKind::Closure(def_id, args) => { - let subst = convert_args_for_result(interner, args.as_slice()); - TyKind::Closure(def_id.0.into(), subst) - } - rustc_type_ir::TyKind::CoroutineClosure(_, _) => unimplemented!(), - rustc_type_ir::TyKind::Coroutine(def_id, args) => { - let subst = convert_args_for_result(interner, args.as_slice()); - TyKind::Coroutine(def_id.0.into(), subst) - } - rustc_type_ir::TyKind::CoroutineWitness(def_id, args) => { - let subst = convert_args_for_result(interner, args.as_slice()); - TyKind::CoroutineWitness(def_id.0.into(), subst) - } - - rustc_type_ir::TyKind::UnsafeBinder(_) => 
unimplemented!(), - } - .intern(Interner) -} - -pub fn convert_const_for_result<'db>( - interner: DbInterner<'db>, - const_: Const<'db>, -) -> crate::Const { - let value: chalk_ir::ConstValue = match const_.kind() { - rustc_type_ir::ConstKind::Infer(rustc_type_ir::InferConst::Var(var)) => { - chalk_ir::ConstValue::InferenceVar(chalk_ir::InferenceVar::from(var.as_u32())) - } - rustc_type_ir::ConstKind::Infer(rustc_type_ir::InferConst::Fresh(_fresh)) => { - panic!("Vars should not be freshened.") - } - rustc_type_ir::ConstKind::Param(param) => { - let placeholder = to_placeholder_idx(interner.db, param.id.into(), param.index); - chalk_ir::ConstValue::Placeholder(placeholder) - } - rustc_type_ir::ConstKind::Bound(debruijn_index, var) => { - chalk_ir::ConstValue::BoundVar(chalk_ir::BoundVar::new( - chalk_ir::DebruijnIndex::new(debruijn_index.as_u32()), - var.var.index(), - )) - } - rustc_type_ir::ConstKind::Placeholder(_placeholder_const) => { - unimplemented!( - "A `rustc_type_ir::ConstKind::Placeholder` doesn't have a direct \ - correspondence in Chalk, as it represents a universally instantiated `Bound`.\n\ - It therefore feels safer to leave it panicking, but if you hit this panic \ - feel free to do the same as in `rustc_type_ir::ConstKind::Bound` here." - ) - } - rustc_type_ir::ConstKind::Unevaluated(unevaluated_const) => { - let id = match unevaluated_const.def { - SolverDefId::ConstId(id) => GeneralConstId::ConstId(id), - SolverDefId::StaticId(id) => GeneralConstId::StaticId(id), - _ => unreachable!(), - }; - let subst = convert_args_for_result(interner, unevaluated_const.args.as_slice()); - chalk_ir::ConstValue::Concrete(chalk_ir::ConcreteConst { - interned: ConstScalar::UnevaluatedConst(id, subst), - }) - } - rustc_type_ir::ConstKind::Value(value_const) => { - let bytes = value_const.value.inner(); - let value = chalk_ir::ConstValue::Concrete(chalk_ir::ConcreteConst { - // SAFETY: we will never actually use this without a database - interned: ConstScalar::Bytes(bytes.memory.clone(), unsafe { - std::mem::transmute::, MemoryMap<'static>>( - bytes.memory_map.clone(), - ) - }), - }); - return chalk_ir::ConstData { - ty: convert_ty_for_result(interner, value_const.ty), - value, - } - .intern(Interner); - } - rustc_type_ir::ConstKind::Error(_) => { - chalk_ir::ConstValue::Concrete(chalk_ir::ConcreteConst { - interned: ConstScalar::Unknown, - }) - } - rustc_type_ir::ConstKind::Expr(_) => unimplemented!(), - }; - chalk_ir::ConstData { ty: crate::TyKind::Error.intern(Interner), value }.intern(Interner) -} - -pub fn convert_region_for_result<'db>( - interner: DbInterner<'db>, - region: Region<'db>, -) -> crate::Lifetime { - let lifetime = match region.kind() { - rustc_type_ir::RegionKind::ReEarlyParam(early) => { - let placeholder = lt_to_placeholder_idx(interner.db, early.id, early.index); - chalk_ir::LifetimeData::Placeholder(placeholder) - } - rustc_type_ir::RegionKind::ReBound(db, bound) => { - chalk_ir::LifetimeData::BoundVar(chalk_ir::BoundVar::new( - chalk_ir::DebruijnIndex::new(db.as_u32()), - bound.var.as_usize(), - )) - } - rustc_type_ir::RegionKind::RePlaceholder(_placeholder) => unimplemented!( - "A `rustc_type_ir::RegionKind::RePlaceholder` doesn't have a direct \ - correspondence in Chalk, as it represents a universally instantiated `Bound`.\n\ - It therefore feels safer to leave it panicking, but if you hit this panic \ - feel free to do the same as in `rustc_type_ir::RegionKind::ReBound` here." 
- ), - rustc_type_ir::RegionKind::ReLateParam(_) => unimplemented!(), - rustc_type_ir::RegionKind::ReStatic => chalk_ir::LifetimeData::Static, - rustc_type_ir::RegionKind::ReVar(vid) => { - chalk_ir::LifetimeData::InferenceVar(chalk_ir::InferenceVar::from(vid.as_u32())) - } - rustc_type_ir::RegionKind::ReErased => chalk_ir::LifetimeData::Erased, - rustc_type_ir::RegionKind::ReError(_) => chalk_ir::LifetimeData::Error, - }; - chalk_ir::Lifetime::new(Interner, lifetime) -} - -pub trait InferenceVarExt { - fn to_vid(self) -> rustc_type_ir::TyVid; - fn from_vid(vid: rustc_type_ir::TyVid) -> InferenceVar; -} - -impl InferenceVarExt for InferenceVar { - fn to_vid(self) -> rustc_type_ir::TyVid { - rustc_type_ir::TyVid::from_u32(self.index()) - } - fn from_vid(vid: rustc_type_ir::TyVid) -> InferenceVar { - InferenceVar::from(vid.as_u32()) +impl<'db> ChalkToNextSolver<'db, crate::lower::ImplTraitIdx<'db>> for crate::ImplTraitIdx { + fn to_nextsolver(&self, _interner: DbInterner<'db>) -> crate::lower::ImplTraitIdx<'db> { + crate::lower::ImplTraitIdx::from_raw(self.into_raw()) } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs index 70b6f20ede04d..3438b755fb9ec 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs @@ -13,7 +13,7 @@ use rustc_type_ir::{ }; use smallvec::SmallVec; -use crate::next_solver::TraitIdWrapper; +use crate::next_solver::{InternedWrapperNoDebug, TraitIdWrapper}; use super::{Binder, BoundVarKinds, DbInterner, Region, Ty, interned_vec_db}; @@ -171,9 +171,6 @@ impl<'db> rustc_type_ir::relate::Relate> for BoundExistentialPre } } -#[derive(PartialEq, Eq, Hash, PartialOrd, Ord, Clone)] -pub struct InternedWrapperNoDebug(pub(crate) T); - #[salsa::interned(constructor = new_)] pub struct Predicate<'db> { #[returns(ref)] diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs index 8932f519785c0..7cf23b82f63d9 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs @@ -26,11 +26,11 @@ use rustc_type_ir::{ use crate::{ ImplTraitId, db::HirDatabase, - interner::InternedWrapperNoDebug, next_solver::{ AdtDef, Binder, CallableIdWrapper, Clause, ClauseKind, ClosureIdWrapper, Const, CoroutineIdWrapper, FnSig, GenericArg, PolyFnSig, Region, TraitRef, TypeAliasIdWrapper, abi::Safety, + interner::InternedWrapperNoDebug, mapping::ChalkToNextSolver, util::{CoroutineArgsExt, IntegerTypeExt}, }, @@ -531,7 +531,7 @@ impl<'db> Ty<'db> { TyKind::Alias(AliasTyKind::Opaque, opaque_ty) => { match db.lookup_intern_impl_trait_id(opaque_ty.def_id.expect_opaque_ty()) { ImplTraitId::ReturnTypeImplTrait(func, idx) => { - db.return_type_impl_traits_ns(func).map(|it| { + db.return_type_impl_traits(func).map(|it| { let data = (*it).as_ref().map_bound(|rpit| { &rpit.impl_traits[idx.to_nextsolver(interner)].predicates }); @@ -540,7 +540,7 @@ impl<'db> Ty<'db> { }) } ImplTraitId::TypeAliasImplTrait(alias, idx) => { - db.type_alias_impl_traits_ns(alias).map(|it| { + db.type_alias_impl_traits(alias).map(|it| { let data = (*it).as_ref().map_bound(|rpit| { &rpit.impl_traits[idx.to_nextsolver(interner)].predicates }); @@ -575,7 +575,7 @@ impl<'db> Ty<'db> { TypeOrConstParamData::TypeParamData(p) => match p.provenance { TypeParamProvenance::ArgumentImplTrait => { let 
predicates = db - .generic_predicates_ns(param.id.parent()) + .generic_predicates(param.id.parent()) .instantiate_identity() .into_iter() .flatten() diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/util.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/util.rs index 97f536305805e..bb0d0552c7109 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/util.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/util.rs @@ -1,46 +1,42 @@ //! Various utilities for the next-trait-solver. -use std::iter; -use std::ops::{self, ControlFlow}; +use std::{ + iter, + ops::{self, ControlFlow}, +}; use base_db::Crate; -use hir_def::lang_item::LangItem; -use hir_def::{BlockId, HasModule}; +use hir_def::{BlockId, HasModule, lang_item::LangItem}; use intern::sym; use la_arena::Idx; use rustc_abi::{Float, HasDataLayout, Integer, IntegerType, Primitive, ReprOptions}; -use rustc_type_ir::data_structures::IndexMap; -use rustc_type_ir::inherent::{ - AdtDef, GenericArg as _, GenericArgs as _, ParamEnv as _, SliceLike, Ty as _, -}; -use rustc_type_ir::lang_items::SolverTraitLangItem; -use rustc_type_ir::solve::SizedTraitKind; -use rustc_type_ir::{ - BoundVar, Canonical, DebruijnIndex, GenericArgKind, INNERMOST, Interner, PredicatePolarity, - TypeVisitableExt, -}; use rustc_type_ir::{ - ConstKind, CoroutineArgs, FloatTy, IntTy, RegionKind, TypeFolder, TypeSuperFoldable, - TypeSuperVisitable, TypeVisitor, UintTy, UniverseIndex, inherent::IntoKind, + ConstKind, CoroutineArgs, DebruijnIndex, FloatTy, GenericArgKind, INNERMOST, IntTy, Interner, + PredicatePolarity, RegionKind, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeSuperVisitable, + TypeVisitableExt, TypeVisitor, UintTy, UniverseIndex, + inherent::{ + AdtDef, GenericArg as _, GenericArgs as _, IntoKind, ParamEnv as _, SliceLike, Ty as _, + }, + lang_items::SolverTraitLangItem, + solve::SizedTraitKind, }; -use rustc_type_ir::{InferCtxtLike, TypeFoldable}; -use crate::lower_nextsolver::{LifetimeElisionKind, TyLoweringContext}; -use crate::next_solver::infer::InferCtxt; -use crate::next_solver::{ - BoundConst, FxIndexMap, ParamEnv, Placeholder, PlaceholderConst, PlaceholderRegion, -}; use crate::{ db::HirDatabase, + lower::{LifetimeElisionKind, TyLoweringContext}, method_resolution::{TraitImpls, TyFingerprint}, + next_solver::{ + BoundConst, FxIndexMap, ParamEnv, Placeholder, PlaceholderConst, PlaceholderRegion, + infer::InferCtxt, + }, }; -use super::fold::{BoundVarReplacer, FnMutDelegate}; use super::{ AliasTerm, AliasTy, Binder, BoundRegion, BoundTy, BoundTyKind, BoundVarKind, BoundVarKinds, - CanonicalVars, Clause, ClauseKind, Clauses, Const, DbInterner, EarlyBinder, GenericArg, - GenericArgs, Predicate, PredicateKind, ProjectionPredicate, Region, SolverContext, SolverDefId, - Term, TraitPredicate, TraitRef, Ty, TyKind, + Clause, ClauseKind, Clauses, Const, DbInterner, EarlyBinder, GenericArgs, Predicate, + PredicateKind, ProjectionPredicate, Region, SolverDefId, Term, TraitPredicate, TraitRef, Ty, + TyKind, + fold::{BoundVarReplacer, FnMutDelegate}, }; #[derive(Clone, Debug)] @@ -510,151 +506,6 @@ pub fn apply_args_to_binder<'db, T: TypeFoldable>>( b.skip_binder().fold_with(&mut instantiate) } -pub(crate) fn mini_canonicalize<'db, T: TypeFoldable>>( - mut context: SolverContext<'db>, - val: T, -) -> Canonical, T> { - let mut canon = MiniCanonicalizer { - context: &mut context, - db: DebruijnIndex::ZERO, - vars: IndexMap::default(), - }; - let canon_val = val.fold_with(&mut canon); - let vars = canon.vars; - 
Canonical { - value: canon_val, - max_universe: UniverseIndex::from_u32(1), - variables: CanonicalVars::new_from_iter( - context.cx(), - vars.iter().enumerate().map(|(idx, (k, _v))| match (*k).kind() { - GenericArgKind::Type(ty) => match ty.kind() { - TyKind::Int(..) | TyKind::Uint(..) => rustc_type_ir::CanonicalVarKind::Int, - TyKind::Float(..) => rustc_type_ir::CanonicalVarKind::Float, - _ => rustc_type_ir::CanonicalVarKind::Ty { - ui: UniverseIndex::ZERO, - sub_root: BoundVar::from_usize(idx), - }, - }, - GenericArgKind::Lifetime(_) => { - rustc_type_ir::CanonicalVarKind::Region(UniverseIndex::ZERO) - } - GenericArgKind::Const(_) => { - rustc_type_ir::CanonicalVarKind::Const(UniverseIndex::ZERO) - } - }), - ), - } -} - -struct MiniCanonicalizer<'a, 'db> { - context: &'a mut SolverContext<'db>, - db: DebruijnIndex, - vars: IndexMap, usize>, -} - -impl<'db> TypeFolder> for MiniCanonicalizer<'_, 'db> { - fn cx(&self) -> DbInterner<'db> { - self.context.cx() - } - - fn fold_binder>>( - &mut self, - t: rustc_type_ir::Binder, T>, - ) -> rustc_type_ir::Binder, T> { - self.db.shift_in(1); - let res = t.map_bound(|t| t.fold_with(self)); - self.db.shift_out(1); - res - } - - fn fold_ty(&mut self, t: Ty<'db>) -> Ty<'db> { - match t.kind() { - rustc_type_ir::TyKind::Bound(db, _) => { - if db >= self.db { - panic!("Unexpected bound var"); - } - t - } - rustc_type_ir::TyKind::Infer(infer) => { - let t = match infer { - rustc_type_ir::InferTy::TyVar(vid) => { - self.context.opportunistic_resolve_ty_var(vid) - } - rustc_type_ir::InferTy::IntVar(vid) => { - self.context.opportunistic_resolve_int_var(vid) - } - rustc_type_ir::InferTy::FloatVar(vid) => { - self.context.opportunistic_resolve_float_var(vid) - } - _ => t, - }; - let len = self.vars.len(); - let var = *self.vars.entry(t.into()).or_insert(len); - Ty::new( - self.cx(), - TyKind::Bound( - self.db, - BoundTy { kind: super::BoundTyKind::Anon, var: BoundVar::from_usize(var) }, - ), - ) - } - _ => t.super_fold_with(self), - } - } - - fn fold_region( - &mut self, - r: as rustc_type_ir::Interner>::Region, - ) -> as rustc_type_ir::Interner>::Region { - match r.kind() { - RegionKind::ReBound(db, _) => { - if db >= self.db { - panic!("Unexpected bound var"); - } - r - } - RegionKind::ReVar(_vid) => { - let len = self.vars.len(); - let var = *self.vars.entry(r.into()).or_insert(len); - Region::new( - self.cx(), - RegionKind::ReBound( - self.db, - BoundRegion { - kind: super::BoundRegionKind::Anon, - var: BoundVar::from_usize(var), - }, - ), - ) - } - _ => r, - } - } - - fn fold_const( - &mut self, - c: as rustc_type_ir::Interner>::Const, - ) -> as rustc_type_ir::Interner>::Const { - match c.kind() { - ConstKind::Bound(db, _) => { - if db >= self.db { - panic!("Unexpected bound var"); - } - c - } - ConstKind::Infer(_infer) => { - let len = self.vars.len(); - let var = *self.vars.entry(c.into()).or_insert(len); - Const::new( - self.cx(), - ConstKind::Bound(self.db, BoundConst { var: BoundVar::from_usize(var) }), - ) - } - _ => c.super_fold_with(self), - } - } -} - pub fn explicit_item_bounds<'db>( interner: DbInterner<'db>, def_id: SolverDefId, @@ -713,7 +564,7 @@ pub fn explicit_item_bounds<'db>( match full_id { crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => { let datas = db - .return_type_impl_traits_ns(func) + .return_type_impl_traits(func) .expect("impl trait id without impl traits"); let datas = (*datas).as_ref().skip_binder(); let data = &datas.impl_traits[Idx::from_raw(idx.into_raw())]; @@ -721,7 +572,7 @@ pub fn explicit_item_bounds<'db>( } 
crate::ImplTraitId::TypeAliasImplTrait(alias, idx) => { let datas = db - .type_alias_impl_traits_ns(alias) + .type_alias_impl_traits(alias) .expect("impl trait id without impl traits"); let datas = (*datas).as_ref().skip_binder(); let data = &datas.impl_traits[Idx::from_raw(idx.into_raw())]; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs index 76cd5f7ab3302..bc4701970c76c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs @@ -44,7 +44,7 @@ fn foo() -> i32 { "body_shim", "body_with_source_map_shim", "trait_environment_shim", - "return_type_impl_traits_ns_shim", + "return_type_impl_traits_shim", "expr_scopes_shim", "lang_item", "crate_lang_items", @@ -131,7 +131,7 @@ fn baz() -> i32 { "body_shim", "body_with_source_map_shim", "trait_environment_shim", - "return_type_impl_traits_ns_shim", + "return_type_impl_traits_shim", "expr_scopes_shim", "lang_item", "crate_lang_items", @@ -143,7 +143,7 @@ fn baz() -> i32 { "body_shim", "body_with_source_map_shim", "trait_environment_shim", - "return_type_impl_traits_ns_shim", + "return_type_impl_traits_shim", "expr_scopes_shim", "infer_shim", "function_signature_shim", @@ -151,7 +151,7 @@ fn baz() -> i32 { "body_shim", "body_with_source_map_shim", "trait_environment_shim", - "return_type_impl_traits_ns_shim", + "return_type_impl_traits_shim", "expr_scopes_shim", ] "#]], @@ -585,8 +585,8 @@ fn main() { "crate_lang_items", "attrs_shim", "attrs_shim", - "generic_predicates_ns_shim", - "return_type_impl_traits_ns_shim", + "generic_predicates_shim", + "return_type_impl_traits_shim", "infer_shim", "function_signature_shim", "function_signature_with_source_map_shim", @@ -594,7 +594,7 @@ fn main() { "expr_scopes_shim", "struct_signature_shim", "struct_signature_with_source_map_shim", - "generic_predicates_ns_shim", + "generic_predicates_shim", "value_ty_shim", "VariantFields::firewall_", "VariantFields::query_", @@ -608,9 +608,9 @@ fn main() { "trait_impls_in_crate_shim", "impl_trait_with_diagnostics_shim", "impl_self_ty_with_diagnostics_shim", - "generic_predicates_ns_shim", + "generic_predicates_shim", "value_ty_shim", - "generic_predicates_ns_shim", + "generic_predicates_shim", ] "#]], ); @@ -682,13 +682,13 @@ fn main() { "attrs_shim", "attrs_shim", "attrs_shim", - "generic_predicates_ns_shim", - "return_type_impl_traits_ns_shim", + "generic_predicates_shim", + "return_type_impl_traits_shim", "infer_shim", "function_signature_with_source_map_shim", "expr_scopes_shim", "struct_signature_with_source_map_shim", - "generic_predicates_ns_shim", + "generic_predicates_shim", "VariantFields::query_", "inherent_impls_in_crate_shim", "impl_signature_with_source_map_shim", @@ -697,8 +697,8 @@ fn main() { "trait_impls_in_crate_shim", "impl_trait_with_diagnostics_shim", "impl_self_ty_with_diagnostics_shim", - "generic_predicates_ns_shim", - "generic_predicates_ns_shim", + "generic_predicates_shim", + "generic_predicates_shim", ] "#]], ); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs deleted file mode 100644 index fe4cf7a3da527..0000000000000 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs +++ /dev/null @@ -1,155 +0,0 @@ -//! Implementation of Chalk debug helper functions using TLS. 
-use std::fmt::{self, Display}; - -use itertools::Itertools; -use span::Edition; - -use crate::{ - CallableDefId, Interner, ProjectionTyExt, chalk_db, db::HirDatabase, from_assoc_type_id, - from_chalk_trait_id, mapping::from_chalk, -}; -use hir_def::{AdtId, ItemContainerId, Lookup, TypeAliasId}; - -#[allow(unused)] -pub(crate) use unsafe_tls::{set_current_program, with_current_program}; - -pub(crate) struct DebugContext<'a>(&'a dyn HirDatabase); - -impl DebugContext<'_> { - pub(crate) fn debug_struct_id( - &self, - id: chalk_db::AdtId, - f: &mut fmt::Formatter<'_>, - ) -> Result<(), fmt::Error> { - let name = match id.0 { - AdtId::StructId(it) => self.0.struct_signature(it).name.clone(), - AdtId::UnionId(it) => self.0.union_signature(it).name.clone(), - AdtId::EnumId(it) => self.0.enum_signature(it).name.clone(), - }; - name.display(self.0, Edition::LATEST).fmt(f)?; - Ok(()) - } - - pub(crate) fn debug_trait_id( - &self, - id: chalk_db::TraitId, - f: &mut fmt::Formatter<'_>, - ) -> Result<(), fmt::Error> { - let trait_: hir_def::TraitId = from_chalk_trait_id(id); - let trait_data = self.0.trait_signature(trait_); - trait_data.name.display(self.0, Edition::LATEST).fmt(f)?; - Ok(()) - } - - pub(crate) fn debug_assoc_type_id( - &self, - id: chalk_db::AssocTypeId, - fmt: &mut fmt::Formatter<'_>, - ) -> Result<(), fmt::Error> { - let type_alias: TypeAliasId = from_assoc_type_id(id); - let type_alias_data = self.0.type_alias_signature(type_alias); - let trait_ = match type_alias.lookup(self.0).container { - ItemContainerId::TraitId(t) => t, - _ => panic!("associated type not in trait"), - }; - let trait_data = self.0.trait_signature(trait_); - write!( - fmt, - "{}::{}", - trait_data.name.display(self.0, Edition::LATEST), - type_alias_data.name.display(self.0, Edition::LATEST) - )?; - Ok(()) - } - - pub(crate) fn debug_projection_ty( - &self, - projection_ty: &chalk_ir::ProjectionTy, - fmt: &mut fmt::Formatter<'_>, - ) -> Result<(), fmt::Error> { - let type_alias = from_assoc_type_id(projection_ty.associated_ty_id); - let type_alias_data = self.0.type_alias_signature(type_alias); - let trait_ = match type_alias.lookup(self.0).container { - ItemContainerId::TraitId(t) => t, - _ => panic!("associated type not in trait"), - }; - let trait_name = &self.0.trait_signature(trait_).name; - let trait_ref = projection_ty.trait_ref(self.0); - let trait_params = trait_ref.substitution.as_slice(Interner); - let self_ty = trait_ref.self_type_parameter(Interner); - write!(fmt, "<{self_ty:?} as {}", trait_name.display(self.0, Edition::LATEST))?; - if trait_params.len() > 1 { - write!( - fmt, - "<{}>", - trait_params[1..].iter().format_with(", ", |x, f| f(&format_args!("{x:?}"))), - )?; - } - write!(fmt, ">::{}", type_alias_data.name.display(self.0, Edition::LATEST))?; - - let proj_params = &projection_ty.substitution.as_slice(Interner)[trait_params.len()..]; - if !proj_params.is_empty() { - write!( - fmt, - "<{}>", - proj_params.iter().format_with(", ", |x, f| f(&format_args!("{x:?}"))), - )?; - } - - Ok(()) - } - - pub(crate) fn debug_fn_def_id( - &self, - fn_def_id: chalk_ir::FnDefId, - fmt: &mut fmt::Formatter<'_>, - ) -> Result<(), fmt::Error> { - let def: CallableDefId = from_chalk(self.0, fn_def_id); - let name = match def { - CallableDefId::FunctionId(ff) => self.0.function_signature(ff).name.clone(), - CallableDefId::StructId(s) => self.0.struct_signature(s).name.clone(), - CallableDefId::EnumVariantId(e) => { - let loc = e.lookup(self.0); - loc.parent.enum_variants(self.0).variants[loc.index as 
usize].1.clone() - } - }; - match def { - CallableDefId::FunctionId(_) => { - write!(fmt, "{{fn {}}}", name.display(self.0, Edition::LATEST)) - } - CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => { - write!(fmt, "{{ctor {}}}", name.display(self.0, Edition::LATEST)) - } - } - } -} - -mod unsafe_tls { - use super::DebugContext; - use crate::db::HirDatabase; - use scoped_tls::scoped_thread_local; - - scoped_thread_local!(static PROGRAM: DebugContext<'_>); - - pub(crate) fn with_current_program( - op: impl for<'a> FnOnce(Option<&'a DebugContext<'a>>) -> R, - ) -> R { - if PROGRAM.is_set() { PROGRAM.with(|prog| op(Some(prog))) } else { op(None) } - } - - #[allow(dead_code)] - pub(crate) fn set_current_program(p: &dyn HirDatabase, op: OP) -> R - where - OP: FnOnce() -> R, - { - let ctx = DebugContext(p); - // we're transmuting the lifetime in the DebugContext to static. This is - // fine because we only keep the reference for the lifetime of this - // function, *and* the only way to access the context is through - // `with_current_program`, which hides the lifetime through the `for` - // type. - let static_p: &DebugContext<'static> = - unsafe { std::mem::transmute::<&DebugContext<'_>, &DebugContext<'static>>(&ctx) }; - PROGRAM.set(static_p, op) - } -} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs index 35c8a197f52cb..7f6d4ff17f9fc 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs @@ -3,33 +3,26 @@ use core::fmt; use std::hash::Hash; -use chalk_ir::{DebruijnIndex, GoalData, fold::TypeFoldable}; - use base_db::Crate; use hir_def::{BlockId, TraitId, lang_item::LangItem}; use hir_expand::name::Name; use intern::sym; use rustc_next_trait_solver::solve::{HasChanged, SolverDelegateEvalExt}; use rustc_type_ir::{ - InferCtxtLike, TypingMode, - inherent::{IntoKind, SliceLike, Span as _}, + TypingMode, + inherent::{IntoKind, Span as _}, solve::Certainty, }; -use span::Edition; use triomphe::Arc; use crate::{ - AliasEq, AliasTy, Canonical, DomainGoal, Goal, InEnvironment, Interner, ProjectionTyExt, - TraitRefExt, TyKind, WhereClause, db::HirDatabase, next_solver::{ - DbInterner, GenericArg, ParamEnv, Predicate, SolverContext, Span, + Canonical, DbInterner, GenericArgs, Goal, ParamEnv, Predicate, SolverContext, Span, Ty, + TyKind, infer::{DbInternerInferExt, InferCtxt, traits::ObligationCause}, - mapping::{ChalkToNextSolver, convert_canonical_args_for_result}, obligation_ctxt::ObligationCtxt, - util::mini_canonicalize, }, - utils::UnevaluatedConstEvaluatorFolder, }; /// A set of clauses that we assume to be true. E.g. 
if we are inside this function: @@ -42,7 +35,7 @@ pub struct TraitEnvironment<'db> { pub krate: Crate, pub block: Option, // FIXME make this a BTreeMap - traits_from_clauses: Box<[(crate::next_solver::Ty<'db>, TraitId)]>, + traits_from_clauses: Box<[(Ty<'db>, TraitId)]>, pub env: ParamEnv<'db>, } @@ -59,7 +52,7 @@ impl<'db> TraitEnvironment<'db> { pub fn new( krate: Crate, block: Option, - traits_from_clauses: Box<[(crate::next_solver::Ty<'db>, TraitId)]>, + traits_from_clauses: Box<[(Ty<'db>, TraitId)]>, env: ParamEnv<'db>, ) -> Arc { Arc::new(TraitEnvironment { krate, block, traits_from_clauses, env }) @@ -70,10 +63,7 @@ impl<'db> TraitEnvironment<'db> { Arc::make_mut(this).block = Some(block); } - pub fn traits_in_scope_from_clauses( - &self, - ty: crate::next_solver::Ty<'db>, - ) -> impl Iterator + '_ { + pub fn traits_in_scope_from_clauses(&self, ty: Ty<'db>) -> impl Iterator + '_ { self.traits_from_clauses .iter() .filter_map(move |(self_ty, trait_id)| (*self_ty == ty).then_some(*trait_id)) @@ -83,92 +73,19 @@ impl<'db> TraitEnvironment<'db> { /// This should be used in `hir` only. pub fn structurally_normalize_ty<'db>( infcx: &InferCtxt<'db>, - ty: crate::next_solver::Ty<'db>, + ty: Ty<'db>, env: Arc>, -) -> crate::next_solver::Ty<'db> { - let crate::next_solver::TyKind::Alias(..) = ty.kind() else { return ty }; +) -> Ty<'db> { + let TyKind::Alias(..) = ty.kind() else { return ty }; let mut ocx = ObligationCtxt::new(infcx); let ty = ocx.structurally_normalize_ty(&ObligationCause::dummy(), env.env, ty).unwrap_or(ty); ty.replace_infer_with_error(infcx.interner) } -fn identity_subst( - binders: chalk_ir::CanonicalVarKinds, -) -> chalk_ir::Canonical> { - let identity_subst = chalk_ir::Substitution::from_iter( - Interner, - binders.iter(Interner).enumerate().map(|(index, c)| { - let index_db = chalk_ir::BoundVar::new(DebruijnIndex::INNERMOST, index); - match &c.kind { - chalk_ir::VariableKind::Ty(_) => { - chalk_ir::GenericArgData::Ty(TyKind::BoundVar(index_db).intern(Interner)) - .intern(Interner) - } - chalk_ir::VariableKind::Lifetime => chalk_ir::GenericArgData::Lifetime( - chalk_ir::LifetimeData::BoundVar(index_db).intern(Interner), - ) - .intern(Interner), - chalk_ir::VariableKind::Const(ty) => chalk_ir::GenericArgData::Const( - chalk_ir::ConstData { - ty: ty.clone(), - value: chalk_ir::ConstValue::BoundVar(index_db), - } - .intern(Interner), - ) - .intern(Interner), - } - }), - ); - chalk_ir::Canonical { binders, value: identity_subst } -} - -fn solve_nextsolver<'db>( - db: &'db dyn HirDatabase, - krate: Crate, - block: Option, - goal: &chalk_ir::UCanonical>>, -) -> Result< - (HasChanged, Certainty, rustc_type_ir::Canonical, Vec>>), - rustc_type_ir::solve::NoSolution, -> { - // FIXME: should use analysis_in_body, but that needs GenericDefId::Block - let context = SolverContext( - DbInterner::new_with(db, Some(krate), block) - .infer_ctxt() - .build(TypingMode::non_body_analysis()), - ); - - match goal.canonical.value.goal.data(Interner) { - // FIXME: args here should be...what? 
not empty - GoalData::All(goals) if goals.is_empty(Interner) => { - return Ok((HasChanged::No, Certainty::Yes, mini_canonicalize(context, vec![]))); - } - _ => {} - } - - let goal = goal.canonical.to_nextsolver(context.cx()); - tracing::info!(?goal); - - let (goal, var_values) = context.instantiate_canonical(&goal); - tracing::info!(?var_values); - - let res = context.evaluate_root_goal(goal, Span::dummy(), None); - - let vars = - var_values.var_values.iter().map(|g| context.0.resolve_vars_if_possible(g)).collect(); - let canonical_var_values = mini_canonicalize(context, vars); - - let res = res.map(|r| (r.has_changed, r.certainty, canonical_var_values)); - - tracing::debug!("solve_nextsolver({:?}) => {:?}", goal, res); - - res -} - #[derive(Clone, Debug, PartialEq)] pub enum NextTraitSolveResult { - Certain(chalk_ir::Canonical>), - Uncertain(chalk_ir::Canonical>), + Certain, + Uncertain, NoSolution, } @@ -178,75 +95,17 @@ impl NextTraitSolveResult { } pub fn certain(&self) -> bool { - matches!(self, NextTraitSolveResult::Certain(..)) + matches!(self, NextTraitSolveResult::Certain) } pub fn uncertain(&self) -> bool { - matches!(self, NextTraitSolveResult::Uncertain(..)) - } -} - -pub fn next_trait_solve( - db: &dyn HirDatabase, - krate: Crate, - block: Option, - goal: Canonical>, -) -> NextTraitSolveResult { - let detail = match &goal.value.goal.data(Interner) { - GoalData::DomainGoal(DomainGoal::Holds(WhereClause::Implemented(it))) => { - db.trait_signature(it.hir_trait_id()).name.display(db, Edition::LATEST).to_string() - } - GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_owned(), - _ => "??".to_owned(), - }; - let _p = tracing::info_span!("next_trait_solve", ?detail).entered(); - tracing::info!("next_trait_solve({:?})", goal.value.goal); - - if let GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(AliasEq { - alias: AliasTy::Projection(projection_ty), - .. - }))) = &goal.value.goal.data(Interner) - && let TyKind::BoundVar(_) = projection_ty.self_type_parameter(db).kind(Interner) - { - // Hack: don't ask Chalk to normalize with an unknown self type, it'll say that's impossible - // FIXME - return NextTraitSolveResult::Uncertain(identity_subst(goal.binders.clone())); - } - - // Chalk see `UnevaluatedConst` as a unique concrete value, but we see it as an alias for another const. So - // we should get rid of it when talking to chalk. - let goal = goal - .try_fold_with(&mut UnevaluatedConstEvaluatorFolder { db }, DebruijnIndex::INNERMOST) - .unwrap(); - - // We currently don't deal with universes (I think / hope they're not yet - // relevant for our use cases?) - let u_canonical = chalk_ir::UCanonical { canonical: goal, universes: 1 }; - tracing::info!(?u_canonical); - - let next_solver_res = solve_nextsolver(db, krate, block, &u_canonical); - - match next_solver_res { - Err(_) => NextTraitSolveResult::NoSolution, - Ok((_, Certainty::Yes, args)) => NextTraitSolveResult::Certain( - convert_canonical_args_for_result(DbInterner::new_with(db, Some(krate), block), args), - ), - Ok((_, Certainty::Maybe { .. 
}, args)) => { - let subst = convert_canonical_args_for_result( - DbInterner::new_with(db, Some(krate), block), - args, - ); - NextTraitSolveResult::Uncertain(chalk_ir::Canonical { - binders: subst.binders, - value: subst.value.subst, - }) - } + matches!(self, NextTraitSolveResult::Uncertain) } } pub fn next_trait_solve_canonical_in_ctxt<'db>( infer_ctxt: &InferCtxt<'db>, - goal: crate::next_solver::Canonical<'db, crate::next_solver::Goal<'db, Predicate<'db>>>, + goal: Canonical<'db, Goal<'db, Predicate<'db>>>, ) -> NextTraitSolveResult { let context = SolverContext(infer_ctxt.clone()); @@ -257,33 +116,21 @@ pub fn next_trait_solve_canonical_in_ctxt<'db>( let res = context.evaluate_root_goal(goal, Span::dummy(), None); - let vars = - var_values.var_values.iter().map(|g| context.0.resolve_vars_if_possible(g)).collect(); - let canonical_var_values = mini_canonicalize(context, vars); - - let res = res.map(|r| (r.has_changed, r.certainty, canonical_var_values)); + let res = res.map(|r| (r.has_changed, r.certainty)); tracing::debug!("solve_nextsolver({:?}) => {:?}", goal, res); match res { Err(_) => NextTraitSolveResult::NoSolution, - Ok((_, Certainty::Yes, args)) => NextTraitSolveResult::Certain( - convert_canonical_args_for_result(infer_ctxt.interner, args), - ), - Ok((_, Certainty::Maybe { .. }, args)) => { - let subst = convert_canonical_args_for_result(infer_ctxt.interner, args); - NextTraitSolveResult::Uncertain(chalk_ir::Canonical { - binders: subst.binders, - value: subst.value.subst, - }) - } + Ok((_, Certainty::Yes)) => NextTraitSolveResult::Certain, + Ok((_, Certainty::Maybe { .. })) => NextTraitSolveResult::Uncertain, } } /// Solve a trait goal using next trait solver. pub fn next_trait_solve_in_ctxt<'db, 'a>( infer_ctxt: &'a InferCtxt<'db>, - goal: crate::next_solver::Goal<'db, crate::next_solver::Predicate<'db>>, + goal: Goal<'db, Predicate<'db>>, ) -> Result<(HasChanged, Certainty), rustc_type_ir::solve::NoSolution> { tracing::info!(?goal); @@ -377,7 +224,7 @@ impl FnTrait { /// This should not be used in `hir-ty`, only in `hir`. pub fn implements_trait_unique<'db>( - ty: crate::next_solver::Ty<'db>, + ty: Ty<'db>, db: &'db dyn HirDatabase, env: Arc>, trait_: TraitId, @@ -392,7 +239,7 @@ pub fn implements_trait_unique_with_args<'db>( db: &'db dyn HirDatabase, env: Arc>, trait_: TraitId, - args: crate::next_solver::GenericArgs<'db>, + args: GenericArgs<'db>, ) -> bool { implements_trait_unique_impl(db, env, trait_, &mut |_| args) } @@ -401,7 +248,7 @@ fn implements_trait_unique_impl<'db>( db: &'db dyn HirDatabase, env: Arc>, trait_: TraitId, - create_args: &mut dyn FnMut(&InferCtxt<'db>) -> crate::next_solver::GenericArgs<'db>, + create_args: &mut dyn FnMut(&InferCtxt<'db>) -> GenericArgs<'db>, ) -> bool { let interner = DbInterner::new_with(db, Some(env.krate), env.block); // FIXME(next-solver): I believe this should be `PostAnalysis`. 
@@ -409,7 +256,7 @@ fn implements_trait_unique_impl<'db>( let args = create_args(&infcx); let trait_ref = rustc_type_ir::TraitRef::new_from_args(interner, trait_.into(), args); - let goal = crate::next_solver::Goal::new(interner, env.env, trait_ref); + let goal = Goal::new(interner, env.env, trait_ref); let result = crate::traits::next_trait_solve_in_ctxt(&infcx, goal); matches!(result, Ok((_, Certainty::Yes))) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs index 15359922c80e7..ca5e33fe6ad00 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs @@ -1,40 +1,30 @@ //! Helper functions for working with def, which don't need to be a separate //! query, but can't be computed directly from `*Data` (ie, which need a `db`). -use std::{cell::LazyCell, iter}; +use std::cell::LazyCell; use base_db::{ Crate, target::{self, TargetData}, }; -use chalk_ir::{DebruijnIndex, fold::FallibleTypeFolder}; use hir_def::{ - EnumId, EnumVariantId, FunctionId, Lookup, TraitId, TypeAliasId, TypeOrConstParamId, + EnumId, EnumVariantId, FunctionId, Lookup, TraitId, db::DefDatabase, hir::generics::WherePredicate, lang_item::LangItem, resolver::{HasResolver, TypeNs}, type_ref::{TraitBoundModifier, TypeRef}, }; -use hir_expand::name::Name; use intern::sym; use rustc_abi::TargetDataLayout; -use rustc_hash::FxHashSet; -use rustc_type_ir::inherent::{IntoKind, SliceLike}; use smallvec::{SmallVec, smallvec}; use span::Edition; use crate::{ - ChalkTraitId, Const, ConstScalar, Interner, TargetFeatures, TraitRef, TraitRefExt, - consteval::unknown_const, + TargetFeatures, db::HirDatabase, layout::{Layout, TagEncoding}, mir::pad16, - next_solver::{ - DbInterner, - mapping::{ChalkToNextSolver, NextSolverToChalk, convert_args_for_result}, - }, - to_chalk_trait_id, }; pub(crate) fn fn_traits(db: &dyn DefDatabase, krate: Crate) -> impl Iterator + '_ { @@ -75,49 +65,6 @@ pub fn all_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> SmallVec<[Trai result } -/// Given a trait ref (`Self: Trait`), builds all the implied trait refs for -/// super traits. The original trait ref will be included. So the difference to -/// `all_super_traits` is that we keep track of type parameters; for example if -/// we have `Self: Trait` and `Trait: OtherTrait` we'll get -/// `Self: OtherTrait`. 
-pub(super) fn all_super_trait_refs( - db: &dyn HirDatabase, - trait_ref: TraitRef, - cb: impl FnMut(TraitRef) -> Option, -) -> Option { - let seen = iter::once(trait_ref.trait_id).collect(); - SuperTraits { db, seen, stack: vec![trait_ref] }.find_map(cb) -} - -struct SuperTraits<'a> { - db: &'a dyn HirDatabase, - stack: Vec, - seen: FxHashSet, -} - -impl SuperTraits<'_> { - fn elaborate(&mut self, trait_ref: &TraitRef) { - direct_super_trait_refs(self.db, trait_ref, |trait_ref| { - if !self.seen.contains(&trait_ref.trait_id) { - self.stack.push(trait_ref); - } - }); - } -} - -impl Iterator for SuperTraits<'_> { - type Item = TraitRef; - - fn next(&mut self) -> Option { - if let Some(next) = self.stack.pop() { - self.elaborate(&next); - Some(next) - } else { - None - } - } -} - fn direct_super_traits_cb(db: &dyn DefDatabase, trait_: TraitId, cb: impl FnMut(TraitId)) { let resolver = LazyCell::new(|| trait_.resolver(db)); let (generic_params, store) = db.generic_params_and_store(trait_.into()); @@ -148,49 +95,6 @@ fn direct_super_traits_cb(db: &dyn DefDatabase, trait_: TraitId, cb: impl FnMut( .for_each(cb); } -fn direct_super_trait_refs(db: &dyn HirDatabase, trait_ref: &TraitRef, cb: impl FnMut(TraitRef)) { - let interner = DbInterner::new_with(db, None, None); - let generic_params = db.generic_params(trait_ref.hir_trait_id().into()); - let trait_self = match generic_params.trait_self_param() { - Some(p) => TypeOrConstParamId { parent: trait_ref.hir_trait_id().into(), local_id: p }, - None => return, - }; - let trait_ref_args: crate::next_solver::GenericArgs<'_> = - trait_ref.substitution.to_nextsolver(interner); - db.generic_predicates_for_param_ns(trait_self.parent, trait_self, None) - .iter() - .filter_map(|pred| { - let pred = pred.kind(); - // FIXME: how to correctly handle higher-ranked bounds here? 
- let pred = pred.no_bound_vars().expect("FIXME unexpected higher-ranked trait bound"); - match pred { - rustc_type_ir::ClauseKind::Trait(t) => { - let t = - rustc_type_ir::EarlyBinder::bind(t).instantiate(interner, trait_ref_args); - let trait_id = to_chalk_trait_id(t.def_id().0); - - let substitution = - convert_args_for_result(interner, t.trait_ref.args.as_slice()); - let tr = chalk_ir::TraitRef { trait_id, substitution }; - Some(tr) - } - _ => None, - } - }) - .for_each(cb); -} - -pub(super) fn associated_type_by_name_including_super_traits( - db: &dyn HirDatabase, - trait_ref: TraitRef, - name: &Name, -) -> Option<(TraitRef, TypeAliasId)> { - all_super_trait_refs(db, trait_ref, |t| { - let assoc_type = t.hir_trait_id().trait_items(db).associated_type_by_name(name)?; - Some((t, assoc_type)) - }) -} - #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum Unsafety { Safe, @@ -263,41 +167,6 @@ pub fn is_fn_unsafe_to_call( } } -pub(crate) struct UnevaluatedConstEvaluatorFolder<'a> { - pub(crate) db: &'a dyn HirDatabase, -} - -impl FallibleTypeFolder for UnevaluatedConstEvaluatorFolder<'_> { - type Error = (); - - fn as_dyn(&mut self) -> &mut dyn FallibleTypeFolder { - self - } - - fn interner(&self) -> Interner { - Interner - } - - fn try_fold_const( - &mut self, - constant: Const, - _outer_binder: DebruijnIndex, - ) -> Result { - if let chalk_ir::ConstValue::Concrete(c) = &constant.data(Interner).value - && let ConstScalar::UnevaluatedConst(id, subst) = &c.interned - { - let interner = DbInterner::conjure(); - if let Ok(eval) = self.db.const_eval(*id, subst.to_nextsolver(interner), None) { - return Ok(eval.to_chalk(interner)); - } else { - return Ok(unknown_const(constant.data(Interner).ty.to_nextsolver(interner)) - .to_chalk(interner)); - } - } - Ok(constant) - } -} - pub(crate) fn detect_variant_from_bytes<'a>( layout: &'a Layout, db: &dyn HirDatabase, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs b/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs index 46898ddeec126..b57bf03f24727 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs @@ -128,7 +128,7 @@ impl<'db> Context<'db> { GenericDefId::AdtId(adt) => { let db = self.db; let mut add_constraints_from_variant = |variant| { - for (_, field) in db.field_types_ns(variant).iter() { + for (_, field) in db.field_types(variant).iter() { self.add_constraints_from_ty( field.instantiate_identity(), Variance::Covariant, diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs index b31bb248e8397..d61c2eca8347b 100644 --- a/src/tools/rust-analyzer/crates/hir/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir/src/display.rs @@ -484,7 +484,7 @@ impl<'db> HirDisplay<'db> for TypeParam { let param_data = ¶ms[self.id.local_id()]; let krate = self.id.parent().krate(f.db).id; let ty = self.ty(f.db).ty; - let predicates = f.db.generic_predicates_ns(self.id.parent()); + let predicates = f.db.generic_predicates(self.id.parent()); let predicates = predicates .instantiate_identity() .into_iter() diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index 78b4533a94b0e..48eafb0bd4c60 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -1271,7 +1271,7 @@ impl<'db> InstantiatedField<'db> { let interner = DbInterner::new_with(db, Some(krate.base()), None); let var_id = 
self.inner.parent.into(); - let field = db.field_types_ns(var_id)[self.inner.id]; + let field = db.field_types(var_id)[self.inner.id]; let ty = field.instantiate(interner, self.args); TypeNs::new(db, var_id, ty) } @@ -1350,7 +1350,7 @@ impl Field { /// context of the field definition. pub fn ty<'db>(&self, db: &'db dyn HirDatabase) -> TypeNs<'db> { let var_id = self.parent.into(); - let ty = db.field_types_ns(var_id)[self.id].skip_binder(); + let ty = db.field_types(var_id)[self.id].skip_binder(); TypeNs::new(db, var_id, ty) } @@ -1368,7 +1368,7 @@ impl Field { }; let interner = DbInterner::new_with(db, None, None); let args = generic_args_from_tys(interner, def_id.into(), generics.map(|ty| ty.ty)); - let ty = db.field_types_ns(var_id)[self.id].instantiate(interner, args); + let ty = db.field_types(var_id)[self.id].instantiate(interner, args); Type::new(db, var_id, ty) } @@ -3693,7 +3693,7 @@ impl GenericDef { }; expr_store_diagnostics(db, acc, &source_map); - push_ty_diagnostics(db, acc, db.generic_defaults_ns_with_diagnostics(def).1, &source_map); + push_ty_diagnostics(db, acc, db.generic_defaults_with_diagnostics(def).1, &source_map); push_ty_diagnostics( db, acc, @@ -4192,7 +4192,7 @@ impl TypeParam { /// parameter, not additional bounds that might be added e.g. by a method if /// the parameter comes from an impl! pub fn trait_bounds(self, db: &dyn HirDatabase) -> Vec { - db.generic_predicates_for_param_ns(self.id.parent(), self.id.into(), None) + db.generic_predicates_for_param(self.id.parent(), self.id.into(), None) .iter() .filter_map(|pred| match &pred.kind().skip_binder() { ClauseKind::Trait(trait_ref) => Some(Trait::from(trait_ref.def_id().0)), @@ -4282,7 +4282,7 @@ impl ConstParam { fn generic_arg_from_param(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option> { let local_idx = hir_ty::param_idx(db, id)?; - let defaults = db.generic_defaults_ns(id.parent); + let defaults = db.generic_defaults(id.parent); let ty = defaults.get(local_idx)?; // FIXME: This shouldn't be `instantiate_identity()`, we shouldn't leak `TyKind::Param`s. Some(ty.instantiate_identity()) @@ -4883,7 +4883,7 @@ impl<'db> Type<'db> { if variant_data.fields().is_empty() { vec![] } else { - let field_types = self.interner.db().field_types_ns(id); + let field_types = self.interner.db().field_types(id); variant_data .fields() .iter() @@ -5216,7 +5216,7 @@ impl<'db> Type<'db> { _ => return Vec::new(), }; - db.field_types_ns(variant_id) + db.field_types(variant_id) .iter() .map(|(local_id, ty)| { let def = Field { parent: variant_id.into(), id: local_id }; @@ -6450,7 +6450,7 @@ fn generic_args_from_tys<'db>( fn has_non_default_type_params(db: &dyn HirDatabase, generic_def: GenericDefId) -> bool { let params = db.generic_params(generic_def); - let defaults = db.generic_defaults_ns(generic_def); + let defaults = db.generic_defaults(generic_def); params .iter_type_or_consts() .filter(|(_, param)| matches!(param, TypeOrConstParamData::TypeParamData(_))) diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs index 8d2ba7e604e76..15eab14b88dfd 100644 --- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs +++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs @@ -712,8 +712,7 @@ impl<'db> SourceAnalyzer<'db> { let variant = self.infer()?.variant_resolution_for_expr_or_pat(expr_id)?; let variant_data = variant.fields(db); let field = FieldId { parent: variant, local_id: variant_data.field(&local_name)? 
};
-        let field_ty =
-            (*db.field_types_ns(variant).get(field.local_id)?).instantiate(interner, subst);
+        let field_ty = (*db.field_types(variant).get(field.local_id)?).instantiate(interner, subst);
         Some((
             field.into(),
             local,
@@ -735,8 +734,7 @@ impl<'db> SourceAnalyzer<'db> {
         let variant_data = variant.fields(db);
         let field = FieldId { parent: variant, local_id: variant_data.field(&field_name)? };
         let (adt, subst) = self.infer()?[pat_id.as_pat()?].as_adt()?;
-        let field_ty =
-            (*db.field_types_ns(variant).get(field.local_id)?).instantiate(interner, subst);
+        let field_ty = (*db.field_types(variant).get(field.local_id)?).instantiate(interner, subst);
         Some((
             field.into(),
             Type::new_with_resolver(db, &self.resolver, field_ty),
@@ -802,7 +800,7 @@ impl<'db> SourceAnalyzer<'db> {
             |variant: VariantId, subst: GenericArgs<'db>, container: &mut _| {
                 let fields = variant.fields(db);
                 let field = fields.field(&field_name.as_name())?;
-                let field_types = db.field_types_ns(variant);
+                let field_types = db.field_types(variant);
                 *container = Either::Right(field_types[field].instantiate(interner, subst));
                 let generic_def = match variant {
                     VariantId::EnumVariantId(it) => it.loc(db).parent.into(),
@@ -1255,7 +1253,7 @@ impl<'db> SourceAnalyzer<'db> {
         missing_fields: Vec<LocalFieldId>,
     ) -> Vec<(Field, Type<'db>)> {
         let interner = DbInterner::new_with(db, None, None);
-        let field_types = db.field_types_ns(variant);
+        let field_types = db.field_types(variant);
 
         missing_fields
             .into_iter()
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
index df3dc53f7c039..91fb4d0a67153 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
@@ -9393,7 +9393,7 @@ fn main(a$0: T) {}
             *a*
 
             ```rust
-            a: T
+            a: T
             ```
 
             ---
From b5263103b83cf26c432bc50a94cb4d88ff743a94 Mon Sep 17 00:00:00 2001
From: Lukas Wirth
Date: Mon, 20 Oct 2025 15:54:03 +0200
Subject: [PATCH 39/76] fix: Fix `signature_help` to proto conversion creating invalid utf16 offsets

---
 .../crates/rust-analyzer/src/lsp/to_proto.rs | 15 +++++++++++----
 1 file changed, 11 insertions(+), 4 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
index d51ddb86d197f..496d7caa1f95b 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -493,8 +493,15 @@ pub(crate) fn signature_help(
         .parameter_ranges()
         .iter()
         .map(|it| {
-            let start = call_info.signature[..it.start().into()].chars().count() as u32;
-            let end = call_info.signature[..it.end().into()].chars().count() as u32;
+            let start = call_info.signature[..it.start().into()]
+                .chars()
+                .map(|c| c.len_utf16())
+                .sum::<usize>() as u32;
+            let end = start
+                + call_info.signature[it.start().into()..it.end().into()]
+                    .chars()
+                    .map(|c| c.len_utf16())
+                    .sum::<usize>() as u32;
             [start, end]
         })
         .map(|label_offsets| lsp_types::ParameterInformation {
@@ -513,9 +520,9 @@ pub(crate) fn signature_help(
                 label.push_str(", ");
             }
             first = false;
-            let start = label.chars().count() as u32;
+            let start = label.len() as u32;
             label.push_str(param);
-            let end = label.chars().count() as u32;
+            let end = label.len() as u32;
             params.push(lsp_types::ParameterInformation {
                 label: lsp_types::ParameterLabel::LabelOffsets([start, end]),
                 documentation: None,
From 14c771a24c7075677ca792817a62654ff85c2e41 Mon Sep 17 00:00:00 2001
From: Chayim
Refael Friedman Date: Mon, 20 Oct 2025 19:00:42 +0300 Subject: [PATCH 40/76] Fix beta Clippy --- src/tools/rust-analyzer/crates/hir-ty/src/db.rs | 2 +- src/tools/rust-analyzer/xtask/src/release.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs index c79ff98578980..2ef7963322995 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs @@ -291,7 +291,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { // cycle_initial = crate::variance::variances_of_cycle_initial, cycle_result = crate::variance::variances_of_cycle_initial, )] - fn variances_of(&self, def: GenericDefId) -> VariancesOf<'_>; + fn variances_of<'db>(&'db self, def: GenericDefId) -> VariancesOf<'db>; } #[test] diff --git a/src/tools/rust-analyzer/xtask/src/release.rs b/src/tools/rust-analyzer/xtask/src/release.rs index d06a25c8929b5..13cb44ebed5ab 100644 --- a/src/tools/rust-analyzer/xtask/src/release.rs +++ b/src/tools/rust-analyzer/xtask/src/release.rs @@ -43,7 +43,7 @@ impl flags::Release { .unwrap_or_default(); let tags = cmd!(sh, "git tag --list").read()?; - let prev_tag = tags.lines().filter(|line| is_release_tag(line)).next_back().unwrap(); + let prev_tag = tags.lines().rfind(|line| is_release_tag(line)).unwrap(); let contents = changelog::get_changelog(sh, changelog_n, &commit, prev_tag, &today)?; let path = changelog_dir.join(format!("{today}-changelog-{changelog_n}.adoc")); From 37147c4135a0a10db1e10dc4d983a8fbedb9deb9 Mon Sep 17 00:00:00 2001 From: A4-Tacks Date: Tue, 21 Oct 2025 11:27:50 +0800 Subject: [PATCH 41/76] Add a FIXME for unordered fields --- .../src/handlers/convert_named_struct_to_tuple_struct.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs index 0847719d6922b..e518c39dabc27 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs @@ -187,6 +187,7 @@ fn process_struct_name_reference( return None; } + // FIXME: Processing RecordPat and RecordExpr for unordered fields, and insert RestPat let parent = full_path.syntax().parent()?; match_ast! { match parent { From e6656c19b4dff3f9f3440461860e4e8525444b84 Mon Sep 17 00:00:00 2001 From: A4-Tacks Date: Tue, 21 Oct 2025 11:16:04 +0800 Subject: [PATCH 42/76] Fix invalid RestPat for convert_tuple_struct_to_named_struct ```rust struct X$0(i8, i16, i32, i64); fn foo(X(a, .., d): X) {} ``` **Before this PR**: ```rust struct X { field1: i8, field2: i16, field3: i32, field4: i64 } fn foo(X { field1: a, field2: .., field3: d }: X) {} ``` **After this PR**: ```rust struct X { field1: i8, field2: i16, field3: i32, field4: i64 } fn foo(X { field1: a, field4: d, .. 
}: X) {} ``` --- .../convert_tuple_struct_to_named_struct.rs | 64 ++++++++++++++++--- 1 file changed, 56 insertions(+), 8 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs index 3d78895477b31..61d844928a8a6 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs @@ -154,14 +154,7 @@ fn edit_struct_references( ast::TupleStructPat(tuple_struct_pat) => { Some(make.record_pat_with_fields( tuple_struct_pat.path()?, - ast::make::record_pat_field_list(tuple_struct_pat.fields().zip(names).map( - |(pat, name)| { - ast::make::record_pat_field( - ast::make::name_ref(&name.to_string()), - pat, - ) - }, - ), None), + generate_record_pat_list(&tuple_struct_pat, names), ).syntax().clone()) }, // for tuple struct creations like Foo(42) @@ -284,6 +277,24 @@ fn generate_names(fields: impl Iterator) -> Vec ast::RecordPatFieldList { + let pure_fields = pat.fields().filter(|p| !matches!(p, ast::Pat::RestPat(_))); + let rest_len = names.len().saturating_sub(pure_fields.clone().count()); + let rest_pat = pat.fields().find_map(|p| ast::RestPat::cast(p.syntax().clone())); + let rest_idx = + pat.fields().position(|p| ast::RestPat::can_cast(p.syntax().kind())).unwrap_or(names.len()); + let before_rest = pat.fields().zip(names).take(rest_idx); + let after_rest = pure_fields.zip(names.iter().skip(rest_len)).skip(rest_idx); + + let fields = before_rest + .chain(after_rest) + .map(|(pat, name)| ast::make::record_pat_field(ast::make::name_ref(&name.text()), pat)); + ast::make::record_pat_field_list(fields, rest_pat) +} + #[cfg(test)] mod tests { use crate::tests::{check_assist, check_assist_not_applicable}; @@ -358,6 +369,43 @@ impl A { ); } + #[test] + fn convert_struct_and_rest_pat() { + check_assist( + convert_tuple_struct_to_named_struct, + r#" +struct Inner; +struct A$0(Inner); +fn foo(A(..): A) {} +"#, + r#" +struct Inner; +struct A { field1: Inner } +fn foo(A { .. }: A) {} +"#, + ); + + check_assist( + convert_tuple_struct_to_named_struct, + r#" +struct A; +struct B; +struct C; +struct D; +struct X$0(A, B, C, D); +fn foo(X(a, .., d): X) {} +"#, + r#" +struct A; +struct B; +struct C; +struct D; +struct X { field1: A, field2: B, field3: C, field4: D } +fn foo(X { field1: a, field4: d, .. }: X) {} +"#, + ); + } + #[test] fn convert_simple_struct_cursor_on_struct_keyword() { check_assist( From ba61c29ce1c45bf772e9a3bb6a366ecd14b18e58 Mon Sep 17 00:00:00 2001 From: Daniel Paoliello Date: Wed, 27 Aug 2025 15:28:32 -0700 Subject: [PATCH 43/76] Allow env vars set in cargo.extraEnv to be resolved by the env! macro --- .../project-model/src/cargo_workspace.rs | 11 +-- .../crates/project-model/src/env.rs | 78 +++++++++++++++---- .../crates/project-model/src/workspace.rs | 41 ++++++---- 3 files changed, 91 insertions(+), 39 deletions(-) diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs index 04fb2275893c7..76ba01f3a2633 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs @@ -49,8 +49,9 @@ pub struct CargoWorkspace { is_virtual_workspace: bool, /// Whether this workspace represents the sysroot workspace. 
     is_sysroot: bool,
-    /// Environment variables set in the `.cargo/config` file.
-    config_env: Env,
+    /// Environment variables set in the `.cargo/config` file and the extraEnv
+    /// configuration option.
+    env: Env,
     requires_rustc_private: bool,
 }
 
@@ -325,7 +326,7 @@ impl CargoWorkspace {
     pub fn new(
         mut meta: cargo_metadata::Metadata,
         ws_manifest_path: ManifestPath,
-        cargo_config_env: Env,
+        cargo_env: Env,
         is_sysroot: bool,
     ) -> CargoWorkspace {
         let mut pkg_by_id = FxHashMap::default();
@@ -498,7 +499,7 @@ impl CargoWorkspace {
             is_virtual_workspace,
             requires_rustc_private,
             is_sysroot,
-            config_env: cargo_config_env,
+            env: cargo_env,
         }
     }
 
@@ -589,7 +590,7 @@ impl CargoWorkspace {
     }
 
     pub fn env(&self) -> &Env {
-        &self.config_env
+        &self.env
     }
 
     pub fn is_sysroot(&self) -> bool {
diff --git a/src/tools/rust-analyzer/crates/project-model/src/env.rs b/src/tools/rust-analyzer/crates/project-model/src/env.rs
index d281492fc98c6..ae0458af7aa7b 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/env.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/env.rs
@@ -1,6 +1,7 @@
 //! Cargo-like environment variables injection.
 use base_db::Env;
 use paths::Utf8Path;
+use rustc_hash::FxHashMap;
 use toolchain::Tool;
 
 use crate::{ManifestPath, PackageData, TargetKind, cargo_config_file::CargoConfigFile};
@@ -60,8 +61,14 @@ pub(crate) fn inject_rustc_tool_env(env: &mut Env, cargo_name: &str, kind: Targe
     env.set("CARGO_CRATE_NAME", cargo_name.replace('-', "_"));
 }
 
-pub(crate) fn cargo_config_env(manifest: &ManifestPath, config: &Option<CargoConfigFile>) -> Env {
+pub(crate) fn cargo_config_env(
+    manifest: &ManifestPath,
+    config: &Option<CargoConfigFile>,
+    extra_env: &FxHashMap<String, Option<String>>,
+) -> Env {
     let mut env = Env::default();
+    env.extend(extra_env.iter().filter_map(|(k, v)| v.as_ref().map(|v| (k.clone(), v.clone()))));
+
     let Some(serde_json::Value::Object(env_json)) = config.as_ref().and_then(|c| c.get("env"))
     else {
         return env;
     };
@@ -72,22 +79,34 @@ pub(crate) fn cargo_config_env(manifest: &ManifestPath, config: &Option<CargoCon
     let base = <_ as AsRef<Utf8Path>>::as_ref(manifest.parent());
     for (key, entry) in env_json {
-        let serde_json::Value::Object(entry) = entry else {
-            continue;
-        };
-        let Some(value) = entry.get("value").and_then(|v| v.as_str()) else {
-            continue;
-        };
+        let value = match entry {
+            serde_json::Value::String(s) => s.clone(),
+            serde_json::Value::Object(entry) => {
+                // Each entry MUST have a `value` key.
+                let Some(value) = entry.get("value").and_then(|v| v.as_str()) else {
+                    continue;
+                };
+                // If the entry already exists in the environment AND the `force` key is not set to
+                // true, then don't overwrite the value.
+ if extra_env.get(key).is_some_and(Option::is_some) + && !entry.get("force").and_then(|v| v.as_bool()).unwrap_or(false) + { + continue; + } - let value = if entry - .get("relative") - .and_then(|v| v.as_bool()) - .is_some_and(std::convert::identity) - { - base.join(value).to_string() - } else { - value.to_owned() + if entry + .get("relative") + .and_then(|v| v.as_bool()) + .is_some_and(std::convert::identity) + { + base.join(value).to_string() + } else { + value.to_owned() + } + } + _ => continue, }; + env.insert(key, value); } @@ -113,7 +132,19 @@ fn parse_output_cargo_config_env_works() { }, "TEST": { "value": "test" - } + }, + "FORCED": { + "value": "test", + "force": true + }, + "UNFORCED": { + "value": "test", + "force": false + }, + "OVERWRITTEN": { + "value": "test" + }, + "NOT_AN_OBJECT": "value" } } "#; @@ -121,9 +152,22 @@ fn parse_output_cargo_config_env_works() { let cwd = paths::Utf8PathBuf::try_from(std::env::current_dir().unwrap()).unwrap(); let manifest = paths::AbsPathBuf::assert(cwd.join("Cargo.toml")); let manifest = ManifestPath::try_from(manifest).unwrap(); - let env = cargo_config_env(&manifest, &Some(config)); + let extra_env = [ + ("FORCED", Some("ignored")), + ("UNFORCED", Some("newvalue")), + ("OVERWRITTEN", Some("newvalue")), + ("TEST", None), + ] + .iter() + .map(|(k, v)| (k.to_string(), v.map(ToString::to_string))) + .collect(); + let env = cargo_config_env(&manifest, &Some(config), &extra_env); assert_eq!(env.get("CARGO_WORKSPACE_DIR").as_deref(), Some(cwd.join("").as_str())); assert_eq!(env.get("RELATIVE").as_deref(), Some(cwd.join("../relative").as_str())); assert_eq!(env.get("INVALID").as_deref(), Some("../relative")); assert_eq!(env.get("TEST").as_deref(), Some("test")); + assert_eq!(env.get("FORCED").as_deref(), Some("test")); + assert_eq!(env.get("UNFORCED").as_deref(), Some("newvalue")); + assert_eq!(env.get("OVERWRITTEN").as_deref(), Some("newvalue")); + assert_eq!(env.get("NOT_AN_OBJECT").as_deref(), Some("value")); } diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs index 22b84791aee9b..b88db419574db 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs @@ -387,36 +387,36 @@ impl ProjectWorkspace { progress, ) }); - let cargo_config_extra_env = - s.spawn(move || cargo_config_env(cargo_toml, &config_file)); + let cargo_env = + s.spawn(move || cargo_config_env(cargo_toml, &config_file, &config.extra_env)); thread::Result::Ok(( rustc_cfg.join()?, target_data.join()?, rustc_dir.join()?, loaded_sysroot.join()?, cargo_metadata.join()?, - cargo_config_extra_env.join()?, + cargo_env.join()?, )) }); - let ( - rustc_cfg, - data_layout, - mut rustc, - loaded_sysroot, - cargo_metadata, - cargo_config_extra_env, - ) = match join { - Ok(it) => it, - Err(e) => std::panic::resume_unwind(e), - }; + let (rustc_cfg, data_layout, mut rustc, loaded_sysroot, cargo_metadata, mut cargo_env) = + match join { + Ok(it) => it, + Err(e) => std::panic::resume_unwind(e), + }; + + for (key, value) in config.extra_env.iter() { + if let Some(value) = value { + cargo_env.insert(key.clone(), value.clone()); + } + } let (meta, error) = cargo_metadata.with_context(|| { format!( "Failed to read Cargo metadata from Cargo.toml file {cargo_toml}, {toolchain:?}", ) })?; - let cargo = CargoWorkspace::new(meta, cargo_toml.clone(), cargo_config_extra_env, false); + let cargo = CargoWorkspace::new(meta, 
cargo_toml.clone(), cargo_env, false); if let Some(loaded_sysroot) = loaded_sysroot { tracing::info!(src_root = ?sysroot.rust_lib_src_root(), root = %loaded_sysroot, "Loaded sysroot"); sysroot.set_workspace(loaded_sysroot); @@ -586,7 +586,8 @@ impl ProjectWorkspace { .unwrap_or_else(|| dir.join("target").into()); let cargo_script = fetch_metadata.exec(&target_dir, false, &|_| ()).ok().map(|(ws, error)| { - let cargo_config_extra_env = cargo_config_env(detached_file, &config_file); + let cargo_config_extra_env = + cargo_config_env(detached_file, &config_file, &config.extra_env); ( CargoWorkspace::new(ws, detached_file.clone(), cargo_config_extra_env, false), WorkspaceBuildScripts::default(), @@ -1089,7 +1090,13 @@ fn project_json_to_crate_graph( }, file_id, )| { - let env = env.clone().into_iter().collect(); + let mut env = env.clone().into_iter().collect::(); + // Override existing env vars with those from `extra_env` + env.extend( + extra_env + .iter() + .filter_map(|(k, v)| v.as_ref().map(|v| (k.clone(), v.clone()))), + ); let target_cfgs = match target.as_deref() { Some(target) => cfg_cache.entry(target).or_insert_with(|| { From d76463c8573964ffcdff9f47f98bfe0f8254e62b Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Tue, 21 Oct 2025 22:58:31 +0300 Subject: [PATCH 44/76] Clear next-solver cache before reporting memory usage in analysis-stats The cache shouldn't be included, as it is mostly temporary (per-revision). --- .../crates/hir-ty/src/next_solver/interner.rs | 9 +++++++++ src/tools/rust-analyzer/crates/hir/src/lib.rs | 1 + .../crates/rust-analyzer/src/cli/analysis_stats.rs | 2 ++ 3 files changed, 12 insertions(+) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs index 42f1d926d7db3..ce8b76837a3c7 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs @@ -2,6 +2,7 @@ use std::{fmt, ops::ControlFlow}; +pub use tls_cache::clear_tls_solver_cache; pub use tls_db::{attach_db, attach_db_allow_change, with_attached_db}; use base_db::Crate; @@ -2239,4 +2240,12 @@ mod tls_cache { }) }) } + + /// Clears the thread-local trait solver cache. + /// + /// Should be called before getting memory usage estimations, as the solver cache + /// is per-revision and usually should be excluded from estimations. 
+ pub fn clear_tls_solver_cache() { + GLOBAL_CACHE.with_borrow_mut(|handle| *handle = None); + } } diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index 48eafb0bd4c60..9418903123179 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -171,6 +171,7 @@ pub use { method_resolution::TyFingerprint, mir::{MirEvalError, MirLowerError}, next_solver::abi::Safety, + next_solver::clear_tls_solver_cache, }, intern::{Symbol, sym}, }; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs index 717bd230a21e9..de24bc09ff0fa 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -345,6 +345,8 @@ impl flags::AnalysisStats { self.run_term_search(&workspace, db, &vfs, &file_ids, verbosity); } + hir::clear_tls_solver_cache(); + let db = host.raw_database_mut(); db.trigger_lru_eviction(); From 385bd28558f87d06a5ae46f8698786e075e4fd21 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Wed, 22 Oct 2025 05:17:27 +0300 Subject: [PATCH 45/76] Fix "cannot insert `true` or `false` to cfg" error in fixtures --- src/tools/rust-analyzer/crates/cfg/src/lib.rs | 7 +++++++ src/tools/rust-analyzer/crates/test-fixture/src/lib.rs | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/src/tools/rust-analyzer/crates/cfg/src/lib.rs b/src/tools/rust-analyzer/crates/cfg/src/lib.rs index 906106ca5db0b..b1ec4c273a854 100644 --- a/src/tools/rust-analyzer/crates/cfg/src/lib.rs +++ b/src/tools/rust-analyzer/crates/cfg/src/lib.rs @@ -115,6 +115,13 @@ impl CfgOptions { pub fn shrink_to_fit(&mut self) { self.enabled.shrink_to_fit(); } + + pub fn append(&mut self, other: CfgOptions) { + // Do not call `insert_any_atom()`, as it'll check for `true` and `false`, but this is not + // needed since we already checked for that when constructing `other`. Furthermore, this + // will always err, as `other` inevitably contains `true` (just as we do). + self.enabled.extend(other.enabled); + } } impl Extend for CfgOptions { diff --git a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs index aefe81f83e294..a718b96a82522 100644 --- a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs +++ b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs @@ -252,7 +252,7 @@ impl ChangeFixture { assert!(default_crate_root.is_none()); default_crate_root = Some(file_id); default_edition = meta.edition; - default_cfg.extend(meta.cfg.into_iter()); + default_cfg.append(meta.cfg); default_env.extend_from_other(&meta.env); } From 911edbfe81978d99fd16df674821e4aff9b829a0 Mon Sep 17 00:00:00 2001 From: A4-Tacks Date: Mon, 15 Sep 2025 17:42:55 +0800 Subject: [PATCH 46/76] Add heuristic sensing `is_in_block` MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Example --- ```rust fn foo() -> [i32; 2] { l$0 [0, n] } ``` **Before this PR**: ```text loop~ line!(…)~ macro_rules! line ``` **After this PR**: ```text let~ loop~ letm~ line!(…)~ macro_rules! 
line ``` --- .../ide-completion/src/context/analysis.rs | 26 +++ .../ide-completion/src/tests/expression.rs | 167 ++++++++++++++++++ 2 files changed, 193 insertions(+) diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs index 873eceff5f5fa..f0a03dedfe881 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs @@ -1890,11 +1890,37 @@ fn is_in_breakable(node: &SyntaxNode) -> Option<(BreakableKind, SyntaxNode)> { } fn is_in_block(node: &SyntaxNode) -> bool { + if has_in_newline_expr_first(node) { + return true; + }; node.parent() .map(|node| ast::ExprStmt::can_cast(node.kind()) || ast::StmtList::can_cast(node.kind())) .unwrap_or(false) } +/// Similar to `has_parens`, heuristic sensing incomplete statement before ambigiguous `Expr` +/// +/// Heuristic: +/// +/// If the `PathExpr` is left part of the `Expr` and there is a newline after the `PathExpr`, +/// it is considered that the `PathExpr` is not part of the `Expr`. +fn has_in_newline_expr_first(node: &SyntaxNode) -> bool { + if ast::PathExpr::can_cast(node.kind()) + && let Some(NodeOrToken::Token(next)) = node.next_sibling_or_token() + && next.kind() == SyntaxKind::WHITESPACE + && next.text().contains('\n') + && let Some(stmt_like) = node + .ancestors() + .take_while(|it| it.text_range().start() == node.text_range().start()) + .filter_map(Either::::cast) + .last() + { + stmt_like.syntax().parent().and_then(ast::StmtList::cast).is_some() + } else { + false + } +} + fn next_non_trivia_token(e: impl Into) -> Option { let mut token = match e.into() { SyntaxElement::Node(n) => n.last_token()?, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs index 5363a68af7237..f75fa7943ba60 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs @@ -2946,6 +2946,173 @@ fn let_in_let_chain() { check_edit("let", r#"fn f() { if true && $0 {} }"#, r#"fn f() { if true && let $1 = $0 {} }"#); } +#[test] +fn let_in_previous_line_of_ambiguous_expr() { + check_edit( + "let", + r#" + fn f() { + $0 + (1, 2).foo(); + }"#, + r#" + fn f() { + let $1 = $0; + (1, 2).foo(); + }"#, + ); + + check_edit( + "let", + r#" + fn f() { + $0 + (1, 2) + }"#, + r#" + fn f() { + let $1 = $0; + (1, 2) + }"#, + ); + + check_edit( + "let", + r#" + fn f() -> i32 { + $0 + -2 + }"#, + r#" + fn f() -> i32 { + let $1 = $0; + -2 + }"#, + ); + + check_edit( + "let", + r#" + fn f() -> [i32; 2] { + $0 + [1, 2] + }"#, + r#" + fn f() -> [i32; 2] { + let $1 = $0; + [1, 2] + }"#, + ); + + check_edit( + "let", + r#" + fn f() -> [u8; 2] { + $0 + *b"01" + }"#, + r#" + fn f() -> [u8; 2] { + let $1 = $0; + *b"01" + }"#, + ); + + check( + r#" + fn foo() { + $0 + *b"01" + }"#, + expect![[r#" + fn foo() fn() + bt u32 u32 + kw async + kw const + kw crate:: + kw enum + kw extern + kw false + kw fn + kw for + kw if + kw if let + kw impl + kw impl for + kw let + kw letm + kw loop + kw match + kw mod + kw return + kw self:: + kw static + kw struct + kw trait + kw true + kw type + kw union + kw unsafe + kw use + kw while + kw while let + sn macro_rules + sn pd + sn ppd + "#]], + ); + + check( + r#" + fn foo() { + match $0 {} + }"#, + expect![[r#" + fn foo() fn() + bt u32 u32 + kw const + kw crate:: + kw false + 
kw for + kw if + kw if let + kw loop + kw match + kw return + kw self:: + kw true + kw unsafe + kw while + kw while let + "#]], + ); + + check( + r#" + fn foo() { + $0 *b"01" + }"#, + expect![[r#" + fn foo() fn() + bt u32 u32 + kw const + kw crate:: + kw false + kw for + kw if + kw if let + kw loop + kw match + kw return + kw self:: + kw true + kw unsafe + kw while + kw while let + "#]], + ); +} + #[test] fn private_inherent_and_public_trait() { check( From 6fe555360e25f6e65f979c8c8d56dde47f1c8838 Mon Sep 17 00:00:00 2001 From: A4-Tacks Date: Sat, 11 Oct 2025 13:49:51 +0800 Subject: [PATCH 47/76] Add shorthand field completion for record-expr Example --- ```rust struct Foo { bar: bool, n: i32 } fn baz() { let bar = true; let foo: Foo = Fo$0; } ``` **Before this PR**: ```rust struct Foo { bar: bool, n: i32 } fn baz() { let bar = true; let foo: Foo = Foo { bar: ${1:()}, n: ${2:()} }$0; } ``` **After this PR**: ```rust struct Foo { bar: bool, n: i32 } fn baz() { let bar = true; let foo: Foo = Foo { bar$1, n: ${2:()} }$0; } ``` --- .../ide-completion/src/completions/record.rs | 27 +++++++++++++++++++ .../ide-completion/src/render/variant.rs | 21 ++++++++++----- 2 files changed, 42 insertions(+), 6 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs index 28b324d61afa5..bfa567009c015 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs @@ -178,6 +178,33 @@ fn baz() { ) } + #[test] + fn literal_struct_completion_shorthand() { + check_edit( + "FooDesc{}", + r#" +struct FooDesc { pub bar: bool, n: i32 } + +fn create_foo(foo_desc: &FooDesc) -> () { () } + +fn baz() { + let bar = true; + let foo = create_foo(&$0); +} + "#, + r#" +struct FooDesc { pub bar: bool, n: i32 } + +fn create_foo(foo_desc: &FooDesc) -> () { () } + +fn baz() { + let bar = true; + let foo = create_foo(&FooDesc { bar$1, n: ${2:()} }$0); +} + "#, + ) + } + #[test] fn enum_variant_no_snippets() { let conf = CompletionConfig { snippet_cap: SnippetCap::new(false), ..TEST_CONFIG }; diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs index 42324b4290a77..37d0fa18c4972 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs @@ -26,14 +26,23 @@ pub(crate) fn render_record_lit( return RenderedLiteral { literal: path.to_owned(), detail: path.to_owned() }; } let completions = fields.iter().enumerate().format_with(", ", |(idx, field), f| { + let mut fmt_field = |fill, tab| { + let field_name = field.name(ctx.db); + + if let Some(local) = ctx.locals.get(&field_name) + && local + .ty(ctx.db) + .could_unify_with_deeply(ctx.db, &field.ty(ctx.db).to_type(ctx.db)) + { + f(&format_args!("{}{tab}", field_name.display(ctx.db, ctx.edition))) + } else { + f(&format_args!("{}: {fill}", field_name.display(ctx.db, ctx.edition))) + } + }; if snippet_cap.is_some() { - f(&format_args!( - "{}: ${{{}:()}}", - field.name(ctx.db).display(ctx.db, ctx.edition), - idx + 1 - )) + fmt_field(format_args!("${{{}:()}}", idx + 1), format_args!("${}", idx + 1)) } else { - f(&format_args!("{}: ()", field.name(ctx.db).display(ctx.db, ctx.edition))) + fmt_field(format_args!("()"), format_args!("")) } }); From 70e3d8ca5ca03a0031e4e2f14debd60d5bc8bab9 Mon 
Sep 17 00:00:00 2001 From: A4-Tacks Date: Sat, 30 Aug 2025 08:47:01 +0800 Subject: [PATCH 48/76] Add type keywords completions Example --- ``` kw dyn kw fn kw for kw impl ``` --- .../crates/ide-completion/src/completions.rs | 7 + .../ide-completion/src/completions/type.rs | 1 + .../crates/ide-completion/src/tests/item.rs | 8 + .../ide-completion/src/tests/predicate.rs | 12 ++ .../ide-completion/src/tests/special.rs | 12 ++ .../ide-completion/src/tests/type_pos.rs | 175 ++++++++++++++++++ 6 files changed, 215 insertions(+) diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs index ed58e862d437f..abae3cb36802f 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs @@ -111,6 +111,13 @@ impl Completions { } } + pub(crate) fn add_type_keywords(&mut self, ctx: &CompletionContext<'_>) { + self.add_keyword_snippet(ctx, "fn", "fn($1)"); + self.add_keyword_snippet(ctx, "dyn", "dyn $0"); + self.add_keyword_snippet(ctx, "impl", "impl $0"); + self.add_keyword_snippet(ctx, "for", "for<$1>"); + } + pub(crate) fn add_super_keyword( &mut self, ctx: &CompletionContext<'_>, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs index 3112462cda4e8..3465b73321e97 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/type.rs @@ -205,6 +205,7 @@ pub(crate) fn complete_type_path( }; acc.add_nameref_keywords_with_colon(ctx); + acc.add_type_keywords(ctx); ctx.process_all_names(&mut |name, def, doc_aliases| { if scope_def_applicable(def) { acc.add_path_resolution(ctx, path_ctx, name, def, doc_aliases); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs index ed87b339fedf3..61a9da8c278a5 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs @@ -23,6 +23,10 @@ impl Tra$0 un Union Union bt u32 u32 kw crate:: + kw dyn + kw fn + kw for + kw impl kw self:: "#]], ) @@ -45,6 +49,10 @@ impl Trait for Str$0 un Union Union bt u32 u32 kw crate:: + kw dyn + kw fn + kw for + kw impl kw self:: "#]], ) diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs index 65036f6a22405..682b8904e5501 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs @@ -22,6 +22,10 @@ struct Foo<'lt, T, const C: usize> where $0 {} un Union Union bt u32 u32 kw crate:: + kw dyn + kw fn + kw for + kw impl kw self:: "#]], ); @@ -95,6 +99,10 @@ struct Foo<'lt, T, const C: usize> where for<'a> $0 {} un Union Union bt u32 u32 kw crate:: + kw dyn + kw fn + kw for + kw impl kw self:: "#]], ); @@ -120,6 +128,10 @@ impl Record { un Union Union bt u32 u32 kw crate:: + kw dyn + kw fn + kw for + kw impl kw self:: "#]], ); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs index c438ca7880625..59a0c144c8937 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs +++ 
b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs @@ -1492,6 +1492,10 @@ fn foo(_: a_$0) { } expect![[r#" bt u32 u32 kw crate:: + kw dyn + kw fn + kw for + kw impl kw self:: "#]], ); @@ -1506,6 +1510,10 @@ fn foo() { tp T bt u32 u32 kw crate:: + kw dyn + kw fn + kw for + kw impl kw self:: "#]], ); @@ -1531,6 +1539,10 @@ fn foo() {} expect![[r#" bt u32 u32 kw crate:: + kw dyn + kw fn + kw for + kw impl kw self:: "#]], ); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs index 125e11e9e3589..3bbba18c2b9fd 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs @@ -25,6 +25,10 @@ struct Foo<'lt, T, const C: usize> { un Union Union bt u32 u32 kw crate:: + kw dyn + kw fn + kw for + kw impl kw self:: "#]], ) @@ -50,6 +54,10 @@ struct Foo<'lt, T, const C: usize>(f$0); un Union Union bt u32 u32 kw crate:: + kw dyn + kw fn + kw for + kw impl kw pub kw pub(crate) kw pub(super) @@ -76,6 +84,37 @@ fn x<'lt, T, const C: usize>() -> $0 un Union Union bt u32 u32 kw crate:: + kw dyn + kw fn + kw for + kw impl + kw self:: + "#]], + ); +} + +#[test] +fn fn_return_type_after_reference() { + check_with_base_items( + r#" +fn x<'lt, T, const C: usize>(_: &()) -> &$0 +"#, + expect![[r#" + en Enum Enum + ma makro!(…) macro_rules! makro + md module + st Record Record + st Tuple Tuple + st Unit Unit + tt Trait + tp T + un Union Union + bt u32 u32 + kw crate:: + kw dyn + kw fn + kw for + kw impl kw self:: "#]], ); @@ -106,6 +145,10 @@ fn foo() -> B$0 { bt u32 u32 it () kw crate:: + kw dyn + kw fn + kw for + kw impl kw self:: "#]], ) @@ -131,6 +174,10 @@ const FOO: $0 = Foo(2); bt u32 u32 it Foo kw crate:: + kw dyn + kw fn + kw for + kw impl kw self:: "#]], ); @@ -157,6 +204,10 @@ fn f2() { bt u32 u32 it i32 kw crate:: + kw dyn + kw fn + kw for + kw impl kw self:: "#]], ); @@ -185,6 +236,10 @@ fn f2() { bt u32 u32 it u64 kw crate:: + kw dyn + kw fn + kw for + kw impl kw self:: "#]], ); @@ -210,6 +265,10 @@ fn f2(x: u64) -> $0 { bt u32 u32 it u64 kw crate:: + kw dyn + kw fn + kw for + kw impl kw self:: "#]], ); @@ -236,6 +295,10 @@ fn f2(x: $0) { bt u32 u32 it i32 kw crate:: + kw dyn + kw fn + kw for + kw impl kw self:: "#]], ); @@ -270,6 +333,10 @@ fn foo<'lt, T, const C: usize>() { bt u32 u32 it a::Foo> kw crate:: + kw dyn + kw fn + kw for + kw impl kw self:: "#]], ); @@ -299,6 +366,10 @@ fn foo<'lt, T, const C: usize>() { bt u32 u32 it Foo kw crate:: + kw dyn + kw fn + kw for + kw impl kw self:: "#]], ); @@ -325,6 +396,10 @@ fn foo<'lt, T, const C: usize>() { un Union Union bt u32 u32 kw crate:: + kw dyn + kw fn + kw for + kw impl kw self:: "#]], ); @@ -392,6 +467,10 @@ fn foo<'lt, T: Trait2<$0>, const CONST_PARAM: usize>(_: T) {} un Union Union bt u32 u32 kw crate:: + kw dyn + kw fn + kw for + kw impl kw self:: "#]], ); @@ -442,6 +521,10 @@ impl Tr<$0 un Union Union bt u32 u32 kw crate:: + kw dyn + kw fn + kw for + kw impl kw self:: "#]], ); @@ -487,6 +570,10 @@ fn f(t: impl MyTrait() { S::; } ct CONST Unit ma makro!(…) macro_rules! makro kw crate:: + kw dyn + kw fn + kw for + kw impl kw self:: "#]], ); @@ -968,6 +1135,10 @@ fn foo<'a>() { S::<'static, 'static, F$0, _>; } ct CONST Unit ma makro!(…) macro_rules! makro kw crate:: + kw dyn + kw fn + kw for + kw impl kw self:: "#]], ); @@ -980,6 +1151,10 @@ fn foo<'a>() { S::<'static, F$0, _, _>; } lt 'a ma makro!(…) macro_rules! 
makro kw crate:: + kw dyn + kw fn + kw for + kw impl kw self:: "#]], ); From 922aad6b6d22ced38024fc5d2506f2b3884f643f Mon Sep 17 00:00:00 2001 From: A4-Tacks Date: Wed, 22 Oct 2025 14:37:12 +0800 Subject: [PATCH 49/76] Improve parsing of missing name in MethodCallExpr Usually, this occurs when preparing to input a method name However, once an identifier is entered, it is not reasonable for the parsing result to change from `CallExpr(FieldExpr())` to `MethodCallExpr()` Example --- ```rust fn foo() { x. () } ``` **Before this PR**: ```text SOURCE_FILE FN FN_KW "fn" WHITESPACE " " NAME IDENT "foo" PARAM_LIST L_PAREN "(" R_PAREN ")" WHITESPACE " " BLOCK_EXPR STMT_LIST L_CURLY "{" WHITESPACE "\n " CALL_EXPR FIELD_EXPR PATH_EXPR PATH PATH_SEGMENT NAME_REF IDENT "x" DOT "." WHITESPACE "\n " ARG_LIST L_PAREN "(" R_PAREN ")" WHITESPACE "\n" R_CURLY "}" WHITESPACE "\n" error 17: expected field name or number ``` **After this PR**: ```text SOURCE_FILE FN FN_KW "fn" WHITESPACE " " NAME IDENT "foo" PARAM_LIST L_PAREN "(" R_PAREN ")" WHITESPACE " " BLOCK_EXPR STMT_LIST L_CURLY "{" WHITESPACE "\n " METHOD_CALL_EXPR PATH_EXPR PATH PATH_SEGMENT NAME_REF IDENT "x" DOT "." WHITESPACE "\n " ARG_LIST L_PAREN "(" R_PAREN ")" WHITESPACE "\n" R_CURLY "}" WHITESPACE "\n" error 17: expected method name, field name or number ``` --- .../crates/parser/src/grammar/expressions.rs | 23 +++++++++++---- .../parser/test_data/generated/runner.rs | 4 +++ .../err/postfix_dot_expr_ambiguity.rast | 29 +++++++++++++++++++ .../inline/err/postfix_dot_expr_ambiguity.rs | 4 +++ 4 files changed, 55 insertions(+), 5 deletions(-) create mode 100644 src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/postfix_dot_expr_ambiguity.rast create mode 100644 src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/postfix_dot_expr_ambiguity.rs diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs index 41fd72d8d5a2f..76d26c1ecdfc2 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs @@ -430,6 +430,11 @@ fn postfix_expr( // } T!['('] if allow_calls => call_expr(p, lhs), T!['['] if allow_calls => index_expr(p, lhs), + // test_err postfix_dot_expr_ambiguity + // fn foo() { + // x. + // () + // } T![.] 
=> match postfix_dot_expr::(p, lhs) { Ok(it) => it, Err(it) => { @@ -458,6 +463,7 @@ fn postfix_dot_expr( if PATH_NAME_REF_KINDS.contains(p.nth(nth1)) && (p.nth(nth2) == T!['('] || p.nth_at(nth2, T![::])) + || p.nth(nth1) == T!['('] { return Ok(method_call_expr::(p, lhs)); } @@ -526,19 +532,26 @@ fn method_call_expr( lhs: CompletedMarker, ) -> CompletedMarker { if FLOAT_RECOVERY { - assert!(p.at_ts(PATH_NAME_REF_KINDS) && (p.nth(1) == T!['('] || p.nth_at(1, T![::]))); + assert!( + p.at_ts(PATH_NAME_REF_KINDS) && (p.nth(1) == T!['('] || p.nth_at(1, T![::])) + || p.current() == T!['('] + ); } else { + assert!(p.at(T![.])); assert!( - p.at(T![.]) - && PATH_NAME_REF_KINDS.contains(p.nth(1)) - && (p.nth(2) == T!['('] || p.nth_at(2, T![::])) + PATH_NAME_REF_KINDS.contains(p.nth(1)) && (p.nth(2) == T!['('] || p.nth_at(2, T![::])) + || p.nth(1) == T!['('] ); } let m = lhs.precede(p); if !FLOAT_RECOVERY { p.bump(T![.]); } - name_ref_mod_path(p); + if p.at_ts(PATH_NAME_REF_KINDS) { + name_ref_mod_path(p); + } else { + p.error("expected method name, field name or number"); + } generic_args::opt_generic_arg_list_expr(p); if p.at(T!['(']) { arg_list(p); diff --git a/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs b/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs index cd6d433d0efa7..9bdbe56330338 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs @@ -844,6 +844,10 @@ mod err { run_and_expect_errors("test_data/parser/inline/err/pointer_type_no_mutability.rs"); } #[test] + fn postfix_dot_expr_ambiguity() { + run_and_expect_errors("test_data/parser/inline/err/postfix_dot_expr_ambiguity.rs"); + } + #[test] fn precise_capturing_invalid() { run_and_expect_errors("test_data/parser/inline/err/precise_capturing_invalid.rs"); } diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/postfix_dot_expr_ambiguity.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/postfix_dot_expr_ambiguity.rast new file mode 100644 index 0000000000000..4ee318de25158 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/postfix_dot_expr_ambiguity.rast @@ -0,0 +1,29 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "foo" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + METHOD_CALL_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "x" + DOT "." + WHITESPACE "\n " + ARG_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" +error 17: expected method name, field name or number diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/postfix_dot_expr_ambiguity.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/postfix_dot_expr_ambiguity.rs new file mode 100644 index 0000000000000..c1aed30342883 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/postfix_dot_expr_ambiguity.rs @@ -0,0 +1,4 @@ +fn foo() { + x. + () +} From 7773fe0fb8808f81a6e982c587e9e16a1648cb2e Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Sun, 27 Jul 2025 20:17:10 +0300 Subject: [PATCH 50/76] Rewrite attribute handling Basically, we switch to expanding cfg_attr in AST form, filter irrelevant attributes from the item tree, and move hir-def attributes (non-item-tree) to be flag-based. 
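As a concrete illustration of the first change, here is a minimal sketch of what expanding `cfg_attr` at the AST level amounts to; it shows standard `cfg_attr` semantics only, the struct names are made up for the example, and it is not this patch's internal API:

```rust
// Assuming the cfg `test` is enabled, an item written as
#[cfg_attr(test, derive(Debug), allow(dead_code))]
struct WithCfgAttr;

// is handed to later stages as if its attributes had been spelled out directly:
#[derive(Debug)]
#[allow(dead_code)]
struct Expanded;
// With `test` disabled, the `cfg_attr` contributes no attributes at all.
```

Performing that rewrite on the syntax tree is what allows attributes the item tree does not care about to be filtered out before lowering.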
The main motivation is memory usage, although this also simplifies the code, and fixes some bugs around handling of `cfg_attr`s. --- src/tools/rust-analyzer/Cargo.lock | 6 +- src/tools/rust-analyzer/Cargo.toml | 5 +- .../crates/base-db/src/editioned_file_id.rs | 291 +++ .../rust-analyzer/crates/base-db/src/input.rs | 7 +- .../rust-analyzer/crates/base-db/src/lib.rs | 39 +- src/tools/rust-analyzer/crates/cfg/Cargo.toml | 1 + .../rust-analyzer/crates/cfg/src/cfg_expr.rs | 59 + .../rust-analyzer/crates/cfg/src/tests.rs | 42 +- .../rust-analyzer/crates/hir-def/Cargo.toml | 4 +- .../rust-analyzer/crates/hir-def/src/attr.rs | 901 --------- .../rust-analyzer/crates/hir-def/src/attrs.rs | 1613 +++++++++++++++++ .../rust-analyzer/crates/hir-def/src/db.rs | 71 +- .../crates/hir-def/src/expr_store/expander.rs | 14 +- .../crates/hir-def/src/expr_store/lower.rs | 22 +- .../crates/hir-def/src/expr_store/pretty.rs | 19 +- .../src/expr_store/tests/body/block.rs | 4 +- .../src/expr_store/tests/signatures.rs | 14 +- .../crates/hir-def/src/import_map.rs | 34 +- .../crates/hir-def/src/item_tree.rs | 40 +- .../crates/hir-def/src/item_tree/attrs.rs | 220 +++ .../crates/hir-def/src/item_tree/lower.rs | 35 +- .../crates/hir-def/src/item_tree/pretty.rs | 12 +- .../crates/hir-def/src/item_tree/tests.rs | 9 +- .../crates/hir-def/src/lang_item.rs | 7 +- .../rust-analyzer/crates/hir-def/src/lib.rs | 94 +- .../hir-def/src/macro_expansion_tests/mbe.rs | 43 +- .../hir-def/src/macro_expansion_tests/mod.rs | 15 + .../src/macro_expansion_tests/proc_macros.rs | 96 +- .../crates/hir-def/src/nameres.rs | 15 +- .../crates/hir-def/src/nameres/assoc.rs | 39 +- .../hir-def/src/nameres/attr_resolution.rs | 10 +- .../crates/hir-def/src/nameres/collector.rs | 179 +- .../crates/hir-def/src/nameres/diagnostics.rs | 14 +- .../hir-def/src/nameres/mod_resolution.rs | 5 +- .../crates/hir-def/src/nameres/proc_macro.rs | 24 +- .../crates/hir-def/src/signatures.rs | 130 +- .../rust-analyzer/crates/hir-def/src/src.rs | 9 +- .../crates/hir-def/src/test_db.rs | 33 +- .../crates/hir-expand/Cargo.toml | 2 + .../crates/hir-expand/src/attrs.rs | 808 +++++---- .../crates/hir-expand/src/builtin/fn_macro.rs | 2 +- .../crates/hir-expand/src/cfg_process.rs | 638 ++++--- .../rust-analyzer/crates/hir-expand/src/db.rs | 181 +- .../crates/hir-expand/src/declarative.rs | 58 +- .../crates/hir-expand/src/files.rs | 33 +- .../crates/hir-expand/src/fixup.rs | 5 +- .../crates/hir-expand/src/lib.rs | 165 +- .../crates/hir-expand/src/mod_path.rs | 59 +- .../crates/hir-expand/src/span_map.rs | 13 +- .../crates/hir-ty/src/consteval.rs | 3 +- .../hir-ty/src/diagnostics/decl_check.rs | 6 +- .../diagnostics/match_check/pat_analysis.rs | 6 +- .../hir-ty/src/diagnostics/unsafe_check.rs | 4 +- .../rust-analyzer/crates/hir-ty/src/infer.rs | 14 +- .../crates/hir-ty/src/infer/coerce.rs | 14 +- .../crates/hir-ty/src/infer/expr.rs | 14 +- .../rust-analyzer/crates/hir-ty/src/layout.rs | 4 +- .../crates/hir-ty/src/layout/adt.rs | 35 +- .../crates/hir-ty/src/method_resolution.rs | 5 +- .../crates/hir-ty/src/mir/eval/shim.rs | 45 +- .../crates/hir-ty/src/next_solver/interner.rs | 59 +- .../crates/hir-ty/src/target_feature.rs | 46 +- .../crates/hir-ty/src/tests/incremental.rs | 49 +- .../rust-analyzer/crates/hir-ty/src/utils.rs | 8 +- .../rust-analyzer/crates/hir/src/attrs.rs | 256 ++- .../crates/hir/src/diagnostics.rs | 13 +- src/tools/rust-analyzer/crates/hir/src/lib.rs | 267 +-- .../rust-analyzer/crates/hir/src/semantics.rs | 71 +- .../hir/src/semantics/child_by_source.rs | 13 +- 
.../rust-analyzer/crates/hir/src/symbols.rs | 8 +- .../src/handlers/add_missing_match_arms.rs | 6 +- .../handlers/destructure_struct_binding.rs | 4 +- .../src/handlers/move_module_to_file.rs | 10 +- .../crates/ide-assists/src/lib.rs | 4 +- .../crates/ide-assists/src/tests.rs | 4 +- .../crates/ide-assists/src/utils.rs | 13 +- .../src/completions/attribute/lint.rs | 2 +- .../src/completions/flyimport.rs | 4 +- .../ide-completion/src/completions/postfix.rs | 2 +- .../ide-completion/src/completions/snippet.rs | 2 +- .../crates/ide-completion/src/context.rs | 22 +- .../crates/ide-completion/src/item.rs | 12 +- .../crates/ide-completion/src/render.rs | 13 +- .../ide-completion/src/render/literal.rs | 2 +- .../ide-completion/src/render/pattern.rs | 2 +- .../ide-completion/src/render/variant.rs | 6 +- .../crates/ide-completion/src/tests.rs | 4 +- .../rust-analyzer/crates/ide-db/src/defs.rs | 38 +- .../crates/ide-db/src/documentation.rs | 351 +--- .../crates/ide-db/src/ra_fixture.rs | 12 +- .../crates/ide-db/src/rust_doc.rs | 2 +- .../rust-analyzer/crates/ide-db/src/search.rs | 16 +- .../ide-db/src/test_data/test_doc_alias.txt | 30 +- .../test_symbol_index_collection.txt | 134 +- .../test_symbols_exclude_imports.txt | 2 +- .../test_data/test_symbols_with_imports.txt | 4 +- .../rust-analyzer/crates/ide-db/src/traits.rs | 6 +- .../src/handlers/inactive_code.rs | 3 +- .../src/handlers/invalid_derive_target.rs | 4 +- .../src/handlers/macro_error.rs | 22 +- .../src/handlers/malformed_derive.rs | 4 +- .../src/handlers/unresolved_macro_call.rs | 5 +- .../crates/ide-diagnostics/src/lib.rs | 43 +- .../crates/ide-ssr/src/from_comment.rs | 2 +- .../rust-analyzer/crates/ide-ssr/src/lib.rs | 6 +- .../crates/ide-ssr/src/search.rs | 8 +- .../rust-analyzer/crates/ide/src/doc_links.rs | 38 +- .../crates/ide/src/doc_links/tests.rs | 73 +- .../rust-analyzer/crates/ide/src/fixture.rs | 32 +- .../crates/ide/src/goto_implementation.rs | 2 +- .../crates/ide/src/highlight_related.rs | 2 +- .../crates/ide/src/hover/render.rs | 52 +- .../crates/ide/src/inlay_hints.rs | 4 +- src/tools/rust-analyzer/crates/ide/src/lib.rs | 13 +- .../crates/ide/src/navigation_target.rs | 38 +- .../crates/ide/src/references.rs | 5 +- .../rust-analyzer/crates/ide/src/runnables.rs | 50 +- .../crates/ide/src/signature_help.rs | 36 +- .../crates/ide/src/static_index.rs | 6 +- .../crates/ide/src/syntax_highlighting.rs | 2 +- .../ide/src/syntax_highlighting/html.rs | 2 +- .../ide/src/syntax_highlighting/inject.rs | 189 +- .../test_data/highlight_doctest.html | 72 +- .../rust-analyzer/crates/ide/src/typing.rs | 5 +- .../crates/ide/src/typing/on_enter.rs | 2 +- .../crates/ide/src/view_item_tree.rs | 2 +- .../rust-analyzer/src/cli/analysis_stats.rs | 8 +- .../crates/rust-analyzer/src/cli/scip.rs | 6 +- .../crates/rust-analyzer/src/cli/ssr.rs | 2 +- .../src/cli/unresolved_references.rs | 2 +- .../crates/rust-analyzer/src/lsp/to_proto.rs | 4 +- .../crates/syntax-bridge/src/lib.rs | 68 +- .../rust-analyzer/crates/syntax/src/ast.rs | 4 +- .../crates/syntax/src/ast/node_ext.rs | 38 +- .../crates/syntax/src/ast/token_ext.rs | 6 +- .../crates/syntax/src/ast/traits.rs | 67 +- .../crates/test-fixture/src/lib.rs | 45 +- 137 files changed, 4953 insertions(+), 3805 deletions(-) create mode 100644 src/tools/rust-analyzer/crates/base-db/src/editioned_file_id.rs delete mode 100644 src/tools/rust-analyzer/crates/hir-def/src/attr.rs create mode 100644 src/tools/rust-analyzer/crates/hir-def/src/attrs.rs create mode 100644 
src/tools/rust-analyzer/crates/hir-def/src/item_tree/attrs.rs diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index ea8d1a781dccb..d31d233dc4b69 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -725,6 +725,7 @@ dependencies = [ name = "hir-expand" version = "0.0.0" dependencies = [ + "arrayvec", "base-db", "cfg", "cov-mark", @@ -743,6 +744,7 @@ dependencies = [ "stdx", "syntax", "syntax-bridge", + "thin-vec", "tracing", "triomphe", "tt", @@ -1991,9 +1993,9 @@ dependencies = [ [[package]] name = "rowan" -version = "0.15.15" +version = "0.15.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a58fa8a7ccff2aec4f39cc45bf5f985cec7125ab271cf681c279fd00192b49" +checksum = "d4f1e4a001f863f41ea8d0e6a0c34b356d5b733db50dadab3efef640bafb779b" dependencies = [ "countme", "hashbrown 0.14.5", diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml index 8a108974681a1..767dbcae90314 100644 --- a/src/tools/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/Cargo.toml @@ -52,7 +52,7 @@ debug = 2 # local crates macros = { path = "./crates/macros", version = "0.0.0" } base-db = { path = "./crates/base-db", version = "0.0.0" } -cfg = { path = "./crates/cfg", version = "0.0.0", features = ["tt"] } +cfg = { path = "./crates/cfg", version = "0.0.0", features = ["tt", "syntax"] } hir = { path = "./crates/hir", version = "0.0.0" } hir-def = { path = "./crates/hir-def", version = "0.0.0" } hir-expand = { path = "./crates/hir-expand", version = "0.0.0" } @@ -131,7 +131,7 @@ process-wrap = { version = "8.2.1", features = ["std"] } pulldown-cmark-to-cmark = "10.0.4" pulldown-cmark = { version = "0.9.6", default-features = false } rayon = "1.10.0" -rowan = "=0.15.15" +rowan = "=0.15.17" # Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work # on impls without it salsa = { version = "0.24.0", default-features = true, features = [ @@ -167,6 +167,7 @@ tracing-subscriber = { version = "0.3.20", default-features = false, features = triomphe = { version = "0.1.14", default-features = false, features = ["std"] } url = "2.5.4" xshell = "0.2.7" +thin-vec = "0.2.14" petgraph = { version = "0.8.2", default-features = false } # We need to freeze the version of the crate, as the raw-api feature is considered unstable diff --git a/src/tools/rust-analyzer/crates/base-db/src/editioned_file_id.rs b/src/tools/rust-analyzer/crates/base-db/src/editioned_file_id.rs new file mode 100644 index 0000000000000..2f8969c0ea339 --- /dev/null +++ b/src/tools/rust-analyzer/crates/base-db/src/editioned_file_id.rs @@ -0,0 +1,291 @@ +//! Defines [`EditionedFileId`], an interned wrapper around [`span::EditionedFileId`] that +//! is interned (so queries can take it) and remembers its crate. 
+ +use core::fmt; +use std::hash::{Hash, Hasher}; + +use span::Edition; +use vfs::FileId; + +use crate::{Crate, RootQueryDb}; + +#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct EditionedFileId( + salsa::Id, + std::marker::PhantomData<&'static salsa::plumbing::interned::Value>, +); + +const _: () = { + use salsa::plumbing as zalsa_; + use zalsa_::interned as zalsa_struct_; + type Configuration_ = EditionedFileId; + + #[derive(Debug, Clone, PartialEq, Eq)] + pub struct EditionedFileIdData { + editioned_file_id: span::EditionedFileId, + krate: Crate, + } + + /// We like to include the origin crate in an `EditionedFileId` (for use in the item tree), + /// but this poses us a problem. + /// + /// Spans contain `EditionedFileId`s, and we don't want to make them store the crate too + /// because that will increase their size, which will increase memory usage significantly. + /// Furthermore, things using spans do not generally need the crate: they are using the + /// file id for queries like `ast_id_map` or `parse`, which do not care about the crate. + /// + /// To solve this, we hash **only the `span::EditionedFileId`**, but on still compare + /// the crate in equality check. This preserves the invariant of `Hash` and `Eq` - + /// although same hashes can be used for different items, same file ids used for multiple + /// crates is a rare thing, and different items always have different hashes. Then, + /// when we only have a `span::EditionedFileId`, we use the `intern()` method to + /// reuse existing file ids, and create new one only if needed. See [`from_span_guess_origin`]. + /// + /// See this for more info: https://rust-lang.zulipchat.com/#narrow/channel/185405-t-compiler.2Frust-analyzer/topic/Letting.20EditionedFileId.20know.20its.20crate/near/530189401 + /// + /// [`from_span_guess_origin`]: EditionedFileId::from_span_guess_origin + #[derive(Hash, PartialEq, Eq)] + struct WithoutCrate { + editioned_file_id: span::EditionedFileId, + } + + impl Hash for EditionedFileIdData { + #[inline] + fn hash(&self, state: &mut H) { + let EditionedFileIdData { editioned_file_id, krate: _ } = *self; + editioned_file_id.hash(state); + } + } + + impl zalsa_struct_::HashEqLike for EditionedFileIdData { + #[inline] + fn hash(&self, state: &mut H) { + Hash::hash(self, state); + } + + #[inline] + fn eq(&self, data: &WithoutCrate) -> bool { + let EditionedFileIdData { editioned_file_id, krate: _ } = *self; + editioned_file_id == data.editioned_file_id + } + } + + impl zalsa_::HasJar for EditionedFileId { + type Jar = zalsa_struct_::JarImpl; + const KIND: zalsa_::JarKind = zalsa_::JarKind::Struct; + } + + zalsa_::register_jar! 
{ + zalsa_::ErasedJar::erase::() + } + + impl zalsa_struct_::Configuration for EditionedFileId { + const LOCATION: salsa::plumbing::Location = + salsa::plumbing::Location { file: file!(), line: line!() }; + const DEBUG_NAME: &'static str = "EditionedFileId"; + const REVISIONS: std::num::NonZeroUsize = std::num::NonZeroUsize::MAX; + const PERSIST: bool = false; + + type Fields<'a> = EditionedFileIdData; + type Struct<'db> = EditionedFileId; + + fn serialize(_: &Self::Fields<'_>, _: S) -> Result + where + S: zalsa_::serde::Serializer, + { + unimplemented!("attempted to serialize value that set `PERSIST` to false") + } + + fn deserialize<'de, D>(_: D) -> Result, D::Error> + where + D: zalsa_::serde::Deserializer<'de>, + { + unimplemented!("attempted to deserialize value that cannot set `PERSIST` to false"); + } + } + + impl Configuration_ { + pub fn ingredient(zalsa: &zalsa_::Zalsa) -> &zalsa_struct_::IngredientImpl { + static CACHE: zalsa_::IngredientCache> = + zalsa_::IngredientCache::new(); + + // SAFETY: `lookup_jar_by_type` returns a valid ingredient index, and the only + // ingredient created by our jar is the struct ingredient. + unsafe { + CACHE.get_or_create(zalsa, || { + zalsa.lookup_jar_by_type::>() + }) + } + } + } + + impl zalsa_::AsId for EditionedFileId { + fn as_id(&self) -> salsa::Id { + self.0.as_id() + } + } + impl zalsa_::FromId for EditionedFileId { + fn from_id(id: salsa::Id) -> Self { + Self(::from_id(id), std::marker::PhantomData) + } + } + + unsafe impl Send for EditionedFileId {} + unsafe impl Sync for EditionedFileId {} + + impl std::fmt::Debug for EditionedFileId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + Self::default_debug_fmt(*self, f) + } + } + + impl zalsa_::SalsaStructInDb for EditionedFileId { + type MemoIngredientMap = salsa::plumbing::MemoIngredientSingletonIndex; + + fn lookup_ingredient_index(aux: &zalsa_::Zalsa) -> salsa::plumbing::IngredientIndices { + aux.lookup_jar_by_type::>().into() + } + + fn entries(zalsa: &zalsa_::Zalsa) -> impl Iterator + '_ { + let _ingredient_index = + zalsa.lookup_jar_by_type::>(); + ::ingredient(zalsa).entries(zalsa).map(|entry| entry.key()) + } + + #[inline] + fn cast(id: salsa::Id, type_id: std::any::TypeId) -> Option { + if type_id == std::any::TypeId::of::() { + Some(::from_id(id)) + } else { + None + } + } + + #[inline] + unsafe fn memo_table( + zalsa: &zalsa_::Zalsa, + id: zalsa_::Id, + current_revision: zalsa_::Revision, + ) -> zalsa_::MemoTableWithTypes<'_> { + // SAFETY: Guaranteed by caller. + unsafe { + zalsa.table().memos::>(id, current_revision) + } + } + } + + unsafe impl zalsa_::Update for EditionedFileId { + unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool { + if unsafe { *old_pointer } != new_value { + unsafe { *old_pointer = new_value }; + true + } else { + false + } + } + } + + impl EditionedFileId { + pub fn from_span( + db: &(impl salsa::Database + ?Sized), + editioned_file_id: span::EditionedFileId, + krate: Crate, + ) -> Self { + let (zalsa, zalsa_local) = db.zalsas(); + Configuration_::ingredient(zalsa).intern( + zalsa, + zalsa_local, + EditionedFileIdData { editioned_file_id, krate }, + |_, data| data, + ) + } + + /// Guesses the crate for the file. + /// + /// Only use this if you cannot precisely determine the origin. This can happen in one of two cases: + /// + /// 1. The file is not in the module tree. + /// 2. You are latency sensitive and cannot afford calling the def map to precisely compute the origin + /// (e.g. 
on enter feature, folding, etc.). + pub fn from_span_guess_origin( + db: &dyn RootQueryDb, + editioned_file_id: span::EditionedFileId, + ) -> Self { + let (zalsa, zalsa_local) = db.zalsas(); + Configuration_::ingredient(zalsa).intern( + zalsa, + zalsa_local, + WithoutCrate { editioned_file_id }, + |_, _| { + // FileId not in the database. + let krate = db + .relevant_crates(editioned_file_id.file_id()) + .first() + .copied() + .unwrap_or_else(|| db.all_crates()[0]); + EditionedFileIdData { editioned_file_id, krate } + }, + ) + } + + pub fn editioned_file_id(self, db: &dyn salsa::Database) -> span::EditionedFileId { + let zalsa = db.zalsa(); + let fields = Configuration_::ingredient(zalsa).fields(zalsa, self); + fields.editioned_file_id + } + + pub fn krate(self, db: &dyn salsa::Database) -> Crate { + let zalsa = db.zalsa(); + let fields = Configuration_::ingredient(zalsa).fields(zalsa, self); + fields.krate + } + + /// Default debug formatting for this struct (may be useful if you define your own `Debug` impl) + pub fn default_debug_fmt(this: Self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + zalsa_::with_attached_database(|db| { + let zalsa = db.zalsa(); + let fields = Configuration_::ingredient(zalsa).fields(zalsa, this); + fmt::Debug::fmt(fields, f) + }) + .unwrap_or_else(|| { + f.debug_tuple("EditionedFileId").field(&zalsa_::AsId::as_id(&this)).finish() + }) + } + } +}; + +impl EditionedFileId { + #[inline] + pub fn new(db: &dyn salsa::Database, file_id: FileId, edition: Edition, krate: Crate) -> Self { + EditionedFileId::from_span(db, span::EditionedFileId::new(file_id, edition), krate) + } + + /// Attaches the current edition and guesses the crate for the file. + /// + /// Only use this if you cannot precisely determine the origin. This can happen in one of two cases: + /// + /// 1. The file is not in the module tree. + /// 2. You are latency sensitive and cannot afford calling the def map to precisely compute the origin + /// (e.g. on enter feature, folding, etc.). 
+ #[inline] + pub fn current_edition_guess_origin(db: &dyn RootQueryDb, file_id: FileId) -> Self { + Self::from_span_guess_origin(db, span::EditionedFileId::current_edition(file_id)) + } + + #[inline] + pub fn file_id(self, db: &dyn salsa::Database) -> vfs::FileId { + let id = self.editioned_file_id(db); + id.file_id() + } + + #[inline] + pub fn unpack(self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) { + let id = self.editioned_file_id(db); + (id.file_id(), id.edition()) + } + + #[inline] + pub fn edition(self, db: &dyn salsa::Database) -> Edition { + self.editioned_file_id(db).edition() + } +} diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs index cac74778a26b0..28539d59825f1 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/input.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs @@ -829,9 +829,10 @@ pub(crate) fn transitive_rev_deps(db: &dyn RootQueryDb, of: Crate) -> FxHashSet< rev_deps } -impl BuiltCrateData { - pub fn root_file_id(&self, db: &dyn salsa::Database) -> EditionedFileId { - EditionedFileId::new(db, self.root_file_id, self.edition) +impl Crate { + pub fn root_file_id(self, db: &dyn salsa::Database) -> EditionedFileId { + let data = self.data(db); + EditionedFileId::new(db, data.root_file_id, data.edition, self) } } diff --git a/src/tools/rust-analyzer/crates/base-db/src/lib.rs b/src/tools/rust-analyzer/crates/base-db/src/lib.rs index 0e411bcfae60e..32909af5d78d5 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/lib.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/lib.rs @@ -5,6 +5,7 @@ pub use salsa_macros; // FIXME: Rename this crate, base db is non descriptive mod change; +mod editioned_file_id; mod input; pub mod target; @@ -17,6 +18,7 @@ use std::{ pub use crate::{ change::FileChange, + editioned_file_id::EditionedFileId, input::{ BuiltCrateData, BuiltDependency, Crate, CrateBuilder, CrateBuilderId, CrateDataBuilder, CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CratesIdMap, CratesMap, @@ -29,7 +31,6 @@ pub use query_group::{self}; use rustc_hash::{FxHashSet, FxHasher}; use salsa::{Durability, Setter}; pub use semver::{BuildMetadata, Prerelease, Version, VersionReq}; -use span::Edition; use syntax::{Parse, SyntaxError, ast}; use triomphe::Arc; pub use vfs::{AnchoredPath, AnchoredPathBuf, FileId, VfsPath, file_set::FileSet}; @@ -175,42 +176,6 @@ impl Files { } } -#[salsa_macros::interned(no_lifetime, debug, constructor=from_span, revisions = usize::MAX)] -#[derive(PartialOrd, Ord)] -pub struct EditionedFileId { - pub editioned_file_id: span::EditionedFileId, -} - -impl EditionedFileId { - // Salsa already uses the name `new`... 
- #[inline] - pub fn new(db: &dyn salsa::Database, file_id: FileId, edition: Edition) -> Self { - EditionedFileId::from_span(db, span::EditionedFileId::new(file_id, edition)) - } - - #[inline] - pub fn current_edition(db: &dyn salsa::Database, file_id: FileId) -> Self { - EditionedFileId::new(db, file_id, Edition::CURRENT) - } - - #[inline] - pub fn file_id(self, db: &dyn salsa::Database) -> vfs::FileId { - let id = self.editioned_file_id(db); - id.file_id() - } - - #[inline] - pub fn unpack(self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) { - let id = self.editioned_file_id(db); - (id.file_id(), id.edition()) - } - - #[inline] - pub fn edition(self, db: &dyn SourceDatabase) -> Edition { - self.editioned_file_id(db).edition() - } -} - #[salsa_macros::input(debug)] pub struct FileText { #[returns(ref)] diff --git a/src/tools/rust-analyzer/crates/cfg/Cargo.toml b/src/tools/rust-analyzer/crates/cfg/Cargo.toml index e17969bd82d41..9e2a95dbf32c0 100644 --- a/src/tools/rust-analyzer/crates/cfg/Cargo.toml +++ b/src/tools/rust-analyzer/crates/cfg/Cargo.toml @@ -18,6 +18,7 @@ tracing.workspace = true # locals deps tt = { workspace = true, optional = true } +syntax = { workspace = true, optional = true } intern.workspace = true [dev-dependencies] diff --git a/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs b/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs index 7a21015e14bec..76e0aba859e68 100644 --- a/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs +++ b/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs @@ -63,6 +63,8 @@ impl From for CfgExpr { } impl CfgExpr { + // FIXME: Parsing from `tt` is only used in a handful of places, reconsider + // if we should switch them to AST. #[cfg(feature = "tt")] pub fn parse(tt: &tt::TopSubtree) -> CfgExpr { next_cfg_expr(&mut tt.iter()).unwrap_or(CfgExpr::Invalid) @@ -73,6 +75,13 @@ impl CfgExpr { next_cfg_expr(tt).unwrap_or(CfgExpr::Invalid) } + #[cfg(feature = "syntax")] + pub fn parse_from_ast( + ast: &mut std::iter::Peekable, + ) -> CfgExpr { + next_cfg_expr_from_ast(ast).unwrap_or(CfgExpr::Invalid) + } + /// Fold the cfg by querying all basic `Atom` and `KeyValue` predicates. 
pub fn fold(&self, query: &dyn Fn(&CfgAtom) -> bool) -> Option { match self { @@ -89,6 +98,56 @@ impl CfgExpr { } } +#[cfg(feature = "syntax")] +fn next_cfg_expr_from_ast( + it: &mut std::iter::Peekable, +) -> Option { + use intern::sym; + use syntax::{NodeOrToken, SyntaxKind, T, ast}; + + let name = match it.next() { + None => return None, + Some(NodeOrToken::Token(ident)) if ident.kind().is_any_identifier() => { + Symbol::intern(ident.text()) + } + Some(_) => return Some(CfgExpr::Invalid), + }; + + let ret = match it.peek() { + Some(NodeOrToken::Token(eq)) if eq.kind() == T![=] => { + it.next(); + if let Some(NodeOrToken::Token(literal)) = it.peek() + && matches!(literal.kind(), SyntaxKind::STRING) + { + let literal = tt::token_to_literal(literal.text(), ()).symbol; + it.next(); + CfgAtom::KeyValue { key: name, value: literal.clone() }.into() + } else { + return Some(CfgExpr::Invalid); + } + } + Some(NodeOrToken::Node(subtree)) => { + let mut subtree_iter = ast::TokenTreeChildren::new(subtree).peekable(); + it.next(); + let mut subs = std::iter::from_fn(|| next_cfg_expr_from_ast(&mut subtree_iter)); + match name { + s if s == sym::all => CfgExpr::All(subs.collect()), + s if s == sym::any => CfgExpr::Any(subs.collect()), + s if s == sym::not => { + CfgExpr::Not(Box::new(subs.next().unwrap_or(CfgExpr::Invalid))) + } + _ => CfgExpr::Invalid, + } + } + _ => CfgAtom::Flag(name).into(), + }; + + // Eat comma separator + while it.next().is_some_and(|it| it.as_token().is_none_or(|it| it.kind() != T![,])) {} + + Some(ret) +} + #[cfg(feature = "tt")] fn next_cfg_expr(it: &mut tt::iter::TtIter<'_, S>) -> Option { use intern::sym; diff --git a/src/tools/rust-analyzer/crates/cfg/src/tests.rs b/src/tools/rust-analyzer/crates/cfg/src/tests.rs index 6766748097f00..52c581dbbd3ae 100644 --- a/src/tools/rust-analyzer/crates/cfg/src/tests.rs +++ b/src/tools/rust-analyzer/crates/cfg/src/tests.rs @@ -1,7 +1,10 @@ use arbitrary::{Arbitrary, Unstructured}; use expect_test::{Expect, expect}; use intern::Symbol; -use syntax::{AstNode, Edition, ast}; +use syntax::{ + AstNode, Edition, + ast::{self, TokenTreeChildren}, +}; use syntax_bridge::{ DocCommentDesugarMode, dummy_test_span_utils::{DUMMY, DummyTestSpanMap}, @@ -10,24 +13,33 @@ use syntax_bridge::{ use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr}; +#[track_caller] +fn parse_ast_cfg(tt: &ast::TokenTree) -> CfgExpr { + CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(tt).peekable()) +} + +#[track_caller] fn assert_parse_result(input: &str, expected: CfgExpr) { let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap(); - let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = syntax_node_to_token_tree( - tt.syntax(), + tt_ast.syntax(), DummyTestSpanMap, DUMMY, DocCommentDesugarMode::ProcMacro, ); let cfg = CfgExpr::parse(&tt); assert_eq!(cfg, expected); + let cfg = parse_ast_cfg(&tt_ast); + assert_eq!(cfg, expected); } +#[track_caller] fn check_dnf(input: &str, expect: Expect) { let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap(); - let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = syntax_node_to_token_tree( - tt.syntax(), + tt_ast.syntax(), DummyTestSpanMap, DUMMY, DocCommentDesugarMode::ProcMacro, @@ -35,13 +47,17 @@ fn check_dnf(input: &str, 
expect: Expect) { let cfg = CfgExpr::parse(&tt); let actual = format!("#![cfg({})]", DnfExpr::new(&cfg)); expect.assert_eq(&actual); + let cfg = parse_ast_cfg(&tt_ast); + let actual = format!("#![cfg({})]", DnfExpr::new(&cfg)); + expect.assert_eq(&actual); } +#[track_caller] fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) { let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap(); - let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = syntax_node_to_token_tree( - tt.syntax(), + tt_ast.syntax(), DummyTestSpanMap, DUMMY, DocCommentDesugarMode::ProcMacro, @@ -50,14 +66,18 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) { let dnf = DnfExpr::new(&cfg); let why_inactive = dnf.why_inactive(opts).unwrap().to_string(); expect.assert_eq(&why_inactive); + let cfg = parse_ast_cfg(&tt_ast); + let dnf = DnfExpr::new(&cfg); + let why_inactive = dnf.why_inactive(opts).unwrap().to_string(); + expect.assert_eq(&why_inactive); } #[track_caller] fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) { let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap(); - let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = syntax_node_to_token_tree( - tt.syntax(), + tt_ast.syntax(), DummyTestSpanMap, DUMMY, DocCommentDesugarMode::ProcMacro, @@ -66,6 +86,10 @@ fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) { let dnf = DnfExpr::new(&cfg); let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::>(); assert_eq!(hints, expected_hints); + let cfg = parse_ast_cfg(&tt_ast); + let dnf = DnfExpr::new(&cfg); + let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::>(); + assert_eq!(hints, expected_hints); } #[test] diff --git a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml index abb4819a7672a..e1f60742d3249 100644 --- a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml @@ -45,7 +45,8 @@ mbe.workspace = true cfg.workspace = true tt.workspace = true span.workspace = true -thin-vec = "0.2.14" +thin-vec.workspace = true +syntax-bridge.workspace = true [dev-dependencies] expect-test.workspace = true @@ -53,7 +54,6 @@ expect-test.workspace = true # local deps test-utils.workspace = true test-fixture.workspace = true -syntax-bridge.workspace = true [features] in-rust-tree = ["hir-expand/in-rust-tree"] diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs deleted file mode 100644 index b4fcfa11aea74..0000000000000 --- a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs +++ /dev/null @@ -1,901 +0,0 @@ -//! A higher level attributes based on TokenTree, with also some shortcuts. 
- -use std::{borrow::Cow, convert::identity, hash::Hash, ops}; - -use base_db::Crate; -use cfg::{CfgExpr, CfgOptions}; -use either::Either; -use hir_expand::{ - HirFileId, InFile, - attrs::{Attr, AttrId, RawAttrs, collect_attrs}, - span_map::SpanMapRef, -}; -use intern::{Symbol, sym}; -use la_arena::{ArenaMap, Idx, RawIdx}; -use mbe::DelimiterKind; -use rustc_abi::ReprOptions; -use span::AstIdNode; -use syntax::{ - AstPtr, - ast::{self, HasAttrs}, -}; -use triomphe::Arc; -use tt::iter::{TtElement, TtIter}; - -use crate::{ - AdtId, AstIdLoc, AttrDefId, GenericParamId, HasModule, LocalFieldId, Lookup, MacroId, - VariantId, - db::DefDatabase, - item_tree::block_item_tree_query, - lang_item::LangItem, - nameres::{ModuleOrigin, ModuleSource}, - src::{HasChildSource, HasSource}, -}; - -/// Desugared attributes of an item post `cfg_attr` expansion. -#[derive(Default, Debug, Clone, PartialEq, Eq)] -pub struct Attrs(RawAttrs); - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct AttrsWithOwner { - attrs: Attrs, - owner: AttrDefId, -} - -impl Attrs { - pub fn new( - db: &dyn DefDatabase, - owner: &dyn ast::HasAttrs, - span_map: SpanMapRef<'_>, - cfg_options: &CfgOptions, - ) -> Self { - Attrs(RawAttrs::new_expanded(db, owner, span_map, cfg_options)) - } - - pub fn get(&self, id: AttrId) -> Option<&Attr> { - (**self).iter().find(|attr| attr.id == id) - } - - pub(crate) fn expand_cfg_attr( - db: &dyn DefDatabase, - krate: Crate, - raw_attrs: RawAttrs, - ) -> Attrs { - Attrs(raw_attrs.expand_cfg_attr(db, krate)) - } - - pub(crate) fn is_cfg_enabled_for( - db: &dyn DefDatabase, - owner: &dyn ast::HasAttrs, - span_map: SpanMapRef<'_>, - cfg_options: &CfgOptions, - ) -> Result<(), CfgExpr> { - RawAttrs::attrs_iter_expanded::(db, owner, span_map, cfg_options) - .filter_map(|attr| attr.cfg()) - .find_map(|cfg| match cfg_options.check(&cfg).is_none_or(identity) { - true => None, - false => Some(cfg), - }) - .map_or(Ok(()), Err) - } -} - -impl ops::Deref for Attrs { - type Target = [Attr]; - - fn deref(&self) -> &[Attr] { - &self.0 - } -} - -impl ops::Deref for AttrsWithOwner { - type Target = Attrs; - - fn deref(&self) -> &Attrs { - &self.attrs - } -} - -impl Attrs { - pub const EMPTY: Self = Self(RawAttrs::EMPTY); - - pub(crate) fn fields_attrs_query( - db: &dyn DefDatabase, - v: VariantId, - ) -> Arc> { - let _p = tracing::info_span!("fields_attrs_query").entered(); - let mut res = ArenaMap::default(); - let (fields, file_id, krate) = match v { - VariantId::EnumVariantId(it) => { - let loc = it.lookup(db); - let krate = loc.parent.lookup(db).container.krate; - let source = loc.source(db); - (source.value.field_list(), source.file_id, krate) - } - VariantId::StructId(it) => { - let loc = it.lookup(db); - let krate = loc.container.krate; - let source = loc.source(db); - (source.value.field_list(), source.file_id, krate) - } - VariantId::UnionId(it) => { - let loc = it.lookup(db); - let krate = loc.container.krate; - let source = loc.source(db); - ( - source.value.record_field_list().map(ast::FieldList::RecordFieldList), - source.file_id, - krate, - ) - } - }; - let Some(fields) = fields else { - return Arc::new(res); - }; - - let cfg_options = krate.cfg_options(db); - let span_map = db.span_map(file_id); - - match fields { - ast::FieldList::RecordFieldList(fields) => { - let mut idx = 0; - for field in fields.fields() { - let attrs = - Attrs(RawAttrs::new_expanded(db, &field, span_map.as_ref(), cfg_options)); - if attrs.is_cfg_enabled(cfg_options).is_ok() { - res.insert(Idx::from_raw(RawIdx::from(idx)), 
attrs); - idx += 1; - } - } - } - ast::FieldList::TupleFieldList(fields) => { - let mut idx = 0; - for field in fields.fields() { - let attrs = - Attrs(RawAttrs::new_expanded(db, &field, span_map.as_ref(), cfg_options)); - if attrs.is_cfg_enabled(cfg_options).is_ok() { - res.insert(Idx::from_raw(RawIdx::from(idx)), attrs); - idx += 1; - } - } - } - } - - res.shrink_to_fit(); - Arc::new(res) - } -} - -impl Attrs { - #[inline] - pub fn by_key(&self, key: Symbol) -> AttrQuery<'_> { - AttrQuery { attrs: self, key } - } - - #[inline] - pub fn rust_analyzer_tool(&self) -> impl Iterator { - self.iter() - .filter(|&attr| attr.path.segments().first().is_some_and(|s| *s == sym::rust_analyzer)) - } - - #[inline] - pub fn cfg(&self) -> Option { - let mut cfgs = self.by_key(sym::cfg).tt_values().map(CfgExpr::parse); - let first = cfgs.next()?; - match cfgs.next() { - Some(second) => { - let cfgs = [first, second].into_iter().chain(cfgs); - Some(CfgExpr::All(cfgs.collect())) - } - None => Some(first), - } - } - - #[inline] - pub fn cfgs(&self) -> impl Iterator + '_ { - self.by_key(sym::cfg).tt_values().map(CfgExpr::parse) - } - - #[inline] - pub(crate) fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> Result<(), CfgExpr> { - self.cfgs().try_for_each(|cfg| { - if cfg_options.check(&cfg) != Some(false) { Ok(()) } else { Err(cfg) } - }) - } - - #[inline] - pub fn lang(&self) -> Option<&Symbol> { - self.by_key(sym::lang).string_value() - } - - #[inline] - pub fn lang_item(&self) -> Option { - self.by_key(sym::lang).string_value().and_then(LangItem::from_symbol) - } - - #[inline] - pub fn has_doc_hidden(&self) -> bool { - self.by_key(sym::doc).tt_values().any(|tt| { - tt.top_subtree().delimiter.kind == DelimiterKind::Parenthesis && - matches!(tt.token_trees().flat_tokens(), [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::hidden) - }) - } - - #[inline] - pub fn has_doc_notable_trait(&self) -> bool { - self.by_key(sym::doc).tt_values().any(|tt| { - tt.top_subtree().delimiter.kind == DelimiterKind::Parenthesis && - matches!(tt.token_trees().flat_tokens(), [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::notable_trait) - }) - } - - #[inline] - pub fn doc_exprs(&self) -> impl Iterator + '_ { - self.by_key(sym::doc).tt_values().map(DocExpr::parse) - } - - #[inline] - pub fn doc_aliases(&self) -> impl Iterator + '_ { - self.doc_exprs().flat_map(|doc_expr| doc_expr.aliases().to_vec()) - } - - #[inline] - pub fn export_name(&self) -> Option<&Symbol> { - self.by_key(sym::export_name).string_value() - } - - #[inline] - pub fn is_proc_macro(&self) -> bool { - self.by_key(sym::proc_macro).exists() - } - - #[inline] - pub fn is_proc_macro_attribute(&self) -> bool { - self.by_key(sym::proc_macro_attribute).exists() - } - - #[inline] - pub fn is_proc_macro_derive(&self) -> bool { - self.by_key(sym::proc_macro_derive).exists() - } - - #[inline] - pub fn is_test(&self) -> bool { - self.iter().any(|it| { - it.path() - .segments() - .iter() - .rev() - .zip([sym::core, sym::prelude, sym::v1, sym::test].iter().rev()) - .all(|it| it.0 == it.1) - }) - } - - #[inline] - pub fn is_ignore(&self) -> bool { - self.by_key(sym::ignore).exists() - } - - #[inline] - pub fn is_bench(&self) -> bool { - self.by_key(sym::bench).exists() - } - - #[inline] - pub fn is_unstable(&self) -> bool { - self.by_key(sym::unstable).exists() - } - - #[inline] - pub fn rustc_legacy_const_generics(&self) -> Option>> { - self.by_key(sym::rustc_legacy_const_generics) - .tt_values() - .next() - 
.map(parse_rustc_legacy_const_generics) - .filter(|it| !it.is_empty()) - .map(Box::new) - } - - #[inline] - pub fn repr(&self) -> Option { - self.by_key(sym::repr).tt_values().filter_map(parse_repr_tt).fold(None, |acc, repr| { - acc.map_or(Some(repr), |mut acc| { - merge_repr(&mut acc, repr); - Some(acc) - }) - }) - } -} - -fn parse_rustc_legacy_const_generics(tt: &crate::tt::TopSubtree) -> Box<[u32]> { - let mut indices = Vec::new(); - let mut iter = tt.iter(); - while let (Some(first), second) = (iter.next(), iter.next()) { - match first { - TtElement::Leaf(tt::Leaf::Literal(lit)) => match lit.symbol.as_str().parse() { - Ok(index) => indices.push(index), - Err(_) => break, - }, - _ => break, - } - - if let Some(comma) = second { - match comma { - TtElement::Leaf(tt::Leaf::Punct(punct)) if punct.char == ',' => {} - _ => break, - } - } - } - - indices.into_boxed_slice() -} - -fn merge_repr(this: &mut ReprOptions, other: ReprOptions) { - let ReprOptions { int, align, pack, flags, field_shuffle_seed: _ } = this; - flags.insert(other.flags); - *align = (*align).max(other.align); - *pack = match (*pack, other.pack) { - (Some(pack), None) | (None, Some(pack)) => Some(pack), - _ => (*pack).min(other.pack), - }; - if other.int.is_some() { - *int = other.int; - } -} - -fn parse_repr_tt(tt: &crate::tt::TopSubtree) -> Option { - use crate::builtin_type::{BuiltinInt, BuiltinUint}; - use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions}; - - match tt.top_subtree().delimiter { - tt::Delimiter { kind: DelimiterKind::Parenthesis, .. } => {} - _ => return None, - } - - let mut acc = ReprOptions::default(); - let mut tts = tt.iter(); - while let Some(tt) = tts.next() { - let TtElement::Leaf(tt::Leaf::Ident(ident)) = tt else { - continue; - }; - let repr = match &ident.sym { - s if *s == sym::packed => { - let pack = if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() { - tts.next(); - if let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = tt_iter.next() { - lit.symbol.as_str().parse().unwrap_or_default() - } else { - 0 - } - } else { - 0 - }; - let pack = Some(Align::from_bytes(pack).unwrap_or(Align::ONE)); - ReprOptions { pack, ..Default::default() } - } - s if *s == sym::align => { - let mut align = None; - if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() { - tts.next(); - if let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = tt_iter.next() - && let Ok(a) = lit.symbol.as_str().parse() - { - align = Align::from_bytes(a).ok(); - } - } - ReprOptions { align, ..Default::default() } - } - s if *s == sym::C => ReprOptions { flags: ReprFlags::IS_C, ..Default::default() }, - s if *s == sym::transparent => { - ReprOptions { flags: ReprFlags::IS_TRANSPARENT, ..Default::default() } - } - s if *s == sym::simd => ReprOptions { flags: ReprFlags::IS_SIMD, ..Default::default() }, - repr => { - let mut int = None; - if let Some(builtin) = BuiltinInt::from_suffix_sym(repr) - .map(Either::Left) - .or_else(|| BuiltinUint::from_suffix_sym(repr).map(Either::Right)) - { - int = Some(match builtin { - Either::Left(bi) => match bi { - BuiltinInt::Isize => IntegerType::Pointer(true), - BuiltinInt::I8 => IntegerType::Fixed(Integer::I8, true), - BuiltinInt::I16 => IntegerType::Fixed(Integer::I16, true), - BuiltinInt::I32 => IntegerType::Fixed(Integer::I32, true), - BuiltinInt::I64 => IntegerType::Fixed(Integer::I64, true), - BuiltinInt::I128 => IntegerType::Fixed(Integer::I128, true), - }, - Either::Right(bu) => match bu { - BuiltinUint::Usize => IntegerType::Pointer(false), - BuiltinUint::U8 => 
IntegerType::Fixed(Integer::I8, false), - BuiltinUint::U16 => IntegerType::Fixed(Integer::I16, false), - BuiltinUint::U32 => IntegerType::Fixed(Integer::I32, false), - BuiltinUint::U64 => IntegerType::Fixed(Integer::I64, false), - BuiltinUint::U128 => IntegerType::Fixed(Integer::I128, false), - }, - }); - } - ReprOptions { int, ..Default::default() } - } - }; - merge_repr(&mut acc, repr); - } - - Some(acc) -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum DocAtom { - /// eg. `#[doc(hidden)]` - Flag(Symbol), - /// eg. `#[doc(alias = "it")]` - /// - /// Note that a key can have multiple values that are all considered "active" at the same time. - /// For example, `#[doc(alias = "x")]` and `#[doc(alias = "y")]`. - KeyValue { key: Symbol, value: Symbol }, -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum DocExpr { - Invalid, - /// eg. `#[doc(hidden)]`, `#[doc(alias = "x")]` - Atom(DocAtom), - /// eg. `#[doc(alias("x", "y"))]` - Alias(Vec), -} - -impl From for DocExpr { - fn from(atom: DocAtom) -> Self { - DocExpr::Atom(atom) - } -} - -impl DocExpr { - fn parse(tt: &tt::TopSubtree) -> DocExpr { - next_doc_expr(tt.iter()).unwrap_or(DocExpr::Invalid) - } - - pub fn aliases(&self) -> &[Symbol] { - match self { - DocExpr::Atom(DocAtom::KeyValue { key, value }) if *key == sym::alias => { - std::slice::from_ref(value) - } - DocExpr::Alias(aliases) => aliases, - _ => &[], - } - } -} - -fn next_doc_expr(mut it: TtIter<'_, S>) -> Option { - let name = match it.next() { - None => return None, - Some(TtElement::Leaf(tt::Leaf::Ident(ident))) => ident.sym.clone(), - Some(_) => return Some(DocExpr::Invalid), - }; - - // Peek - let ret = match it.peek() { - Some(TtElement::Leaf(tt::Leaf::Punct(punct))) if punct.char == '=' => { - it.next(); - match it.next() { - Some(TtElement::Leaf(tt::Leaf::Literal(tt::Literal { - symbol: text, - kind: tt::LitKind::Str, - .. - }))) => DocAtom::KeyValue { key: name, value: text.clone() }.into(), - _ => return Some(DocExpr::Invalid), - } - } - Some(TtElement::Subtree(_, subtree_iter)) => { - it.next(); - let subs = parse_comma_sep(subtree_iter); - match &name { - s if *s == sym::alias => DocExpr::Alias(subs), - _ => DocExpr::Invalid, - } - } - _ => DocAtom::Flag(name).into(), - }; - Some(ret) -} - -fn parse_comma_sep(iter: TtIter<'_, S>) -> Vec { - iter.filter_map(|tt| match tt { - TtElement::Leaf(tt::Leaf::Literal(tt::Literal { - kind: tt::LitKind::Str, symbol, .. - })) => Some(symbol.clone()), - _ => None, - }) - .collect() -} - -impl AttrsWithOwner { - pub fn new(db: &dyn DefDatabase, owner: AttrDefId) -> Self { - Self { attrs: db.attrs(owner), owner } - } - - pub(crate) fn attrs_query(db: &dyn DefDatabase, def: AttrDefId) -> Attrs { - let _p = tracing::info_span!("attrs_query").entered(); - // FIXME: this should use `Trace` to avoid duplication in `source_map` below - match def { - AttrDefId::ModuleId(module) => { - let def_map = module.def_map(db); - let mod_data = &def_map[module.local_id]; - - let raw_attrs = match mod_data.origin { - ModuleOrigin::File { definition, declaration_tree_id, declaration, .. 
} => { - let decl_attrs = declaration_tree_id - .item_tree(db) - .raw_attrs(declaration.upcast()) - .clone(); - let tree = db.file_item_tree(definition.into()); - let def_attrs = tree.top_level_raw_attrs().clone(); - decl_attrs.merge(def_attrs) - } - ModuleOrigin::CrateRoot { definition } => { - let tree = db.file_item_tree(definition.into()); - tree.top_level_raw_attrs().clone() - } - ModuleOrigin::Inline { definition_tree_id, definition } => { - definition_tree_id.item_tree(db).raw_attrs(definition.upcast()).clone() - } - ModuleOrigin::BlockExpr { id, .. } => { - let tree = block_item_tree_query(db, id); - tree.top_level_raw_attrs().clone() - } - }; - Attrs::expand_cfg_attr(db, module.krate, raw_attrs) - } - AttrDefId::FieldId(it) => db.fields_attrs(it.parent)[it.local_id].clone(), - AttrDefId::EnumVariantId(it) => attrs_from_ast_id_loc(db, it), - AttrDefId::AdtId(it) => match it { - AdtId::StructId(it) => attrs_from_ast_id_loc(db, it), - AdtId::EnumId(it) => attrs_from_ast_id_loc(db, it), - AdtId::UnionId(it) => attrs_from_ast_id_loc(db, it), - }, - AttrDefId::TraitId(it) => attrs_from_ast_id_loc(db, it), - AttrDefId::MacroId(it) => match it { - MacroId::Macro2Id(it) => attrs_from_ast_id_loc(db, it), - MacroId::MacroRulesId(it) => attrs_from_ast_id_loc(db, it), - MacroId::ProcMacroId(it) => attrs_from_ast_id_loc(db, it), - }, - AttrDefId::ImplId(it) => attrs_from_ast_id_loc(db, it), - AttrDefId::ConstId(it) => attrs_from_ast_id_loc(db, it), - AttrDefId::StaticId(it) => attrs_from_ast_id_loc(db, it), - AttrDefId::FunctionId(it) => attrs_from_ast_id_loc(db, it), - AttrDefId::TypeAliasId(it) => attrs_from_ast_id_loc(db, it), - AttrDefId::GenericParamId(it) => match it { - GenericParamId::ConstParamId(it) => { - let src = it.parent().child_source(db); - // FIXME: We should be never getting `None` here. - Attrs(match src.value.get(it.local_id()) { - Some(val) => RawAttrs::new_expanded( - db, - val, - db.span_map(src.file_id).as_ref(), - def.krate(db).cfg_options(db), - ), - None => RawAttrs::EMPTY, - }) - } - GenericParamId::TypeParamId(it) => { - let src = it.parent().child_source(db); - // FIXME: We should be never getting `None` here. - Attrs(match src.value.get(it.local_id()) { - Some(val) => RawAttrs::new_expanded( - db, - val, - db.span_map(src.file_id).as_ref(), - def.krate(db).cfg_options(db), - ), - None => RawAttrs::EMPTY, - }) - } - GenericParamId::LifetimeParamId(it) => { - let src = it.parent.child_source(db); - // FIXME: We should be never getting `None` here. - Attrs(match src.value.get(it.local_id) { - Some(val) => RawAttrs::new_expanded( - db, - val, - db.span_map(src.file_id).as_ref(), - def.krate(db).cfg_options(db), - ), - None => RawAttrs::EMPTY, - }) - } - }, - AttrDefId::ExternBlockId(it) => attrs_from_ast_id_loc(db, it), - AttrDefId::ExternCrateId(it) => attrs_from_ast_id_loc(db, it), - AttrDefId::UseId(it) => attrs_from_ast_id_loc(db, it), - } - } - - pub fn source_map(&self, db: &dyn DefDatabase) -> AttrSourceMap { - let owner = match self.owner { - AttrDefId::ModuleId(module) => { - // Modules can have 2 attribute owners (the `mod x;` item, and the module file itself). 
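Outline modules are the one case with two attribute owners: the `mod foo;` declaration and the file that defines the module. Both the `AttrSourceMap` being removed here and the new `Docs` handling have to merge the two. Sketched as two files in one listing, with illustrative names:

```rust
// src/lib.rs: the declaration site, the first attribute owner.
/// Outer doc comment attached to the `mod` item.
#[allow(unused)]
mod foo;

// src/foo.rs: the definition site, the second attribute owner.
//! Inner doc comment at the top of the module's own file.
#![allow(dead_code)]
```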
- - let def_map = module.def_map(db); - let mod_data = &def_map[module.local_id]; - match mod_data.declaration_source(db) { - Some(it) => { - let mut map = AttrSourceMap::new(InFile::new(it.file_id, &it.value)); - if let InFile { file_id, value: ModuleSource::SourceFile(file) } = - mod_data.definition_source(db) - { - map.append_module_inline_attrs(AttrSourceMap::new(InFile::new( - file_id, &file, - ))); - } - return map; - } - None => { - let InFile { file_id, value } = mod_data.definition_source(db); - let attrs_owner = match &value { - ModuleSource::SourceFile(file) => file as &dyn ast::HasAttrs, - ModuleSource::Module(module) => module as &dyn ast::HasAttrs, - ModuleSource::BlockExpr(block) => block as &dyn ast::HasAttrs, - }; - return AttrSourceMap::new(InFile::new(file_id, attrs_owner)); - } - } - } - AttrDefId::FieldId(id) => { - let map = db.fields_attrs_source_map(id.parent); - let file_id = id.parent.file_id(db); - let root = db.parse_or_expand(file_id); - let owner = ast::AnyHasAttrs::new(map[id.local_id].to_node(&root)); - InFile::new(file_id, owner) - } - AttrDefId::AdtId(adt) => match adt { - AdtId::StructId(id) => any_has_attrs(db, id), - AdtId::UnionId(id) => any_has_attrs(db, id), - AdtId::EnumId(id) => any_has_attrs(db, id), - }, - AttrDefId::FunctionId(id) => any_has_attrs(db, id), - AttrDefId::EnumVariantId(id) => any_has_attrs(db, id), - AttrDefId::StaticId(id) => any_has_attrs(db, id), - AttrDefId::ConstId(id) => any_has_attrs(db, id), - AttrDefId::TraitId(id) => any_has_attrs(db, id), - AttrDefId::TypeAliasId(id) => any_has_attrs(db, id), - AttrDefId::MacroId(id) => match id { - MacroId::Macro2Id(id) => any_has_attrs(db, id), - MacroId::MacroRulesId(id) => any_has_attrs(db, id), - MacroId::ProcMacroId(id) => any_has_attrs(db, id), - }, - AttrDefId::ImplId(id) => any_has_attrs(db, id), - AttrDefId::GenericParamId(id) => match id { - GenericParamId::ConstParamId(id) => id - .parent() - .child_source(db) - .map(|source| ast::AnyHasAttrs::new(source[id.local_id()].clone())), - GenericParamId::TypeParamId(id) => id - .parent() - .child_source(db) - .map(|source| ast::AnyHasAttrs::new(source[id.local_id()].clone())), - GenericParamId::LifetimeParamId(id) => id - .parent - .child_source(db) - .map(|source| ast::AnyHasAttrs::new(source[id.local_id].clone())), - }, - AttrDefId::ExternBlockId(id) => any_has_attrs(db, id), - AttrDefId::ExternCrateId(id) => any_has_attrs(db, id), - AttrDefId::UseId(id) => any_has_attrs(db, id), - }; - - AttrSourceMap::new(owner.as_ref().map(|node| node as &dyn HasAttrs)) - } -} - -#[derive(Debug)] -pub struct AttrSourceMap { - source: Vec>, - file_id: HirFileId, - /// If this map is for a module, this will be the [`HirFileId`] of the module's definition site, - /// while `file_id` will be the one of the module declaration site. - /// The usize is the index into `source` from which point on the entries reside in the def site - /// file. 
- mod_def_site_file_id: Option<(HirFileId, usize)>, -} - -impl AttrSourceMap { - fn new(owner: InFile<&dyn ast::HasAttrs>) -> Self { - Self { - source: collect_attrs(owner.value).map(|(_, it)| it).collect(), - file_id: owner.file_id, - mod_def_site_file_id: None, - } - } - - /// Append a second source map to this one, this is required for modules, whose outline and inline - /// attributes can reside in different files - fn append_module_inline_attrs(&mut self, other: Self) { - assert!(self.mod_def_site_file_id.is_none() && other.mod_def_site_file_id.is_none()); - let len = self.source.len(); - self.source.extend(other.source); - if other.file_id != self.file_id { - self.mod_def_site_file_id = Some((other.file_id, len)); - } - } - - /// Maps the lowered `Attr` back to its original syntax node. - /// - /// `attr` must come from the `owner` used for AttrSourceMap - /// - /// Note that the returned syntax node might be a `#[cfg_attr]`, or a doc comment, instead of - /// the attribute represented by `Attr`. - pub fn source_of(&self, attr: &Attr) -> InFile<&Either> { - self.source_of_id(attr.id) - } - - pub fn source_of_id(&self, id: AttrId) -> InFile<&Either> { - let ast_idx = id.ast_index(); - let file_id = match self.mod_def_site_file_id { - Some((file_id, def_site_cut)) if def_site_cut <= ast_idx => file_id, - _ => self.file_id, - }; - - self.source - .get(ast_idx) - .map(|it| InFile::new(file_id, it)) - .unwrap_or_else(|| panic!("cannot find attr at index {id:?}")) - } -} - -#[derive(Debug, Clone)] -pub struct AttrQuery<'attr> { - attrs: &'attr Attrs, - key: Symbol, -} - -impl<'attr> AttrQuery<'attr> { - #[inline] - pub fn tt_values(self) -> impl Iterator { - self.attrs().filter_map(|attr| attr.token_tree_value()) - } - - #[inline] - pub fn string_value(self) -> Option<&'attr Symbol> { - self.attrs().find_map(|attr| attr.string_value()) - } - - #[inline] - pub fn string_value_with_span(self) -> Option<(&'attr Symbol, span::Span)> { - self.attrs().find_map(|attr| attr.string_value_with_span()) - } - - #[inline] - pub fn string_value_unescape(self) -> Option> { - self.attrs().find_map(|attr| attr.string_value_unescape()) - } - - #[inline] - pub fn exists(self) -> bool { - self.attrs().next().is_some() - } - - #[inline] - pub fn attrs(self) -> impl Iterator + Clone { - let key = self.key; - self.attrs.iter().filter(move |attr| attr.path.as_ident().is_some_and(|s| *s == key)) - } - - /// Find string value for a specific key inside token tree - /// - /// ```ignore - /// #[doc(html_root_url = "url")] - /// ^^^^^^^^^^^^^ key - /// ``` - #[inline] - pub fn find_string_value_in_tt(self, key: Symbol) -> Option<&'attr str> { - self.tt_values().find_map(|tt| { - let name = tt.iter() - .skip_while(|tt| !matches!(tt, TtElement::Leaf(tt::Leaf::Ident(tt::Ident { sym, ..} )) if *sym == key)) - .nth(2); - - match name { - Some(TtElement::Leaf(tt::Leaf::Literal(tt::Literal{ symbol: text, kind: tt::LitKind::Str | tt::LitKind::StrRaw(_) , ..}))) => Some(text.as_str()), - _ => None - } - }) - } -} - -fn any_has_attrs<'db>( - db: &(dyn DefDatabase + 'db), - id: impl Lookup>, -) -> InFile { - id.lookup(db).source(db).map(ast::AnyHasAttrs::new) -} - -fn attrs_from_ast_id_loc<'db, N: AstIdNode + HasAttrs>( - db: &(dyn DefDatabase + 'db), - lookup: impl Lookup + HasModule>, -) -> Attrs { - let loc = lookup.lookup(db); - let source = loc.source(db); - let span_map = db.span_map(source.file_id); - let cfg_options = loc.krate(db).cfg_options(db); - Attrs(RawAttrs::new_expanded(db, &source.value, span_map.as_ref(), 
cfg_options)) -} - -pub(crate) fn fields_attrs_source_map( - db: &dyn DefDatabase, - def: VariantId, -) -> Arc>>> { - let mut res = ArenaMap::default(); - let child_source = def.child_source(db); - - for (idx, variant) in child_source.value.iter() { - res.insert( - idx, - variant - .as_ref() - .either(|l| AstPtr::new(l).wrap_left(), |r| AstPtr::new(r).wrap_right()), - ); - } - - Arc::new(res) -} - -#[cfg(test)] -mod tests { - //! This module contains tests for doc-expression parsing. - //! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`. - - use intern::Symbol; - use span::EditionedFileId; - use triomphe::Arc; - - use hir_expand::span_map::{RealSpanMap, SpanMap}; - use span::FileId; - use syntax::{AstNode, TextRange, ast}; - use syntax_bridge::{DocCommentDesugarMode, syntax_node_to_token_tree}; - - use crate::attr::{DocAtom, DocExpr}; - - fn assert_parse_result(input: &str, expected: DocExpr) { - let source_file = ast::SourceFile::parse(input, span::Edition::CURRENT).ok().unwrap(); - let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute( - EditionedFileId::current_edition(FileId::from_raw(0)), - ))); - let tt = syntax_node_to_token_tree( - tt.syntax(), - map.as_ref(), - map.span_for_range(TextRange::empty(0.into())), - DocCommentDesugarMode::ProcMacro, - ); - let cfg = DocExpr::parse(&tt); - assert_eq!(cfg, expected); - } - - #[test] - fn test_doc_expr_parser() { - assert_parse_result("#![doc(hidden)]", DocAtom::Flag(Symbol::intern("hidden")).into()); - - assert_parse_result( - r#"#![doc(alias = "foo")]"#, - DocAtom::KeyValue { key: Symbol::intern("alias"), value: Symbol::intern("foo") }.into(), - ); - - assert_parse_result( - r#"#![doc(alias("foo"))]"#, - DocExpr::Alias([Symbol::intern("foo")].into()), - ); - assert_parse_result( - r#"#![doc(alias("foo", "bar", "baz"))]"#, - DocExpr::Alias( - [Symbol::intern("foo"), Symbol::intern("bar"), Symbol::intern("baz")].into(), - ), - ); - - assert_parse_result( - r#" - #[doc(alias("Bar", "Qux"))] - struct Foo;"#, - DocExpr::Alias([Symbol::intern("Bar"), Symbol::intern("Qux")].into()), - ); - } -} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attrs.rs b/src/tools/rust-analyzer/crates/hir-def/src/attrs.rs new file mode 100644 index 0000000000000..1897cb5205aaa --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-def/src/attrs.rs @@ -0,0 +1,1613 @@ +//! Attributes for anything that is not name resolution. +//! +//! The fundamental idea of this module stems from the observation that most "interesting" +//! attributes have a more memory-compact form than storing their full syntax, and +//! that most of the attributes are flags, and those that are not are rare. Therefore, +//! this module defines [`AttrFlags`], which is a bitflag enum that contains only a yes/no +//! answer to whether an attribute is present on an item. For most attributes, that's all +//! that is interesting us; for the rest of them, we define another query that extracts +//! their data. A key part is that every one of those queries will have a wrapper method +//! that queries (or is given) the `AttrFlags` and checks for the presence of the attribute; +//! if it is not present, we do not call the query, to prevent Salsa from needing to record +//! its value. This way, queries are only called on items that have the attribute, which is +//! usually only a few. +//! +//! An exception to this model that is also defined in this module is documentation (doc +//! 
comments and `#[doc = "..."]` attributes). But it also has a more compact form than +//! the attribute: a concatenated string of the full docs as well as a source map +//! to map it back to AST (which is needed for things like resolving links in doc comments +//! and highlight injection). The lowering and upmapping of doc comments is a bit complicated, +//! but it is encapsulated in the [`Docs`] struct. + +use std::{ + convert::Infallible, + iter::Peekable, + ops::{ControlFlow, Range}, +}; + +use base_db::Crate; +use cfg::{CfgExpr, CfgOptions}; +use either::Either; +use hir_expand::{ + HirFileId, InFile, Lookup, + attrs::{Meta, expand_cfg_attr, expand_cfg_attr_with_doc_comments}, +}; +use intern::Symbol; +use itertools::Itertools; +use la_arena::ArenaMap; +use rustc_abi::ReprOptions; +use rustc_hash::FxHashSet; +use smallvec::SmallVec; +use syntax::{ + AstNode, AstToken, NodeOrToken, SmolStr, SyntaxNode, SyntaxToken, T, + ast::{self, AttrDocCommentIter, HasAttrs, IsString, TokenTreeChildren}, +}; +use tt::{TextRange, TextSize}; + +use crate::{ + AdtId, AstIdLoc, AttrDefId, FieldId, FunctionId, GenericDefId, HasModule, InternedModuleId, + LifetimeParamId, LocalFieldId, MacroId, TypeOrConstParamId, VariantId, + db::DefDatabase, + hir::generics::{GenericParams, LocalLifetimeParamId, LocalTypeOrConstParamId}, + lang_item::LangItem, + nameres::ModuleOrigin, + src::{HasChildSource, HasSource}, +}; + +#[inline] +fn attrs_from_ast_id_loc>( + db: &dyn DefDatabase, + lookup: impl Lookup + HasModule>, +) -> (InFile, Crate) { + let loc = lookup.lookup(db); + let source = loc.source(db); + let krate = loc.krate(db); + (source.map(|it| it.into()), krate) +} + +#[inline] +fn extract_doc_tt_attr(attr_flags: &mut AttrFlags, tt: ast::TokenTree) { + for atom in DocAtom::parse(tt) { + match atom { + DocAtom::Flag(flag) => match &*flag { + "notable_trait" => attr_flags.insert(AttrFlags::IS_DOC_NOTABLE_TRAIT), + "hidden" => attr_flags.insert(AttrFlags::IS_DOC_HIDDEN), + _ => {} + }, + DocAtom::KeyValue { key, value: _ } => match &*key { + "alias" => attr_flags.insert(AttrFlags::HAS_DOC_ALIASES), + "keyword" => attr_flags.insert(AttrFlags::HAS_DOC_KEYWORD), + _ => {} + }, + DocAtom::Alias(_) => attr_flags.insert(AttrFlags::HAS_DOC_ALIASES), + } + } +} + +fn extract_ra_completions(attr_flags: &mut AttrFlags, tt: ast::TokenTree) { + let tt = TokenTreeChildren::new(&tt); + if let Ok(NodeOrToken::Token(option)) = tt.exactly_one() + && option.kind().is_any_identifier() + { + match option.text() { + "ignore_flyimport" => attr_flags.insert(AttrFlags::COMPLETE_IGNORE_FLYIMPORT), + "ignore_methods" => attr_flags.insert(AttrFlags::COMPLETE_IGNORE_METHODS), + "ignore_flyimport_methods" => { + attr_flags.insert(AttrFlags::COMPLETE_IGNORE_FLYIMPORT_METHODS) + } + _ => {} + } + } +} + +fn extract_rustc_skip_during_method_dispatch(attr_flags: &mut AttrFlags, tt: ast::TokenTree) { + let iter = TokenTreeChildren::new(&tt); + for kind in iter { + if let NodeOrToken::Token(kind) = kind + && kind.kind().is_any_identifier() + { + match kind.text() { + "array" => attr_flags.insert(AttrFlags::RUSTC_SKIP_ARRAY_DURING_METHOD_DISPATCH), + "boxed_slice" => { + attr_flags.insert(AttrFlags::RUSTC_SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH) + } + _ => {} + } + } + } +} + +#[inline] +fn match_attr_flags(attr_flags: &mut AttrFlags, attr: Meta) -> ControlFlow { + match attr { + Meta::NamedKeyValue { name: Some(name), value, .. 
} => match name.text() { + "deprecated" => attr_flags.insert(AttrFlags::IS_DEPRECATED), + "lang" => attr_flags.insert(AttrFlags::LANG_ITEM), + "path" => attr_flags.insert(AttrFlags::HAS_PATH), + "unstable" => attr_flags.insert(AttrFlags::IS_UNSTABLE), + "export_name" => { + if let Some(value) = value + && let Some(value) = ast::String::cast(value) + && let Ok(value) = value.value() + && *value == *"main" + { + attr_flags.insert(AttrFlags::IS_EXPORT_NAME_MAIN); + } + } + _ => {} + }, + Meta::TokenTree { path, tt } => match path.segments.len() { + 1 => match path.segments[0].text() { + "deprecated" => attr_flags.insert(AttrFlags::IS_DEPRECATED), + "cfg" => attr_flags.insert(AttrFlags::HAS_CFG), + "doc" => extract_doc_tt_attr(attr_flags, tt), + "repr" => attr_flags.insert(AttrFlags::HAS_REPR), + "target_feature" => attr_flags.insert(AttrFlags::HAS_TARGET_FEATURE), + "proc_macro_derive" | "rustc_builtin_macro" => { + attr_flags.insert(AttrFlags::IS_DERIVE_OR_BUILTIN_MACRO) + } + "unstable" => attr_flags.insert(AttrFlags::IS_UNSTABLE), + "rustc_layout_scalar_valid_range_start" | "rustc_layout_scalar_valid_range_end" => { + attr_flags.insert(AttrFlags::RUSTC_LAYOUT_SCALAR_VALID_RANGE) + } + "rustc_legacy_const_generics" => { + attr_flags.insert(AttrFlags::HAS_LEGACY_CONST_GENERICS) + } + "rustc_skip_during_method_dispatch" => { + extract_rustc_skip_during_method_dispatch(attr_flags, tt) + } + _ => {} + }, + 2 => match path.segments[0].text() { + "rust_analyzer" => match path.segments[1].text() { + "completions" => extract_ra_completions(attr_flags, tt), + _ => {} + }, + _ => {} + }, + _ => {} + }, + Meta::Path { path } => { + match path.segments.len() { + 1 => match path.segments[0].text() { + "rustc_has_incoherent_inherent_impls" => { + attr_flags.insert(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) + } + "rustc_allow_incoherent_impl" => { + attr_flags.insert(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) + } + "fundamental" => attr_flags.insert(AttrFlags::FUNDAMENTAL), + "no_std" => attr_flags.insert(AttrFlags::IS_NO_STD), + "may_dangle" => attr_flags.insert(AttrFlags::MAY_DANGLE), + "rustc_paren_sugar" => attr_flags.insert(AttrFlags::RUSTC_PAREN_SUGAR), + "rustc_coinductive" => attr_flags.insert(AttrFlags::RUSTC_COINDUCTIVE), + "rustc_force_inline" => attr_flags.insert(AttrFlags::RUSTC_FORCE_INLINE), + "unstable" => attr_flags.insert(AttrFlags::IS_UNSTABLE), + "deprecated" => attr_flags.insert(AttrFlags::IS_DEPRECATED), + "macro_export" => attr_flags.insert(AttrFlags::IS_MACRO_EXPORT), + "no_mangle" => attr_flags.insert(AttrFlags::NO_MANGLE), + "non_exhaustive" => attr_flags.insert(AttrFlags::NON_EXHAUSTIVE), + "ignore" => attr_flags.insert(AttrFlags::IS_IGNORE), + "bench" => attr_flags.insert(AttrFlags::IS_BENCH), + "rustc_const_panic_str" => attr_flags.insert(AttrFlags::RUSTC_CONST_PANIC_STR), + "rustc_intrinsic" => attr_flags.insert(AttrFlags::RUSTC_INTRINSIC), + "rustc_safe_intrinsic" => attr_flags.insert(AttrFlags::RUSTC_SAFE_INTRINSIC), + "rustc_intrinsic_must_be_overridden" => { + attr_flags.insert(AttrFlags::RUSTC_INTRINSIC_MUST_BE_OVERRIDDEN) + } + "rustc_allocator" => attr_flags.insert(AttrFlags::RUSTC_ALLOCATOR), + "rustc_deallocator" => attr_flags.insert(AttrFlags::RUSTC_DEALLOCATOR), + "rustc_reallocator" => attr_flags.insert(AttrFlags::RUSTC_REALLOCATOR), + "rustc_allocator_zeroed" => { + attr_flags.insert(AttrFlags::RUSTC_ALLOCATOR_ZEROED) + } + "rustc_reservation_impl" => { + attr_flags.insert(AttrFlags::RUSTC_RESERVATION_IMPL) + } + "rustc_deprecated_safe_2024" => { + 
attr_flags.insert(AttrFlags::RUSTC_DEPRECATED_SAFE_2024) + } + "rustc_skip_array_during_method_dispatch" => { + attr_flags.insert(AttrFlags::RUSTC_SKIP_ARRAY_DURING_METHOD_DISPATCH) + } + _ => {} + }, + 2 => match path.segments[0].text() { + "rust_analyzer" => match path.segments[1].text() { + "skip" => attr_flags.insert(AttrFlags::RUST_ANALYZER_SKIP), + _ => {} + }, + _ => {} + }, + _ => {} + } + + if path.is_test { + attr_flags.insert(AttrFlags::IS_TEST); + } + } + _ => {} + }; + ControlFlow::Continue(()) +} + +bitflags::bitflags! { + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] + pub struct AttrFlags: u64 { + const RUST_ANALYZER_SKIP = 1 << 0; + + const LANG_ITEM = 1 << 1; + + const HAS_DOC_ALIASES = 1 << 2; + const HAS_DOC_KEYWORD = 1 << 3; + const IS_DOC_NOTABLE_TRAIT = 1 << 4; + const IS_DOC_HIDDEN = 1 << 5; + + const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS = 1 << 6; + const RUSTC_ALLOW_INCOHERENT_IMPL = 1 << 7; + const FUNDAMENTAL = 1 << 8; + const RUSTC_SKIP_ARRAY_DURING_METHOD_DISPATCH = 1 << 9; + const RUSTC_SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH = 1 << 10; + const HAS_REPR = 1 << 11; + const HAS_TARGET_FEATURE = 1 << 12; + const RUSTC_DEPRECATED_SAFE_2024 = 1 << 13; + const HAS_LEGACY_CONST_GENERICS = 1 << 14; + const NO_MANGLE = 1 << 15; + const NON_EXHAUSTIVE = 1 << 16; + const RUSTC_RESERVATION_IMPL = 1 << 17; + const RUSTC_CONST_PANIC_STR = 1 << 18; + const MAY_DANGLE = 1 << 19; + + const RUSTC_INTRINSIC = 1 << 20; + const RUSTC_SAFE_INTRINSIC = 1 << 21; + const RUSTC_INTRINSIC_MUST_BE_OVERRIDDEN = 1 << 22; + const RUSTC_ALLOCATOR = 1 << 23; + const RUSTC_DEALLOCATOR = 1 << 24; + const RUSTC_REALLOCATOR = 1 << 25; + const RUSTC_ALLOCATOR_ZEROED = 1 << 26; + + const IS_UNSTABLE = 1 << 27; + const IS_IGNORE = 1 << 28; + // FIXME: `IS_TEST` and `IS_BENCH` should be based on semantic information, not textual match. + const IS_BENCH = 1 << 29; + const IS_TEST = 1 << 30; + const IS_EXPORT_NAME_MAIN = 1 << 31; + const IS_MACRO_EXPORT = 1 << 32; + const IS_NO_STD = 1 << 33; + const IS_DERIVE_OR_BUILTIN_MACRO = 1 << 34; + const IS_DEPRECATED = 1 << 35; + const HAS_PATH = 1 << 36; + const HAS_CFG = 1 << 37; + + const COMPLETE_IGNORE_FLYIMPORT = 1 << 38; + const COMPLETE_IGNORE_FLYIMPORT_METHODS = 1 << 39; + const COMPLETE_IGNORE_METHODS = 1 << 40; + + const RUSTC_LAYOUT_SCALAR_VALID_RANGE = 1 << 41; + const RUSTC_PAREN_SUGAR = 1 << 42; + const RUSTC_COINDUCTIVE = 1 << 43; + const RUSTC_FORCE_INLINE = 1 << 44; + } +} + +fn attrs_source( + db: &dyn DefDatabase, + owner: AttrDefId, +) -> (InFile, Option>, Crate) { + let (owner, krate) = match owner { + AttrDefId::ModuleId(id) => { + let id = id.loc(db); + let def_map = id.def_map(db); + let (definition, declaration) = match def_map[id.local_id].origin { + ModuleOrigin::CrateRoot { definition } => { + let file = db.parse(definition).tree(); + (InFile::new(definition.into(), ast::AnyHasAttrs::from(file)), None) + } + ModuleOrigin::File { declaration, declaration_tree_id, definition, .. 
} => { + let declaration = InFile::new(declaration_tree_id.file_id(), declaration); + let declaration = declaration.with_value(declaration.to_node(db)); + let definition_source = db.parse(definition).tree(); + (InFile::new(definition.into(), definition_source.into()), Some(declaration)) + } + ModuleOrigin::Inline { definition_tree_id, definition } => { + let definition = InFile::new(definition_tree_id.file_id(), definition); + let definition = definition.with_value(definition.to_node(db).into()); + (definition, None) + } + ModuleOrigin::BlockExpr { block, .. } => { + let definition = block.to_node(db); + (block.with_value(definition.into()), None) + } + }; + return (definition, declaration, id.krate); + } + AttrDefId::AdtId(AdtId::StructId(it)) => attrs_from_ast_id_loc(db, it), + AttrDefId::AdtId(AdtId::UnionId(it)) => attrs_from_ast_id_loc(db, it), + AttrDefId::AdtId(AdtId::EnumId(it)) => attrs_from_ast_id_loc(db, it), + AttrDefId::FunctionId(it) => attrs_from_ast_id_loc(db, it), + AttrDefId::EnumVariantId(it) => attrs_from_ast_id_loc(db, it), + AttrDefId::StaticId(it) => attrs_from_ast_id_loc(db, it), + AttrDefId::ConstId(it) => attrs_from_ast_id_loc(db, it), + AttrDefId::TraitId(it) => attrs_from_ast_id_loc(db, it), + AttrDefId::TypeAliasId(it) => attrs_from_ast_id_loc(db, it), + AttrDefId::MacroId(MacroId::MacroRulesId(it)) => attrs_from_ast_id_loc(db, it), + AttrDefId::MacroId(MacroId::Macro2Id(it)) => attrs_from_ast_id_loc(db, it), + AttrDefId::MacroId(MacroId::ProcMacroId(it)) => attrs_from_ast_id_loc(db, it), + AttrDefId::ImplId(it) => attrs_from_ast_id_loc(db, it), + AttrDefId::ExternBlockId(it) => attrs_from_ast_id_loc(db, it), + AttrDefId::ExternCrateId(it) => attrs_from_ast_id_loc(db, it), + AttrDefId::UseId(it) => attrs_from_ast_id_loc(db, it), + }; + (owner, None, krate) +} + +fn collect_attrs( + db: &dyn DefDatabase, + owner: AttrDefId, + mut callback: impl FnMut(Meta) -> ControlFlow, +) -> Option { + let (source, outer_mod_decl, krate) = attrs_source(db, owner); + + let mut cfg_options = None; + expand_cfg_attr( + outer_mod_decl + .into_iter() + .flat_map(|it| it.value.attrs()) + .chain(ast::attrs_including_inner(&source.value)), + || cfg_options.get_or_insert_with(|| krate.cfg_options(db)), + move |meta, _, _, _| callback(meta), + ) +} + +fn collect_field_attrs( + db: &dyn DefDatabase, + variant: VariantId, + mut field_attrs: impl FnMut(&CfgOptions, InFile) -> T, +) -> ArenaMap { + let (variant_syntax, krate) = match variant { + VariantId::EnumVariantId(it) => attrs_from_ast_id_loc(db, it), + VariantId::StructId(it) => attrs_from_ast_id_loc(db, it), + VariantId::UnionId(it) => attrs_from_ast_id_loc(db, it), + }; + let cfg_options = krate.cfg_options(db); + let variant_syntax = variant_syntax + .with_value(ast::VariantDef::cast(variant_syntax.value.syntax().clone()).unwrap()); + let fields = match &variant_syntax.value { + ast::VariantDef::Struct(it) => it.field_list(), + ast::VariantDef::Union(it) => it.record_field_list().map(ast::FieldList::RecordFieldList), + ast::VariantDef::Variant(it) => it.field_list(), + }; + let Some(fields) = fields else { + return ArenaMap::new(); + }; + + let mut result = ArenaMap::new(); + let mut idx = 0; + match fields { + ast::FieldList::RecordFieldList(fields) => { + for field in fields.fields() { + if AttrFlags::is_cfg_enabled_for(&field, cfg_options).is_ok() { + result.insert( + la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(idx)), + field_attrs(cfg_options, variant_syntax.with_value(field.into())), + ); + idx += 1; + } + } + } + 
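`collect_field_attrs` hands out arena indices only for fields whose `cfg` is enabled, so a disabled field does not occupy an index and the fields after it shift down, matching how the item tree numbers fields. For illustration, with a hypothetical struct and the `extra` feature disabled:

```rust
// Hypothetical example: with `feature = "extra"` off, `b` is skipped entirely,
// so field index 0 maps to `a` and field index 1 maps to `c`.
struct S {
    a: u32,
    #[cfg(feature = "extra")]
    b: u32,
    c: u32,
}
```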
ast::FieldList::TupleFieldList(fields) => { + for field in fields.fields() { + if AttrFlags::is_cfg_enabled_for(&field, cfg_options).is_ok() { + result.insert( + la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(idx)), + field_attrs(cfg_options, variant_syntax.with_value(field.into())), + ); + idx += 1; + } + } + } + } + result.shrink_to_fit(); + result +} + +#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash)] +pub struct RustcLayoutScalarValidRange { + pub start: Option, + pub end: Option, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +struct DocsSourceMapLine { + /// The offset in [`Docs::docs`]. + string_offset: TextSize, + /// The offset in the AST of the text. + ast_offset: TextSize, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Docs { + /// The concatenated string of all `#[doc = "..."]` attributes and documentation comments. + docs: String, + /// A sorted map from an offset in `docs` to an offset in the source code. + docs_source_map: Vec, + /// If the item is an outlined module (`mod foo;`), `docs_source_map` store the concatenated + /// list of the outline and inline docs (outline first). Then, this field contains the [`HirFileId`] + /// of the outline declaration, and the index in `docs` from which the inline docs + /// begin. + outline_mod: Option<(HirFileId, usize)>, + inline_file: HirFileId, + /// The size the prepended prefix, which does not map to real doc comments. + prefix_len: TextSize, + /// The offset in `docs` from which the docs are inner attributes/comments. + inline_inner_docs_start: Option, + /// Like `inline_inner_docs_start`, but for `outline_mod`. This can happen only when merging `Docs` + /// (as outline modules don't have inner attributes). + outline_inner_docs_start: Option, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum IsInnerDoc { + No, + Yes, +} + +impl IsInnerDoc { + #[inline] + pub fn yes(self) -> bool { + self == IsInnerDoc::Yes + } +} + +impl Docs { + #[inline] + pub fn docs(&self) -> &str { + &self.docs + } + + #[inline] + pub fn into_docs(self) -> String { + self.docs + } + + pub fn find_ast_range( + &self, + mut string_range: TextRange, + ) -> Option<(InFile, IsInnerDoc)> { + if string_range.start() < self.prefix_len { + return None; + } + string_range -= self.prefix_len; + + let mut file = self.inline_file; + let mut inner_docs_start = self.inline_inner_docs_start; + // Check whether the range is from the outline, the inline, or both. + let source_map = if let Some((outline_mod_file, outline_mod_end)) = self.outline_mod { + if let Some(first_inline) = self.docs_source_map.get(outline_mod_end) { + if string_range.end() <= first_inline.string_offset { + // The range is completely in the outline. + file = outline_mod_file; + inner_docs_start = self.outline_inner_docs_start; + &self.docs_source_map[..outline_mod_end] + } else if string_range.start() >= first_inline.string_offset { + // The range is completely in the inline. + &self.docs_source_map[outline_mod_end..] + } else { + // The range is combined from the outline and the inline - cannot map it back. + return None; + } + } else { + // There is no inline. + file = outline_mod_file; + inner_docs_start = self.outline_inner_docs_start; + &self.docs_source_map + } + } else { + // There is no outline. 
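`find_ast_range` maps a range inside the concatenated docs string back into the AST by locating the source-map line whose `string_offset` covers the start of the range; ranges that straddle two lines, or the outline/inline boundary, are deliberately reported as unmappable. A reduced sketch of that offset arithmetic, with plain `usize` offsets instead of `TextSize`:

```rust
// Simplified stand-in for `DocsSourceMapLine`.
struct Line {
    string_offset: usize,
    ast_offset: usize,
}

/// Maps `start..end` in the concatenated doc string to an AST range, or returns
/// `None` when the range spans more than one source line.
fn find_ast_range(map: &[Line], start: usize, end: usize) -> Option<(usize, usize)> {
    let idx = map.partition_point(|l| l.string_offset <= start) - 1;
    let line = &map[idx];
    if map.get(idx + 1).is_some_and(|next| next.string_offset < end) {
        // The range is combined from two lines: it cannot be mapped back.
        return None;
    }
    Some((
        start - line.string_offset + line.ast_offset,
        end - line.string_offset + line.ast_offset,
    ))
}

fn main() {
    // Two doc lines: "foo\n" starting at AST offset 4, "bar\n" starting at AST offset 20.
    let map = [
        Line { string_offset: 0, ast_offset: 4 },
        Line { string_offset: 4, ast_offset: 20 },
    ];
    assert_eq!(find_ast_range(&map, 5, 7), Some((21, 23))); // inside "bar"
    assert_eq!(find_ast_range(&map, 2, 6), None); // straddles the line break
    println!("ok");
}
```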
+ &self.docs_source_map + }; + + let after_range = + source_map.partition_point(|line| line.string_offset <= string_range.start()) - 1; + let after_range = &source_map[after_range..]; + let line = after_range.first()?; + if after_range.get(1).is_some_and(|next_line| next_line.string_offset < string_range.end()) + { + // The range is combined from two lines - cannot map it back. + return None; + } + let ast_range = string_range - line.string_offset + line.ast_offset; + let is_inner = if inner_docs_start + .is_some_and(|inner_docs_start| string_range.start() >= inner_docs_start) + { + IsInnerDoc::Yes + } else { + IsInnerDoc::No + }; + Some((InFile::new(file, ast_range), is_inner)) + } + + #[inline] + pub fn shift_by(&mut self, offset: TextSize) { + self.prefix_len += offset; + } + + pub fn prepend_str(&mut self, s: &str) { + self.prefix_len += TextSize::of(s); + self.docs.insert_str(0, s); + } + + pub fn append_str(&mut self, s: &str) { + self.docs.push_str(s); + } + + pub fn append(&mut self, other: &Docs) { + let other_offset = TextSize::of(&self.docs); + + assert!( + self.outline_mod.is_none() && other.outline_mod.is_none(), + "cannot merge `Docs` that have `outline_mod` set" + ); + self.outline_mod = Some((self.inline_file, self.docs_source_map.len())); + self.inline_file = other.inline_file; + self.outline_inner_docs_start = self.inline_inner_docs_start; + self.inline_inner_docs_start = other.inline_inner_docs_start.map(|it| it + other_offset); + + self.docs.push_str(&other.docs); + self.docs_source_map.extend(other.docs_source_map.iter().map( + |&DocsSourceMapLine { string_offset, ast_offset }| DocsSourceMapLine { + ast_offset, + string_offset: string_offset + other_offset, + }, + )); + } + + fn extend_with_doc_comment(&mut self, comment: ast::Comment, indent: &mut usize) { + let Some((doc, offset)) = comment.doc_comment() else { return }; + self.extend_with_doc_str(doc, comment.syntax().text_range().start() + offset, indent); + } + + fn extend_with_doc_attr(&mut self, value: SyntaxToken, indent: &mut usize) { + let Some(value) = ast::String::cast(value) else { return }; + let Some(value_offset) = value.text_range_between_quotes() else { return }; + let value_offset = value_offset.start(); + let Ok(value) = value.value() else { return }; + // FIXME: Handle source maps for escaped text. + self.extend_with_doc_str(&value, value_offset, indent); + } + + fn extend_with_doc_str(&mut self, doc: &str, mut offset_in_ast: TextSize, indent: &mut usize) { + for line in doc.split('\n') { + self.docs_source_map.push(DocsSourceMapLine { + string_offset: TextSize::of(&self.docs), + ast_offset: offset_in_ast, + }); + offset_in_ast += TextSize::of(line) + TextSize::of("\n"); + + let line = line.trim_end(); + if let Some(line_indent) = line.chars().position(|ch| !ch.is_whitespace()) { + // Empty lines are handled because `position()` returns `None` for them. + *indent = std::cmp::min(*indent, line_indent); + } + self.docs.push_str(line); + self.docs.push('\n'); + } + } + + fn remove_indent(&mut self, indent: usize, start_source_map_index: usize) { + /// In case of panics, we want to avoid corrupted UTF-8 in `self.docs`, so we clear it. 
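`extend_with_doc_str` tracks the smallest leading-whitespace width over all non-empty doc lines, and `remove_indent` then strips that prefix from every line in place. The intended result, shown with a simple allocating version instead of the in-place `unsafe` rewrite:

```rust
// Allocating version of the indent normalization done by `remove_indent`.
fn dedent(doc: &str) -> String {
    let indent = doc
        .lines()
        .filter(|line| !line.trim().is_empty())
        .map(|line| line.chars().take_while(|c| c.is_whitespace()).count())
        .min()
        .unwrap_or(0);
    doc.lines()
        .map(|line| line.chars().skip(indent).collect::<String>())
        .collect::<Vec<_>>()
        .join("\n")
}

fn main() {
    // Doc comments usually share one leading space; deeper indentation is kept.
    let raw = "  Summary line.\n\n    code-ish line.";
    assert_eq!(dedent(raw), "Summary line.\n\n  code-ish line.");
    println!("ok");
}
```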
+ struct Guard<'a>(&'a mut Docs); + impl Drop for Guard<'_> { + fn drop(&mut self) { + let Docs { + docs, + docs_source_map, + outline_mod, + inline_file: _, + prefix_len: _, + inline_inner_docs_start: _, + outline_inner_docs_start: _, + } = self.0; + // Don't use `String::clear()` here because it's not guaranteed to not do UTF-8-dependent things, + // and we may have temporarily broken the string's encoding. + unsafe { docs.as_mut_vec() }.clear(); + // This is just to avoid panics down the road. + docs_source_map.clear(); + *outline_mod = None; + } + } + + if self.docs.is_empty() { + return; + } + + let guard = Guard(self); + let source_map = &mut guard.0.docs_source_map[start_source_map_index..]; + let Some(&DocsSourceMapLine { string_offset: mut copy_into, .. }) = source_map.first() + else { + return; + }; + // We basically want to remove multiple ranges from a string. Doing this efficiently (without O(N^2) + // or allocations) requires unsafe. Basically, for each line, we copy the line minus the indent into + // consecutive to the previous line (which may have moved). Then at the end we truncate. + let mut accumulated_offset = TextSize::new(0); + for idx in 0..source_map.len() { + let string_end_offset = source_map + .get(idx + 1) + .map_or_else(|| TextSize::of(&guard.0.docs), |next_attr| next_attr.string_offset); + let line_source = &mut source_map[idx]; + let line_docs = + &guard.0.docs[TextRange::new(line_source.string_offset, string_end_offset)]; + let line_docs_len = TextSize::of(line_docs); + let indent_size = line_docs.char_indices().nth(indent).map_or_else( + || TextSize::of(line_docs) - TextSize::of("\n"), + |(offset, _)| TextSize::new(offset as u32), + ); + unsafe { guard.0.docs.as_bytes_mut() }.copy_within( + Range::::from(TextRange::new( + line_source.string_offset + indent_size, + string_end_offset, + )), + copy_into.into(), + ); + copy_into += line_docs_len - indent_size; + + if let Some(inner_attrs_start) = &mut guard.0.inline_inner_docs_start + && *inner_attrs_start == line_source.string_offset + { + *inner_attrs_start -= accumulated_offset; + } + // The removals in the string accumulate, but in the AST not, because it already points + // to the beginning of each attribute. + // Also, we need to shift the AST offset of every line, but the string offset of the first + // line should not get shifted (in general, the shift for the string offset is by the + // number of lines until the current one, excluding the current one). + line_source.string_offset -= accumulated_offset; + line_source.ast_offset += indent_size; + + accumulated_offset += indent_size; + } + // Don't use `String::truncate()` here because it's not guaranteed to not do UTF-8-dependent things, + // and we may have temporarily broken the string's encoding. 
+ unsafe { guard.0.docs.as_mut_vec() }.truncate(copy_into.into()); + + std::mem::forget(guard); + } + + fn remove_last_newline(&mut self) { + self.docs.truncate(self.docs.len().saturating_sub(1)); + } + + fn shrink_to_fit(&mut self) { + let Docs { + docs, + docs_source_map, + outline_mod: _, + inline_file: _, + prefix_len: _, + inline_inner_docs_start: _, + outline_inner_docs_start: _, + } = self; + docs.shrink_to_fit(); + docs_source_map.shrink_to_fit(); + } +} + +#[derive(Debug, PartialEq, Eq, Hash)] +pub struct DeriveInfo { + pub trait_name: Symbol, + pub helpers: Box<[Symbol]>, +} + +fn extract_doc_aliases(result: &mut Vec, attr: Meta) -> ControlFlow { + if let Meta::TokenTree { path, tt } = attr + && path.is1("doc") + { + for atom in DocAtom::parse(tt) { + match atom { + DocAtom::Alias(aliases) => { + result.extend(aliases.into_iter().map(|alias| Symbol::intern(&alias))) + } + DocAtom::KeyValue { key, value } if key == "alias" => { + result.push(Symbol::intern(&value)) + } + _ => {} + } + } + } + ControlFlow::Continue(()) +} + +fn extract_cfgs(result: &mut Vec, attr: Meta) -> ControlFlow { + if let Meta::TokenTree { path, tt } = attr + && path.is1("cfg") + { + result.push(CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(&tt).peekable())); + } + ControlFlow::Continue(()) +} + +fn extract_docs<'a>( + get_cfg_options: &dyn Fn() -> &'a CfgOptions, + source: InFile, + outer_mod_decl: Option>, + inner_attrs_node: Option, +) -> Option> { + let mut result = Docs { + docs: String::new(), + docs_source_map: Vec::new(), + outline_mod: None, + inline_file: source.file_id, + prefix_len: TextSize::new(0), + inline_inner_docs_start: None, + outline_inner_docs_start: None, + }; + + let mut cfg_options = None; + let mut extend_with_attrs = + |result: &mut Docs, node: &SyntaxNode, expect_inner_attrs, indent: &mut usize| { + expand_cfg_attr_with_doc_comments::<_, Infallible>( + AttrDocCommentIter::from_syntax_node(node).filter(|attr| match attr { + Either::Left(attr) => attr.kind().is_inner() == expect_inner_attrs, + Either::Right(comment) => comment.kind().doc.is_some_and(|kind| { + (kind == ast::CommentPlacement::Inner) == expect_inner_attrs + }), + }), + || cfg_options.get_or_insert_with(get_cfg_options), + |attr| { + match attr { + Either::Right(doc_comment) => { + result.extend_with_doc_comment(doc_comment, indent) + } + Either::Left((attr, _, _, _)) => match attr { + // FIXME: Handle macros: `#[doc = concat!("foo", "bar")]`. + Meta::NamedKeyValue { + name: Some(name), value: Some(value), .. 
+ } if name.text() == "doc" => { + result.extend_with_doc_attr(value, indent); + } + _ => {} + }, + } + ControlFlow::Continue(()) + }, + ); + }; + + if let Some(outer_mod_decl) = outer_mod_decl { + let mut indent = usize::MAX; + extend_with_attrs(&mut result, outer_mod_decl.value.syntax(), false, &mut indent); + result.remove_indent(indent, 0); + result.outline_mod = Some((outer_mod_decl.file_id, result.docs_source_map.len())); + } + + let inline_source_map_start = result.docs_source_map.len(); + let mut indent = usize::MAX; + extend_with_attrs(&mut result, source.value.syntax(), false, &mut indent); + if let Some(inner_attrs_node) = &inner_attrs_node { + result.inline_inner_docs_start = Some(TextSize::of(&result.docs)); + extend_with_attrs(&mut result, inner_attrs_node, true, &mut indent); + } + result.remove_indent(indent, inline_source_map_start); + + result.remove_last_newline(); + + result.shrink_to_fit(); + + if result.docs.is_empty() { None } else { Some(Box::new(result)) } +} + +#[salsa::tracked] +impl AttrFlags { + #[salsa::tracked] + pub fn query(db: &dyn DefDatabase, owner: AttrDefId) -> AttrFlags { + let mut attr_flags = AttrFlags::empty(); + collect_attrs(db, owner, |attr| match_attr_flags(&mut attr_flags, attr)); + attr_flags + } + + #[inline] + pub fn query_field(db: &dyn DefDatabase, field: FieldId) -> AttrFlags { + return field_attr_flags(db, field.parent) + .get(field.local_id) + .copied() + .unwrap_or_else(AttrFlags::empty); + + #[salsa::tracked(returns(ref))] + fn field_attr_flags( + db: &dyn DefDatabase, + variant: VariantId, + ) -> ArenaMap { + collect_field_attrs(db, variant, |cfg_options, field| { + let mut attr_flags = AttrFlags::empty(); + expand_cfg_attr( + field.value.attrs(), + || cfg_options, + |attr, _, _, _| match_attr_flags(&mut attr_flags, attr), + ); + attr_flags + }) + } + } + + #[inline] + pub fn query_generic_params( + db: &dyn DefDatabase, + def: GenericDefId, + ) -> &(ArenaMap, ArenaMap) + { + let generic_params = GenericParams::new(db, def); + let params_count_excluding_self = + generic_params.len() - usize::from(generic_params.trait_self_param().is_some()); + if params_count_excluding_self == 0 { + return const { &(ArenaMap::new(), ArenaMap::new()) }; + } + return generic_params_attr_flags(db, def); + + #[salsa::tracked(returns(ref))] + fn generic_params_attr_flags( + db: &dyn DefDatabase, + def: GenericDefId, + ) -> (ArenaMap, ArenaMap) + { + let mut lifetimes = ArenaMap::new(); + let mut type_and_consts = ArenaMap::new(); + + let mut cfg_options = None; + let mut cfg_options = + || *cfg_options.get_or_insert_with(|| def.krate(db).cfg_options(db)); + + let lifetimes_source = HasChildSource::::child_source(&def, db); + for (lifetime_id, lifetime) in lifetimes_source.value.iter() { + let mut attr_flags = AttrFlags::empty(); + expand_cfg_attr(lifetime.attrs(), &mut cfg_options, |attr, _, _, _| { + match_attr_flags(&mut attr_flags, attr) + }); + if !attr_flags.is_empty() { + lifetimes.insert(lifetime_id, attr_flags); + } + } + + let type_and_consts_source = + HasChildSource::::child_source(&def, db); + for (type_or_const_id, type_or_const) in type_and_consts_source.value.iter() { + let mut attr_flags = AttrFlags::empty(); + expand_cfg_attr(type_or_const.attrs(), &mut cfg_options, |attr, _, _, _| { + match_attr_flags(&mut attr_flags, attr) + }); + if !attr_flags.is_empty() { + type_and_consts.insert(type_or_const_id, attr_flags); + } + } + + lifetimes.shrink_to_fit(); + type_and_consts.shrink_to_fit(); + (lifetimes, type_and_consts) + } + } + + 
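This is the layout the module docs describe: `AttrFlags::query` computes cheap bitflags for every item, and the expensive per-attribute queries behind it are only created when the matching flag is set, so Salsa never memoizes a value for the common "attribute absent" case. A reduced sketch of that wrapper shape, with plain functions standing in for the `#[salsa::tracked]` queries:

```rust
// Reduced sketch of the "check the flag, then run the expensive query" wrapper.
// `bitflags` and the Salsa machinery are elided; plain functions stand in.
#[derive(Clone, Copy)]
struct Flags(u64);

impl Flags {
    const HAS_REPR: u64 = 1 << 11;
    fn contains(self, bit: u64) -> bool {
        self.0 & bit != 0
    }
}

fn expensive_repr_query(item: &str) -> Option<String> {
    // In the real code this is a tracked query that re-walks the item's attributes.
    Some(format!("repr options for {item}"))
}

fn repr(flags: Flags, item: &str) -> Option<String> {
    if !flags.contains(Flags::HAS_REPR) {
        // No `#[repr]` on this item: skip the query entirely, nothing gets memoized.
        return None;
    }
    expensive_repr_query(item)
}

fn main() {
    assert_eq!(repr(Flags(0), "struct Foo"), None);
    assert!(repr(Flags(Flags::HAS_REPR), "struct Bar").is_some());
}
```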
#[inline] + pub fn query_lifetime_param(db: &dyn DefDatabase, owner: LifetimeParamId) -> AttrFlags { + AttrFlags::query_generic_params(db, owner.parent) + .0 + .get(owner.local_id) + .copied() + .unwrap_or_else(AttrFlags::empty) + } + #[inline] + pub fn query_type_or_const_param(db: &dyn DefDatabase, owner: TypeOrConstParamId) -> AttrFlags { + AttrFlags::query_generic_params(db, owner.parent) + .1 + .get(owner.local_id) + .copied() + .unwrap_or_else(AttrFlags::empty) + } + + pub(crate) fn is_cfg_enabled_for( + owner: &dyn HasAttrs, + cfg_options: &CfgOptions, + ) -> Result<(), CfgExpr> { + let attrs = ast::attrs_including_inner(owner); + let result = expand_cfg_attr( + attrs, + || cfg_options, + |attr, _, _, _| { + if let Meta::TokenTree { path, tt } = attr + && path.is1("cfg") + && let cfg = + CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(&tt).peekable()) + && cfg_options.check(&cfg) == Some(false) + { + ControlFlow::Break(cfg) + } else { + ControlFlow::Continue(()) + } + }, + ); + match result { + Some(cfg) => Err(cfg), + None => Ok(()), + } + } + + #[inline] + pub fn lang_item(db: &dyn DefDatabase, owner: AttrDefId) -> Option { + AttrFlags::query(db, owner).lang_item_with_attrs(db, owner) + } + + #[inline] + pub fn lang_item_with_attrs(self, db: &dyn DefDatabase, owner: AttrDefId) -> Option { + if !self.contains(AttrFlags::LANG_ITEM) { + // Don't create the query in case this is not a lang item, this wastes memory. + return None; + } + + return lang_item(db, owner); + + #[salsa::tracked] + fn lang_item(db: &dyn DefDatabase, owner: AttrDefId) -> Option { + collect_attrs(db, owner, |attr| { + if let Meta::NamedKeyValue { name: Some(name), value: Some(value), .. } = attr + && name.text() == "lang" + && let Some(value) = ast::String::cast(value) + && let Ok(value) = value.value() + && let symbol = Symbol::intern(&value) + && let Some(lang_item) = LangItem::from_symbol(&symbol) + { + ControlFlow::Break(lang_item) + } else { + ControlFlow::Continue(()) + } + }) + } + } + + #[inline] + pub fn repr(db: &dyn DefDatabase, owner: AdtId) -> Option { + if !AttrFlags::query(db, owner.into()).contains(AttrFlags::HAS_REPR) { + // Don't create the query in case this has no repr, this wastes memory. + return None; + } + + return repr(db, owner); + + #[salsa::tracked] + fn repr(db: &dyn DefDatabase, owner: AdtId) -> Option { + let mut result = None; + collect_attrs::(db, owner.into(), |attr| { + if let Meta::TokenTree { path, tt } = attr + && path.is1("repr") + && let Some(repr) = parse_repr_tt(&tt) + { + match &mut result { + Some(existing) => merge_repr(existing, repr), + None => result = Some(repr), + } + } + ControlFlow::Continue(()) + }); + result + } + } + + /// Call this only if there are legacy const generics, to save memory. + #[salsa::tracked(returns(ref))] + pub(crate) fn legacy_const_generic_indices( + db: &dyn DefDatabase, + owner: FunctionId, + ) -> Option> { + let result = collect_attrs(db, owner.into(), |attr| { + if let Meta::TokenTree { path, tt } = attr + && path.is1("rustc_legacy_const_generics") + { + let result = parse_rustc_legacy_const_generics(tt); + ControlFlow::Break(result) + } else { + ControlFlow::Continue(()) + } + }); + result.filter(|it| !it.is_empty()) + } + + // There aren't typically many crates, so it's okay to always make this a query without a flag. 
+ #[salsa::tracked(returns(ref))] + pub fn doc_html_root_url(db: &dyn DefDatabase, krate: Crate) -> Option { + let root_file_id = krate.root_file_id(db); + let syntax = db.parse(root_file_id).tree(); + + let mut cfg_options = None; + expand_cfg_attr( + syntax.attrs(), + || cfg_options.get_or_insert(krate.cfg_options(db)), + |attr, _, _, _| { + if let Meta::TokenTree { path, tt } = attr + && path.is1("doc") + && let Some(result) = DocAtom::parse(tt).into_iter().find_map(|atom| { + if let DocAtom::KeyValue { key, value } = atom + && key == "html_root_url" + { + Some(value) + } else { + None + } + }) + { + ControlFlow::Break(result) + } else { + ControlFlow::Continue(()) + } + }, + ) + } + + #[inline] + pub fn target_features(db: &dyn DefDatabase, owner: FunctionId) -> &FxHashSet { + if !AttrFlags::query(db, owner.into()).contains(AttrFlags::HAS_TARGET_FEATURE) { + return const { &FxHashSet::with_hasher(rustc_hash::FxBuildHasher) }; + } + + return target_features(db, owner); + + #[salsa::tracked(returns(ref))] + fn target_features(db: &dyn DefDatabase, owner: FunctionId) -> FxHashSet { + let mut result = FxHashSet::default(); + collect_attrs::(db, owner.into(), |attr| { + if let Meta::TokenTree { path, tt } = attr + && path.is1("target_feature") + && let mut tt = TokenTreeChildren::new(&tt) + && let Some(NodeOrToken::Token(enable_ident)) = tt.next() + && enable_ident.text() == "enable" + && let Some(NodeOrToken::Token(eq_token)) = tt.next() + && eq_token.kind() == T![=] + && let Some(NodeOrToken::Token(features)) = tt.next() + && let Some(features) = ast::String::cast(features) + && let Ok(features) = features.value() + && tt.next().is_none() + { + result.extend(features.split(',').map(Symbol::intern)); + } + ControlFlow::Continue(()) + }); + result.shrink_to_fit(); + result + } + } + + #[inline] + pub fn rustc_layout_scalar_valid_range( + db: &dyn DefDatabase, + owner: AdtId, + ) -> RustcLayoutScalarValidRange { + if !AttrFlags::query(db, owner.into()).contains(AttrFlags::RUSTC_LAYOUT_SCALAR_VALID_RANGE) + { + return RustcLayoutScalarValidRange::default(); + } + + return rustc_layout_scalar_valid_range(db, owner); + + #[salsa::tracked] + fn rustc_layout_scalar_valid_range( + db: &dyn DefDatabase, + owner: AdtId, + ) -> RustcLayoutScalarValidRange { + let mut result = RustcLayoutScalarValidRange::default(); + collect_attrs::(db, owner.into(), |attr| { + if let Meta::TokenTree { path, tt } = attr + && (path.is1("rustc_layout_scalar_valid_range_start") + || path.is1("rustc_layout_scalar_valid_range_end")) + && let tt = TokenTreeChildren::new(&tt) + && let Ok(NodeOrToken::Token(value)) = tt.exactly_one() + && let Some(value) = ast::IntNumber::cast(value) + && let Ok(value) = value.value() + { + if path.is1("rustc_layout_scalar_valid_range_start") { + result.start = Some(value) + } else { + result.end = Some(value); + } + } + ControlFlow::Continue(()) + }); + result + } + } + + #[inline] + pub fn doc_aliases(self, db: &dyn DefDatabase, owner: Either) -> &[Symbol] { + if !self.contains(AttrFlags::HAS_DOC_ALIASES) { + return &[]; + } + return match owner { + Either::Left(it) => doc_aliases(db, it), + Either::Right(field) => fields_doc_aliases(db, field.parent) + .get(field.local_id) + .map(|it| &**it) + .unwrap_or_default(), + }; + + #[salsa::tracked(returns(ref))] + fn doc_aliases(db: &dyn DefDatabase, owner: AttrDefId) -> Box<[Symbol]> { + let mut result = Vec::new(); + collect_attrs::(db, owner, |attr| extract_doc_aliases(&mut result, attr)); + result.into_boxed_slice() + } + + 
#[salsa::tracked(returns(ref))] + fn fields_doc_aliases( + db: &dyn DefDatabase, + variant: VariantId, + ) -> ArenaMap> { + collect_field_attrs(db, variant, |cfg_options, field| { + let mut result = Vec::new(); + expand_cfg_attr( + field.value.attrs(), + || cfg_options, + |attr, _, _, _| extract_doc_aliases(&mut result, attr), + ); + result.into_boxed_slice() + }) + } + } + + #[inline] + pub fn cfgs(self, db: &dyn DefDatabase, owner: Either) -> Option<&CfgExpr> { + if !self.contains(AttrFlags::HAS_CFG) { + return None; + } + return match owner { + Either::Left(it) => cfgs(db, it).as_ref(), + Either::Right(field) => { + fields_cfgs(db, field.parent).get(field.local_id).and_then(|it| it.as_ref()) + } + }; + + // We LRU this query because it is only used by IDE. + #[salsa::tracked(returns(ref), lru = 250)] + fn cfgs(db: &dyn DefDatabase, owner: AttrDefId) -> Option { + let mut result = Vec::new(); + collect_attrs::(db, owner, |attr| extract_cfgs(&mut result, attr)); + match result.len() { + 0 => None, + 1 => result.into_iter().next(), + _ => Some(CfgExpr::All(result.into_boxed_slice())), + } + } + + // We LRU this query because it is only used by IDE. + #[salsa::tracked(returns(ref), lru = 50)] + fn fields_cfgs( + db: &dyn DefDatabase, + variant: VariantId, + ) -> ArenaMap> { + collect_field_attrs(db, variant, |cfg_options, field| { + let mut result = Vec::new(); + expand_cfg_attr( + field.value.attrs(), + || cfg_options, + |attr, _, _, _| extract_cfgs(&mut result, attr), + ); + match result.len() { + 0 => None, + 1 => result.into_iter().next(), + _ => Some(CfgExpr::All(result.into_boxed_slice())), + } + }) + } + } + + #[inline] + pub fn doc_keyword(db: &dyn DefDatabase, owner: InternedModuleId) -> Option { + if !AttrFlags::query(db, AttrDefId::ModuleId(owner)).contains(AttrFlags::HAS_DOC_KEYWORD) { + return None; + } + return doc_keyword(db, owner); + + #[salsa::tracked] + fn doc_keyword(db: &dyn DefDatabase, owner: InternedModuleId) -> Option { + collect_attrs(db, AttrDefId::ModuleId(owner), |attr| { + if let Meta::TokenTree { path, tt } = attr + && path.is1("doc") + { + for atom in DocAtom::parse(tt) { + if let DocAtom::KeyValue { key, value } = atom + && key == "keyword" + { + return ControlFlow::Break(Symbol::intern(&value)); + } + } + } + ControlFlow::Continue(()) + }) + } + } + + // We LRU this query because it is only used by IDE. + #[salsa::tracked(returns(ref), lru = 250)] + pub fn docs(db: &dyn DefDatabase, owner: AttrDefId) -> Option> { + let (source, outer_mod_decl, krate) = attrs_source(db, owner); + let inner_attrs_node = source.value.inner_attributes_node(); + extract_docs(&|| krate.cfg_options(db), source, outer_mod_decl, inner_attrs_node) + } + + #[inline] + pub fn field_docs(db: &dyn DefDatabase, field: FieldId) -> Option<&Docs> { + return fields_docs(db, field.parent).get(field.local_id).and_then(|it| it.as_deref()); + + // We LRU this query because it is only used by IDE. 
+ #[salsa::tracked(returns(ref), lru = 50)] + pub fn fields_docs( + db: &dyn DefDatabase, + variant: VariantId, + ) -> ArenaMap>> { + collect_field_attrs(db, variant, |cfg_options, field| { + extract_docs(&|| cfg_options, field, None, None) + }) + } + } + + #[inline] + pub fn derive_info(db: &dyn DefDatabase, owner: MacroId) -> Option<&DeriveInfo> { + if !AttrFlags::query(db, owner.into()).contains(AttrFlags::IS_DERIVE_OR_BUILTIN_MACRO) { + return None; + } + + return derive_info(db, owner).as_ref(); + + #[salsa::tracked(returns(ref))] + fn derive_info(db: &dyn DefDatabase, owner: MacroId) -> Option { + collect_attrs(db, owner.into(), |attr| { + if let Meta::TokenTree { path, tt } = attr + && path.segments.len() == 1 + && matches!( + path.segments[0].text(), + "proc_macro_derive" | "rustc_builtin_macro" + ) + && let mut tt = TokenTreeChildren::new(&tt) + && let Some(NodeOrToken::Token(trait_name)) = tt.next() + && trait_name.kind().is_any_identifier() + { + let trait_name = Symbol::intern(trait_name.text()); + + let helpers = if let Some(NodeOrToken::Token(comma)) = tt.next() + && comma.kind() == T![,] + && let Some(NodeOrToken::Token(attributes)) = tt.next() + && attributes.text() == "attributes" + && let Some(NodeOrToken::Node(attributes)) = tt.next() + { + attributes + .syntax() + .children_with_tokens() + .filter_map(NodeOrToken::into_token) + .filter(|it| it.kind().is_any_identifier()) + .map(|it| Symbol::intern(it.text())) + .collect::>() + } else { + Box::new([]) + }; + + ControlFlow::Break(DeriveInfo { trait_name, helpers }) + } else { + ControlFlow::Continue(()) + } + }) + } + } +} + +fn merge_repr(this: &mut ReprOptions, other: ReprOptions) { + let ReprOptions { int, align, pack, flags, field_shuffle_seed: _ } = this; + flags.insert(other.flags); + *align = (*align).max(other.align); + *pack = match (*pack, other.pack) { + (Some(pack), None) | (None, Some(pack)) => Some(pack), + _ => (*pack).min(other.pack), + }; + if other.int.is_some() { + *int = other.int; + } +} + +fn parse_repr_tt(tt: &ast::TokenTree) -> Option { + use crate::builtin_type::{BuiltinInt, BuiltinUint}; + use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions}; + + let mut tts = TokenTreeChildren::new(tt).peekable(); + + let mut acc = ReprOptions::default(); + while let Some(tt) = tts.next() { + let NodeOrToken::Token(ident) = tt else { + continue; + }; + if !ident.kind().is_any_identifier() { + continue; + } + let repr = match ident.text() { + "packed" => { + let pack = if let Some(NodeOrToken::Node(tt)) = tts.peek() { + let tt = tt.clone(); + tts.next(); + let mut tt_iter = TokenTreeChildren::new(&tt); + if let Some(NodeOrToken::Token(lit)) = tt_iter.next() + && let Some(lit) = ast::IntNumber::cast(lit) + && let Ok(lit) = lit.value() + && let Ok(lit) = lit.try_into() + { + lit + } else { + 0 + } + } else { + 0 + }; + let pack = Some(Align::from_bytes(pack).unwrap_or(Align::ONE)); + ReprOptions { pack, ..Default::default() } + } + "align" => { + let mut align = None; + if let Some(NodeOrToken::Node(tt)) = tts.peek() { + let tt = tt.clone(); + tts.next(); + let mut tt_iter = TokenTreeChildren::new(&tt); + if let Some(NodeOrToken::Token(lit)) = tt_iter.next() + && let Some(lit) = ast::IntNumber::cast(lit) + && let Ok(lit) = lit.value() + && let Ok(lit) = lit.try_into() + { + align = Align::from_bytes(lit).ok(); + } + } + ReprOptions { align, ..Default::default() } + } + "C" => ReprOptions { flags: ReprFlags::IS_C, ..Default::default() }, + "transparent" => ReprOptions { flags: 
ReprFlags::IS_TRANSPARENT, ..Default::default() }, + "simd" => ReprOptions { flags: ReprFlags::IS_SIMD, ..Default::default() }, + repr => { + let mut int = None; + if let Some(builtin) = BuiltinInt::from_suffix(repr) + .map(Either::Left) + .or_else(|| BuiltinUint::from_suffix(repr).map(Either::Right)) + { + int = Some(match builtin { + Either::Left(bi) => match bi { + BuiltinInt::Isize => IntegerType::Pointer(true), + BuiltinInt::I8 => IntegerType::Fixed(Integer::I8, true), + BuiltinInt::I16 => IntegerType::Fixed(Integer::I16, true), + BuiltinInt::I32 => IntegerType::Fixed(Integer::I32, true), + BuiltinInt::I64 => IntegerType::Fixed(Integer::I64, true), + BuiltinInt::I128 => IntegerType::Fixed(Integer::I128, true), + }, + Either::Right(bu) => match bu { + BuiltinUint::Usize => IntegerType::Pointer(false), + BuiltinUint::U8 => IntegerType::Fixed(Integer::I8, false), + BuiltinUint::U16 => IntegerType::Fixed(Integer::I16, false), + BuiltinUint::U32 => IntegerType::Fixed(Integer::I32, false), + BuiltinUint::U64 => IntegerType::Fixed(Integer::I64, false), + BuiltinUint::U128 => IntegerType::Fixed(Integer::I128, false), + }, + }); + } + ReprOptions { int, ..Default::default() } + } + }; + merge_repr(&mut acc, repr); + } + + Some(acc) +} + +fn parse_rustc_legacy_const_generics(tt: ast::TokenTree) -> Box<[u32]> { + TokenTreeChildren::new(&tt) + .filter_map(|param| { + ast::IntNumber::cast(param.into_token()?)?.value().ok()?.try_into().ok() + }) + .collect() +} + +#[derive(Debug)] +enum DocAtom { + /// eg. `#[doc(hidden)]` + Flag(SmolStr), + /// eg. `#[doc(alias = "it")]` + /// + /// Note that a key can have multiple values that are all considered "active" at the same time. + /// For example, `#[doc(alias = "x")]` and `#[doc(alias = "y")]`. + KeyValue { key: SmolStr, value: SmolStr }, + /// eg. 
`#[doc(alias("x", "y"))]` + Alias(Vec), +} + +impl DocAtom { + fn parse(tt: ast::TokenTree) -> SmallVec<[DocAtom; 1]> { + let mut iter = TokenTreeChildren::new(&tt).peekable(); + let mut result = SmallVec::new(); + while iter.peek().is_some() { + if let Some(expr) = next_doc_expr(&mut iter) { + result.push(expr); + } + } + result + } +} + +fn next_doc_expr(it: &mut Peekable) -> Option { + let name = match it.next() { + Some(NodeOrToken::Token(token)) if token.kind().is_any_identifier() => { + SmolStr::new(token.text()) + } + _ => return None, + }; + + let ret = match it.peek() { + Some(NodeOrToken::Token(eq)) if eq.kind() == T![=] => { + it.next(); + if let Some(NodeOrToken::Token(value)) = it.next() + && let Some(value) = ast::String::cast(value) + && let Ok(value) = value.value() + { + DocAtom::KeyValue { key: name, value: SmolStr::new(&*value) } + } else { + return None; + } + } + Some(NodeOrToken::Node(subtree)) => { + if name != "alias" { + return None; + } + let aliases = TokenTreeChildren::new(subtree) + .filter_map(|alias| { + Some(SmolStr::new(&*ast::String::cast(alias.into_token()?)?.value().ok()?)) + }) + .collect(); + it.next(); + DocAtom::Alias(aliases) + } + _ => DocAtom::Flag(name), + }; + Some(ret) +} + +#[cfg(test)] +mod tests { + use expect_test::expect; + use hir_expand::InFile; + use test_fixture::WithFixture; + use tt::{TextRange, TextSize}; + + use crate::attrs::IsInnerDoc; + use crate::{attrs::Docs, test_db::TestDB}; + + #[test] + fn docs() { + let (_db, file_id) = TestDB::with_single_file(""); + let mut docs = Docs { + docs: String::new(), + docs_source_map: Vec::new(), + outline_mod: None, + inline_file: file_id.into(), + prefix_len: TextSize::new(0), + inline_inner_docs_start: None, + outline_inner_docs_start: None, + }; + let mut indent = usize::MAX; + + let outer = " foo\n\tbar baz"; + let mut ast_offset = TextSize::new(123); + for line in outer.split('\n') { + docs.extend_with_doc_str(line, ast_offset, &mut indent); + ast_offset += TextSize::of(line) + TextSize::of("\n"); + } + + docs.inline_inner_docs_start = Some(TextSize::of(&docs.docs)); + ast_offset += TextSize::new(123); + let inner = " bar \n baz"; + for line in inner.split('\n') { + docs.extend_with_doc_str(line, ast_offset, &mut indent); + ast_offset += TextSize::of(line) + TextSize::of("\n"); + } + + assert_eq!(indent, 1); + expect![[r#" + [ + DocsSourceMapLine { + string_offset: 0, + ast_offset: 123, + }, + DocsSourceMapLine { + string_offset: 5, + ast_offset: 128, + }, + DocsSourceMapLine { + string_offset: 15, + ast_offset: 261, + }, + DocsSourceMapLine { + string_offset: 20, + ast_offset: 267, + }, + ] + "#]] + .assert_debug_eq(&docs.docs_source_map); + + docs.remove_indent(indent, 0); + + assert_eq!(docs.inline_inner_docs_start, Some(TextSize::new(13))); + + assert_eq!(docs.docs, "foo\nbar baz\nbar\nbaz\n"); + expect![[r#" + [ + DocsSourceMapLine { + string_offset: 0, + ast_offset: 124, + }, + DocsSourceMapLine { + string_offset: 4, + ast_offset: 129, + }, + DocsSourceMapLine { + string_offset: 13, + ast_offset: 262, + }, + DocsSourceMapLine { + string_offset: 17, + ast_offset: 268, + }, + ] + "#]] + .assert_debug_eq(&docs.docs_source_map); + + docs.append(&docs.clone()); + docs.prepend_str("prefix---"); + assert_eq!(docs.docs, "prefix---foo\nbar baz\nbar\nbaz\nfoo\nbar baz\nbar\nbaz\n"); + expect![[r#" + [ + DocsSourceMapLine { + string_offset: 0, + ast_offset: 124, + }, + DocsSourceMapLine { + string_offset: 4, + ast_offset: 129, + }, + DocsSourceMapLine { + string_offset: 13, + ast_offset: 
262, + }, + DocsSourceMapLine { + string_offset: 17, + ast_offset: 268, + }, + DocsSourceMapLine { + string_offset: 21, + ast_offset: 124, + }, + DocsSourceMapLine { + string_offset: 25, + ast_offset: 129, + }, + DocsSourceMapLine { + string_offset: 34, + ast_offset: 262, + }, + DocsSourceMapLine { + string_offset: 38, + ast_offset: 268, + }, + ] + "#]] + .assert_debug_eq(&docs.docs_source_map); + + let range = |start, end| TextRange::new(TextSize::new(start), TextSize::new(end)); + let in_file = |range| InFile::new(file_id.into(), range); + assert_eq!(docs.find_ast_range(range(0, 2)), None); + assert_eq!(docs.find_ast_range(range(8, 10)), None); + assert_eq!( + docs.find_ast_range(range(9, 10)), + Some((in_file(range(124, 125)), IsInnerDoc::No)) + ); + assert_eq!(docs.find_ast_range(range(20, 23)), None); + assert_eq!( + docs.find_ast_range(range(23, 25)), + Some((in_file(range(263, 265)), IsInnerDoc::Yes)) + ); + } +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/db.rs b/src/tools/rust-analyzer/crates/hir-def/src/db.rs index 4e1d598623abe..ad29900876727 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/db.rs @@ -1,23 +1,21 @@ //! Defines database & queries for name resolution. use base_db::{Crate, RootQueryDb, SourceDatabase}; -use either::Either; use hir_expand::{ EditionedFileId, HirFileId, InFile, Lookup, MacroCallId, MacroDefId, MacroDefKind, db::ExpandDatabase, }; -use intern::sym; use la_arena::ArenaMap; -use syntax::{AstPtr, ast}; use triomphe::Arc; use crate::{ - AssocItemId, AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, - EnumVariantId, EnumVariantLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, ExternCrateLoc, - FunctionId, FunctionLoc, GenericDefId, ImplId, ImplLoc, LocalFieldId, Macro2Id, Macro2Loc, - MacroExpander, MacroId, MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ProcMacroId, - ProcMacroLoc, StaticId, StaticLoc, StructId, StructLoc, TraitId, TraitLoc, TypeAliasId, - TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc, VariantId, - attr::{Attrs, AttrsWithOwner}, + AssocItemId, AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, CrateRootModuleId, DefWithBodyId, + EnumId, EnumLoc, EnumVariantId, EnumVariantLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, + ExternCrateLoc, FunctionId, FunctionLoc, GenericDefId, HasModule, ImplId, ImplLoc, + InternedModuleId, LocalFieldId, Macro2Id, Macro2Loc, MacroExpander, MacroId, MacroRulesId, + MacroRulesLoc, MacroRulesLocFlags, ProcMacroId, ProcMacroLoc, StaticId, StaticLoc, StructId, + StructLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc, + VariantId, + attrs::AttrFlags, expr_store::{ Body, BodySourceMap, ExpressionStore, ExpressionStoreSourceMap, scope::ExprScopes, }, @@ -30,7 +28,6 @@ use crate::{ ConstSignature, EnumSignature, FunctionSignature, ImplSignature, StaticSignature, StructSignature, TraitSignature, TypeAliasSignature, UnionSignature, }, - tt, visibility::{self, Visibility}, }; @@ -238,28 +235,11 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase { def: GenericDefId, ) -> (Arc, Arc, Arc); - // region:attrs - - #[salsa::invoke(Attrs::fields_attrs_query)] - fn fields_attrs(&self, def: VariantId) -> Arc>; - - // should this really be a query? - #[salsa::invoke(crate::attr::fields_attrs_source_map)] - fn fields_attrs_source_map( - &self, - def: VariantId, - ) -> Arc>>>; - - // FIXME: Make this a non-interned query. 
- #[salsa::invoke_interned(AttrsWithOwner::attrs_query)] - fn attrs(&self, def: AttrDefId) -> Attrs; - + // FIXME: Get rid of this, call `AttrFlags::lang_item()` directly. #[salsa::transparent] #[salsa::invoke(lang_item::lang_attr)] fn lang_attr(&self, def: AttrDefId) -> Option; - // endregion:attrs - #[salsa::invoke(ImportMap::import_map_query)] fn import_map(&self, krate: Crate) -> Arc; @@ -303,36 +283,9 @@ fn include_macro_invoc( } fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: Crate) -> bool { - let file = crate_id.data(db).root_file_id(db); - let item_tree = db.file_item_tree(file.into()); - let attrs = item_tree.top_level_raw_attrs(); - for attr in &**attrs { - match attr.path().as_ident() { - Some(ident) if *ident == sym::no_std => return true, - Some(ident) if *ident == sym::cfg_attr => {} - _ => continue, - } - - // This is a `cfg_attr`; check if it could possibly expand to `no_std`. - // Syntax is: `#[cfg_attr(condition(cfg, style), attr0, attr1, <...>)]` - let tt = match attr.token_tree_value() { - Some(tt) => tt.token_trees(), - None => continue, - }; - - let segments = - tt.split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(p)) if p.char == ',')); - for output in segments.skip(1) { - match output.flat_tokens() { - [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::no_std => { - return true; - } - _ => {} - } - } - } - - false + let root_module = CrateRootModuleId::from(crate_id).module(db); + let attrs = AttrFlags::query(db, AttrDefId::ModuleId(InternedModuleId::new(db, root_module))); + attrs.contains(AttrFlags::IS_NO_STD) } fn macro_def(db: &dyn DefDatabase, id: MacroId) -> MacroDefId { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/expander.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/expander.rs index 23b9712d1e6c1..6a2f06b0a6f68 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/expander.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/expander.rs @@ -17,11 +17,10 @@ use syntax::{AstNode, Parse, ast}; use triomphe::Arc; use tt::TextRange; -use crate::attr::Attrs; -use crate::expr_store::HygieneId; -use crate::macro_call_as_call_id; -use crate::nameres::DefMap; -use crate::{MacroId, UnresolvedMacro, db::DefDatabase}; +use crate::{ + MacroId, UnresolvedMacro, attrs::AttrFlags, db::DefDatabase, expr_store::HygieneId, + macro_call_as_call_id, nameres::DefMap, +}; #[derive(Debug)] pub(super) struct Expander { @@ -70,11 +69,10 @@ impl Expander { pub(super) fn is_cfg_enabled( &self, - db: &dyn DefDatabase, - has_attrs: &dyn HasAttrs, + owner: &dyn HasAttrs, cfg_options: &CfgOptions, ) -> Result<(), cfg::CfgExpr> { - Attrs::is_cfg_enabled_for(db, has_attrs, self.span_map.as_ref(), cfg_options) + AttrFlags::is_cfg_enabled_for(owner, cfg_options) } pub(super) fn call_syntax_ctx(&self) -> SyntaxContext { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs index 3794cb18e9360..fbe0b1ab95965 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs @@ -12,7 +12,6 @@ use cfg::CfgOptions; use either::Either; use hir_expand::{ HirFileId, InFile, MacroDefId, - mod_path::tool_path, name::{AsName, Name}, span_map::SpanMapRef, }; @@ -34,6 +33,7 @@ use tt::TextRange; use crate::{ AdtId, BlockId, BlockLoc, DefWithBodyId, FunctionId, GenericDefId, ImplId, MacroId, ModuleDefId, ModuleId, TraitId, TypeAliasId, UnresolvedMacro, 
+ attrs::AttrFlags, builtin_type::BuiltinUint, db::DefDatabase, expr_store::{ @@ -87,14 +87,16 @@ pub(super) fn lower_body( let mut params = vec![]; let mut collector = ExprCollector::new(db, module, current_file_id); - let skip_body = match owner { - DefWithBodyId::FunctionId(it) => db.attrs(it.into()), - DefWithBodyId::StaticId(it) => db.attrs(it.into()), - DefWithBodyId::ConstId(it) => db.attrs(it.into()), - DefWithBodyId::VariantId(it) => db.attrs(it.into()), - } - .rust_analyzer_tool() - .any(|attr| *attr.path() == tool_path![skip]); + let skip_body = AttrFlags::query( + db, + match owner { + DefWithBodyId::FunctionId(it) => it.into(), + DefWithBodyId::StaticId(it) => it.into(), + DefWithBodyId::ConstId(it) => it.into(), + DefWithBodyId::VariantId(it) => it.into(), + }, + ) + .contains(AttrFlags::RUST_ANALYZER_SKIP); // If #[rust_analyzer::skip] annotated, only construct enough information for the signature // and skip the body. if skip_body { @@ -2485,7 +2487,7 @@ impl ExprCollector<'_> { /// Returns `None` (and emits diagnostics) when `owner` if `#[cfg]`d out, and `Some(())` when /// not. fn check_cfg(&mut self, owner: &dyn ast::HasAttrs) -> bool { - let enabled = self.expander.is_cfg_enabled(self.db, owner, self.cfg_options); + let enabled = self.expander.is_cfg_enabled(owner, self.cfg_options); match enabled { Ok(()) => true, Err(cfg) => { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs index 5b9da3c5e6680..e386e8d0c596c 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs @@ -12,7 +12,8 @@ use span::Edition; use syntax::ast::HasName; use crate::{ - AdtId, DefWithBodyId, GenericDefId, TypeParamId, VariantId, + AdtId, DefWithBodyId, FunctionId, GenericDefId, StructId, TypeParamId, VariantId, + attrs::AttrFlags, expr_store::path::{GenericArg, GenericArgs}, hir::{ Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, Movability, Statement, @@ -167,7 +168,7 @@ pub fn print_signature(db: &dyn DefDatabase, owner: GenericDefId, edition: Editi GenericDefId::AdtId(id) => match id { AdtId::StructId(id) => { let signature = db.struct_signature(id); - print_struct(db, &signature, edition) + print_struct(db, id, &signature, edition) } AdtId::UnionId(id) => { format!("unimplemented {id:?}") @@ -179,7 +180,7 @@ pub fn print_signature(db: &dyn DefDatabase, owner: GenericDefId, edition: Editi GenericDefId::ConstId(id) => format!("unimplemented {id:?}"), GenericDefId::FunctionId(id) => { let signature = db.function_signature(id); - print_function(db, &signature, edition) + print_function(db, id, &signature, edition) } GenericDefId::ImplId(id) => format!("unimplemented {id:?}"), GenericDefId::StaticId(id) => format!("unimplemented {id:?}"), @@ -208,7 +209,8 @@ pub fn print_path( pub fn print_struct( db: &dyn DefDatabase, - StructSignature { name, generic_params, store, flags, shape, repr }: &StructSignature, + id: StructId, + StructSignature { name, generic_params, store, flags, shape }: &StructSignature, edition: Edition, ) -> String { let mut p = Printer { @@ -219,7 +221,7 @@ pub fn print_struct( line_format: LineFormat::Newline, edition, }; - if let Some(repr) = repr { + if let Some(repr) = AttrFlags::repr(db, id.into()) { if repr.c() { wln!(p, "#[repr(C)]"); } @@ -255,7 +257,8 @@ pub fn print_struct( pub fn print_function( db: &dyn DefDatabase, - FunctionSignature { + id: FunctionId, + signature @ 
FunctionSignature { name, generic_params, store, @@ -263,10 +266,10 @@ pub fn print_function( ret_type, abi, flags, - legacy_const_generics_indices, }: &FunctionSignature, edition: Edition, ) -> String { + let legacy_const_generics_indices = signature.legacy_const_generics_indices(db, id); let mut p = Printer { db, store, @@ -298,7 +301,7 @@ pub fn print_function( if i != 0 { w!(p, ", "); } - if legacy_const_generics_indices.as_ref().is_some_and(|idx| idx.contains(&(i as u32))) { + if legacy_const_generics_indices.is_some_and(|idx| idx.contains(&(i as u32))) { w!(p, "const: "); } p.print_type_ref(*param); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body/block.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body/block.rs index c7707378a5b31..0cb9325b502e2 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body/block.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body/block.rs @@ -189,8 +189,8 @@ fn f() { } "#, expect![[r#" - BlockId(3c01) in BlockRelativeModuleId { block: Some(BlockId(3c00)), local_id: Idx::(1) } - BlockId(3c00) in BlockRelativeModuleId { block: None, local_id: Idx::(0) } + BlockId(3801) in BlockRelativeModuleId { block: Some(BlockId(3800)), local_id: Idx::(1) } + BlockId(3800) in BlockRelativeModuleId { block: None, local_id: Idx::(0) } crate scope "#]], ); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/signatures.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/signatures.rs index b68674c7a74f4..2dac4e7fc84b6 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/signatures.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/signatures.rs @@ -38,14 +38,24 @@ fn lower_and_print(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expe match def { GenericDefId::AdtId(adt_id) => match adt_id { crate::AdtId::StructId(struct_id) => { - out += &print_struct(&db, &db.struct_signature(struct_id), Edition::CURRENT); + out += &print_struct( + &db, + struct_id, + &db.struct_signature(struct_id), + Edition::CURRENT, + ); } crate::AdtId::UnionId(_id) => (), crate::AdtId::EnumId(_id) => (), }, GenericDefId::ConstId(_id) => (), GenericDefId::FunctionId(function_id) => { - out += &print_function(&db, &db.function_signature(function_id), Edition::CURRENT) + out += &print_function( + &db, + function_id, + &db.function_signature(function_id), + Edition::CURRENT, + ) } GenericDefId::ImplId(_id) => (), diff --git a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs index f31f355cfa5d7..67cf466276c5d 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs @@ -13,7 +13,8 @@ use stdx::format_to; use triomphe::Arc; use crate::{ - AssocItemId, AttrDefId, Complete, FxIndexMap, ModuleDefId, ModuleId, TraitId, + AssocItemId, AttrDefId, Complete, FxIndexMap, InternedModuleId, ModuleDefId, ModuleId, TraitId, + attrs::AttrFlags, db::DefDatabase, item_scope::{ImportOrExternCrate, ItemInNs}, nameres::{DefMap, assoc::TraitItems, crate_def_map}, @@ -165,17 +166,34 @@ impl ImportMap { } } else { match item { - ItemInNs::Types(id) | ItemInNs::Values(id) => id.try_into().ok(), + ItemInNs::Types(id) | ItemInNs::Values(id) => match id { + ModuleDefId::ModuleId(it) => { + Some(AttrDefId::ModuleId(InternedModuleId::new(db, it))) + } + ModuleDefId::FunctionId(it) => Some(it.into()), + ModuleDefId::AdtId(it) 
=> Some(it.into()), + ModuleDefId::EnumVariantId(it) => Some(it.into()), + ModuleDefId::ConstId(it) => Some(it.into()), + ModuleDefId::StaticId(it) => Some(it.into()), + ModuleDefId::TraitId(it) => Some(it.into()), + ModuleDefId::TypeAliasId(it) => Some(it.into()), + ModuleDefId::MacroId(it) => Some(it.into()), + ModuleDefId::BuiltinType(_) => None, + }, ItemInNs::Macros(id) => Some(id.into()), } }; let (is_doc_hidden, is_unstable, do_not_complete) = match attr_id { None => (false, false, Complete::Yes), Some(attr_id) => { - let attrs = db.attrs(attr_id); + let attrs = AttrFlags::query(db, attr_id); let do_not_complete = - Complete::extract(matches!(attr_id, AttrDefId::TraitId(_)), &attrs); - (attrs.has_doc_hidden(), attrs.is_unstable(), do_not_complete) + Complete::extract(matches!(attr_id, AttrDefId::TraitId(_)), attrs); + ( + attrs.contains(AttrFlags::IS_DOC_HIDDEN), + attrs.contains(AttrFlags::IS_UNSTABLE), + do_not_complete, + ) } }; @@ -239,15 +257,15 @@ impl ImportMap { }; let attr_id = item.into(); - let attrs = &db.attrs(attr_id); + let attrs = AttrFlags::query(db, attr_id); let item_do_not_complete = Complete::extract(false, attrs); let do_not_complete = Complete::for_trait_item(trait_import_info.complete, item_do_not_complete); let assoc_item_info = ImportInfo { container: trait_import_info.container, name: assoc_item_name.clone(), - is_doc_hidden: attrs.has_doc_hidden(), - is_unstable: attrs.is_unstable(), + is_doc_hidden: attrs.contains(AttrFlags::IS_DOC_HIDDEN), + is_unstable: attrs.contains(AttrFlags::IS_UNSTABLE), complete: do_not_complete, }; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs index f35df8d3a7e11..2a104fff2b92c 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs @@ -30,6 +30,7 @@ //! In general, any item in the `ItemTree` stores its `AstId`, which allows mapping it back to its //! surface syntax. +mod attrs; mod lower; mod pretty; #[cfg(test)] @@ -43,10 +44,8 @@ use std::{ }; use ast::{AstNode, StructKind}; -use base_db::Crate; use hir_expand::{ ExpandTo, HirFileId, - attrs::RawAttrs, mod_path::{ModPath, PathKind}, name::Name, }; @@ -59,9 +58,12 @@ use syntax::{SyntaxKind, ast, match_ast}; use thin_vec::ThinVec; use triomphe::Arc; -use crate::{BlockId, Lookup, attr::Attrs, db::DefDatabase}; +use crate::{BlockId, Lookup, db::DefDatabase}; -pub(crate) use crate::item_tree::lower::{lower_use_tree, visibility_from_ast}; +pub(crate) use crate::item_tree::{ + attrs::*, + lower::{lower_use_tree, visibility_from_ast}, +}; #[derive(Copy, Clone, Eq, PartialEq)] pub(crate) struct RawVisibilityId(u32); @@ -96,7 +98,7 @@ pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> let mut item_tree = match_ast! 
{ match syntax { ast::SourceFile(file) => { - let top_attrs = RawAttrs::new(db, &file, ctx.span_map()); + let top_attrs = ctx.lower_attrs(&file); let mut item_tree = ctx.lower_module_items(&file); item_tree.top_attrs = top_attrs; item_tree @@ -132,7 +134,7 @@ pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> attrs: FxHashMap::default(), small_data: FxHashMap::default(), big_data: FxHashMap::default(), - top_attrs: RawAttrs::EMPTY, + top_attrs: AttrsOrCfg::empty(), vis: ItemVisibilities { arena: ThinVec::new() }, }) }) @@ -168,7 +170,7 @@ pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc attrs: FxHashMap::default(), small_data: FxHashMap::default(), big_data: FxHashMap::default(), - top_attrs: RawAttrs::EMPTY, + top_attrs: AttrsOrCfg::empty(), vis: ItemVisibilities { arena: ThinVec::new() }, }) }) @@ -182,8 +184,8 @@ pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc #[derive(Debug, Default, Eq, PartialEq)] pub struct ItemTree { top_level: Box<[ModItemId]>, - top_attrs: RawAttrs, - attrs: FxHashMap, RawAttrs>, + top_attrs: AttrsOrCfg, + attrs: FxHashMap, AttrsOrCfg>, vis: ItemVisibilities, big_data: FxHashMap, BigModItem>, small_data: FxHashMap, SmallModItem>, @@ -197,26 +199,12 @@ impl ItemTree { } /// Returns the inner attributes of the source file. - pub(crate) fn top_level_raw_attrs(&self) -> &RawAttrs { + pub(crate) fn top_level_attrs(&self) -> &AttrsOrCfg { &self.top_attrs } - /// Returns the inner attributes of the source file. - pub(crate) fn top_level_attrs(&self, db: &dyn DefDatabase, krate: Crate) -> Attrs { - Attrs::expand_cfg_attr(db, krate, self.top_attrs.clone()) - } - - pub(crate) fn raw_attrs(&self, of: FileAstId) -> &RawAttrs { - self.attrs.get(&of).unwrap_or(&RawAttrs::EMPTY) - } - - pub(crate) fn attrs( - &self, - db: &dyn DefDatabase, - krate: Crate, - of: FileAstId, - ) -> Attrs { - Attrs::expand_cfg_attr(db, krate, self.raw_attrs(of).clone()) + pub(crate) fn attrs(&self, of: FileAstId) -> Option<&AttrsOrCfg> { + self.attrs.get(&of) } /// Returns a count of a few, expensive items. diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/attrs.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/attrs.rs new file mode 100644 index 0000000000000..5c635a4b3831c --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/attrs.rs @@ -0,0 +1,220 @@ +//! Defines attribute helpers for name resolution. +//! +//! Notice we don't preserve all attributes for name resolution, to save space: +//! for example, we skip doc comments (desugared to `#[doc = "..."]` attributes) +//! and `#[inline]`. The filtered attributes are listed in [`hir_expand::attrs`]. + +use std::{ + borrow::Cow, + convert::Infallible, + ops::{self, ControlFlow}, +}; + +use cfg::{CfgExpr, CfgOptions}; +use either::Either; +use hir_expand::{ + attrs::{Attr, AttrId, AttrInput, Meta, collect_item_tree_attrs}, + mod_path::ModPath, + name::Name, + span_map::SpanMapRef, +}; +use intern::{Interned, Symbol, sym}; +use syntax::{AstNode, T, ast}; +use syntax_bridge::DocCommentDesugarMode; +use tt::token_to_literal; + +use crate::{db::DefDatabase, item_tree::lower::Ctx}; + +#[derive(Debug, PartialEq, Eq)] +pub(crate) enum AttrsOrCfg { + Enabled { + attrs: AttrsOwned, + }, + /// This only collects the attributes up to the disabled `cfg` (this is what needed for crate-level attributes.) 
+ CfgDisabled(Box<(CfgExpr, AttrsOwned)>), +} + +impl Default for AttrsOrCfg { + #[inline] + fn default() -> Self { + AttrsOrCfg::Enabled { attrs: AttrsOwned(Box::new([])) } + } +} + +impl AttrsOrCfg { + pub(crate) fn lower<'a>( + db: &dyn DefDatabase, + owner: &dyn ast::HasAttrs, + cfg_options: &dyn Fn() -> &'a CfgOptions, + span_map: SpanMapRef<'_>, + ) -> AttrsOrCfg { + let mut attrs = Vec::new(); + let result = + collect_item_tree_attrs::(owner, cfg_options, |meta, container, _, _| { + // NOTE: We cannot early return from this function, *every* attribute must be pushed, otherwise we'll mess the `AttrId` + // tracking. + let (span, path_range, input) = match meta { + Meta::NamedKeyValue { path_range, name: _, value } => { + let span = span_map.span_for_range(path_range); + let input = value.map(|value| { + Box::new(AttrInput::Literal(token_to_literal( + value.text(), + span_map.span_for_range(value.text_range()), + ))) + }); + (span, path_range, input) + } + Meta::TokenTree { path, tt } => { + let span = span_map.span_for_range(path.range); + let tt = syntax_bridge::syntax_node_to_token_tree( + tt.syntax(), + span_map, + span, + DocCommentDesugarMode::ProcMacro, + ); + let input = Some(Box::new(AttrInput::TokenTree(tt))); + (span, path.range, input) + } + Meta::Path { path } => { + let span = span_map.span_for_range(path.range); + (span, path.range, None) + } + }; + + let path = container.token_at_offset(path_range.start()).right_biased().and_then( + |first_path_token| { + let is_abs = matches!(first_path_token.kind(), T![:] | T![::]); + let segments = + std::iter::successors(Some(first_path_token), |it| it.next_token()) + .take_while(|it| it.text_range().end() <= path_range.end()) + .filter(|it| it.kind().is_any_identifier()); + ModPath::from_tokens( + db, + &mut |range| span_map.span_for_range(range).ctx, + is_abs, + segments, + ) + }, + ); + let path = path.unwrap_or_else(|| Name::missing().into()); + + attrs.push(Attr { path: Interned::new(path), input, ctxt: span.ctx }); + ControlFlow::Continue(()) + }); + let attrs = AttrsOwned(attrs.into_boxed_slice()); + match result { + Some(Either::Right(cfg)) => AttrsOrCfg::CfgDisabled(Box::new((cfg, attrs))), + None => AttrsOrCfg::Enabled { attrs }, + } + } +} + +#[derive(Debug, PartialEq, Eq)] +pub(crate) struct AttrsOwned(Box<[Attr]>); + +#[derive(Debug, Clone, Copy)] +pub(crate) struct Attrs<'a>(&'a [Attr]); + +impl ops::Deref for Attrs<'_> { + type Target = [Attr]; + + #[inline] + fn deref(&self) -> &Self::Target { + self.0 + } +} + +impl Ctx<'_> { + #[inline] + pub(super) fn lower_attrs(&self, owner: &dyn ast::HasAttrs) -> AttrsOrCfg { + AttrsOrCfg::lower(self.db, owner, &|| self.cfg_options(), self.span_map()) + } +} + +impl AttrsOwned { + #[inline] + pub(crate) fn as_ref(&self) -> Attrs<'_> { + Attrs(&self.0) + } +} + +impl<'a> Attrs<'a> { + pub(crate) const EMPTY: Self = Attrs(&[]); + + #[inline] + pub(crate) fn by_key(self, key: Symbol) -> AttrQuery<'a> { + AttrQuery { attrs: self, key } + } + + #[inline] + pub(crate) fn iter(self) -> impl Iterator { + self.0.iter().enumerate().map(|(id, attr)| (AttrId::from_item_tree_index(id as u32), attr)) + } + + #[inline] + pub(crate) fn iter_after( + self, + after: Option, + ) -> impl Iterator { + let skip = after.map_or(0, |after| after.item_tree_index() + 1); + self.0[skip as usize..] 
+ .iter() + .enumerate() + .map(move |(id, attr)| (AttrId::from_item_tree_index(id as u32 + skip), attr)) + } + + #[inline] + pub(crate) fn is_proc_macro(&self) -> bool { + self.by_key(sym::proc_macro).exists() + } + + #[inline] + pub(crate) fn is_proc_macro_attribute(&self) -> bool { + self.by_key(sym::proc_macro_attribute).exists() + } +} +#[derive(Debug, Clone)] +pub(crate) struct AttrQuery<'attr> { + attrs: Attrs<'attr>, + key: Symbol, +} + +impl<'attr> AttrQuery<'attr> { + #[inline] + pub(crate) fn tt_values(self) -> impl Iterator { + self.attrs().filter_map(|attr| attr.token_tree_value()) + } + + #[inline] + pub(crate) fn string_value_with_span(self) -> Option<(&'attr Symbol, span::Span)> { + self.attrs().find_map(|attr| attr.string_value_with_span()) + } + + #[inline] + pub(crate) fn string_value_unescape(self) -> Option> { + self.attrs().find_map(|attr| attr.string_value_unescape()) + } + + #[inline] + pub(crate) fn exists(self) -> bool { + self.attrs().next().is_some() + } + + #[inline] + pub(crate) fn attrs(self) -> impl Iterator + Clone { + let key = self.key; + self.attrs.0.iter().filter(move |attr| attr.path.as_ident().is_some_and(|s| *s == key)) + } +} + +impl AttrsOrCfg { + #[inline] + pub(super) fn empty() -> Self { + AttrsOrCfg::Enabled { attrs: AttrsOwned(Box::new([])) } + } + + #[inline] + pub(super) fn is_empty(&self) -> bool { + matches!(self, AttrsOrCfg::Enabled { attrs } if attrs.as_ref().is_empty()) + } +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs index 454e06399583c..b50a75169158d 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs @@ -1,8 +1,9 @@ //! AST -> `ItemTree` lowering code. 
-use std::{cell::OnceCell, collections::hash_map::Entry}; +use std::cell::OnceCell; use base_db::FxIndexSet; +use cfg::CfgOptions; use hir_expand::{ HirFileId, mod_path::PathKind, @@ -22,18 +23,19 @@ use crate::{ item_tree::{ BigModItem, Const, Enum, ExternBlock, ExternCrate, FieldsShape, Function, Impl, ImportAlias, Interned, ItemTree, ItemTreeAstId, Macro2, MacroCall, MacroRules, Mod, - ModItemId, ModKind, ModPath, RawAttrs, RawVisibility, RawVisibilityId, SmallModItem, - Static, Struct, StructKind, Trait, TypeAlias, Union, Use, UseTree, UseTreeKind, - VisibilityExplicitness, + ModItemId, ModKind, ModPath, RawVisibility, RawVisibilityId, SmallModItem, Static, Struct, + StructKind, Trait, TypeAlias, Union, Use, UseTree, UseTreeKind, VisibilityExplicitness, + attrs::AttrsOrCfg, }, }; pub(super) struct Ctx<'a> { - db: &'a dyn DefDatabase, + pub(super) db: &'a dyn DefDatabase, tree: ItemTree, source_ast_id_map: Arc, span_map: OnceCell, file: HirFileId, + cfg_options: OnceCell<&'a CfgOptions>, top_level: Vec, visibilities: FxIndexSet, } @@ -45,12 +47,18 @@ impl<'a> Ctx<'a> { tree: ItemTree::default(), source_ast_id_map: db.ast_id_map(file), file, + cfg_options: OnceCell::new(), span_map: OnceCell::new(), visibilities: FxIndexSet::default(), top_level: Vec::new(), } } + #[inline] + pub(super) fn cfg_options(&self) -> &'a CfgOptions { + self.cfg_options.get_or_init(|| self.file.krate(self.db).cfg_options(self.db)) + } + pub(super) fn span_map(&self) -> SpanMapRef<'_> { self.span_map.get_or_init(|| self.db.span_map(self.file)).as_ref() } @@ -98,7 +106,7 @@ impl<'a> Ctx<'a> { } pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree { - self.tree.top_attrs = RawAttrs::new(self.db, block, self.span_map()); + self.tree.top_attrs = self.lower_attrs(block); self.top_level = block .statements() .filter_map(|stmt| match stmt { @@ -144,22 +152,15 @@ impl<'a> Ctx<'a> { // FIXME: Handle `global_asm!()`. 
ast::Item::AsmExpr(_) => return None, }; - let attrs = RawAttrs::new(self.db, item, self.span_map()); + let attrs = self.lower_attrs(item); self.add_attrs(mod_item.ast_id(), attrs); Some(mod_item) } - fn add_attrs(&mut self, item: FileAstId, attrs: RawAttrs) { + fn add_attrs(&mut self, item: FileAstId, attrs: AttrsOrCfg) { if !attrs.is_empty() { - match self.tree.attrs.entry(item) { - Entry::Occupied(mut entry) => { - *entry.get_mut() = entry.get().merge(attrs); - } - Entry::Vacant(entry) => { - entry.insert(attrs); - } - } + self.tree.attrs.insert(item, attrs); } } @@ -352,7 +353,7 @@ impl<'a> Ctx<'a> { ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(ty)?.into(), ast::ExternItem::MacroCall(call) => self.lower_macro_call(call)?.into(), }; - let attrs = RawAttrs::new(self.db, &item, self.span_map()); + let attrs = self.lower_attrs(&item); self.add_attrs(mod_item.ast_id(), attrs); Some(mod_item) }) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs index 94a6cce3ce33a..66a2d14a734fe 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs @@ -7,8 +7,8 @@ use span::{Edition, ErasedFileAstId}; use crate::{ item_tree::{ Const, DefDatabase, Enum, ExternBlock, ExternCrate, FieldsShape, Function, Impl, ItemTree, - Macro2, MacroCall, MacroRules, Mod, ModItemId, ModKind, RawAttrs, RawVisibilityId, Static, - Struct, Trait, TypeAlias, Union, Use, UseTree, UseTreeKind, + Macro2, MacroCall, MacroRules, Mod, ModItemId, ModKind, RawVisibilityId, Static, Struct, + Trait, TypeAlias, Union, Use, UseTree, UseTreeKind, attrs::AttrsOrCfg, }, visibility::RawVisibility, }; @@ -85,9 +85,13 @@ impl Printer<'_> { } } - fn print_attrs(&mut self, attrs: &RawAttrs, inner: bool, separated_by: &str) { + fn print_attrs(&mut self, attrs: &AttrsOrCfg, inner: bool, separated_by: &str) { + let AttrsOrCfg::Enabled { attrs } = attrs else { + w!(self, "#[cfg(false)]{separated_by}"); + return; + }; let inner = if inner { "!" 
} else { "" }; - for attr in &**attrs { + for attr in &*attrs.as_ref() { w!( self, "#{}[{}{}]{}", diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs index 91b42bef8f79f..a57432f33c3dc 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs @@ -30,10 +30,8 @@ use crate::{A, B}; use a::{c, d::{e}}; "#, - expect![[r##" - #![doc = " file comment"] + expect![[r#" #![no_std] - #![doc = " another file comment"] // AstId: ExternCrate[070B, 0] pub(self) extern crate self as renamed; @@ -47,13 +45,12 @@ use a::{c, d::{e}}; // AstId: Use[0000, 1] pub(self) use globs::*; - #[doc = " docs on import"] // AstId: Use[0000, 2] pub(self) use crate::{A, B}; // AstId: Use[0000, 3] pub(self) use a::{c, d::{e}}; - "##]], + "#]], ); } @@ -195,8 +192,6 @@ mod inline { mod outline; "#, expect![[r##" - #[doc = " outer"] - #[doc = " inner"] // AstId: Module[03AE, 0] pub(self) mod inline { // AstId: Use[0000, 0] diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs index df0705bf90cbc..4f97baadd1834 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs @@ -10,6 +10,7 @@ use triomphe::Arc; use crate::{ AdtId, AssocItemId, AttrDefId, Crate, EnumId, EnumVariantId, FunctionId, ImplId, ModuleDefId, StaticId, StructId, TraitId, TypeAliasId, UnionId, + attrs::AttrFlags, db::DefDatabase, expr_store::path::Path, nameres::{assoc::TraitItems, crate_def_map, crate_local_def_map}, @@ -213,14 +214,14 @@ impl LangItems { T: Into + Copy, { let _p = tracing::info_span!("collect_lang_item").entered(); - if let Some(lang_item) = lang_attr(db, item.into()) { + if let Some(lang_item) = AttrFlags::lang_item(db, item.into()) { self.items.entry(lang_item).or_insert_with(|| constructor(item)); } } } pub(crate) fn lang_attr(db: &dyn DefDatabase, item: AttrDefId) -> Option { - db.attrs(item).lang_item() + AttrFlags::lang_item(db, item) } pub(crate) fn notable_traits_in_deps(db: &dyn DefDatabase, krate: Crate) -> Arc<[Arc<[TraitId]>]> { @@ -240,7 +241,7 @@ pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: Crate) -> Option for (_, module_data) in crate_def_map.modules() { for def in module_data.scope.declarations() { if let ModuleDefId::TraitId(trait_) = def - && db.attrs(trait_.into()).has_doc_notable_trait() + && AttrFlags::query(db, trait_.into()).contains(AttrFlags::IS_DOC_NOTABLE_TRAIT) { traits.push(trait_); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs index e5c213ca937c8..c3c9fc75252d1 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs @@ -19,7 +19,7 @@ extern crate ra_ap_rustc_abi as rustc_abi; pub mod db; -pub mod attr; +pub mod attrs; pub mod builtin_type; pub mod item_scope; pub mod per_ns; @@ -45,7 +45,7 @@ pub mod find_path; pub mod import_map; pub mod visibility; -use intern::{Interned, Symbol, sym}; +use intern::{Interned, Symbol}; pub use rustc_abi as layout; use thin_vec::ThinVec; use triomphe::Arc; @@ -80,7 +80,7 @@ use syntax::{AstNode, ast}; pub use hir_expand::{Intern, Lookup, tt}; use crate::{ - attr::Attrs, + attrs::AttrFlags, builtin_type::BuiltinType, db::DefDatabase, expr_store::ExpressionStoreSourceMap, @@ -956,10 +956,16 @@ impl CallableDefId { } } 
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +// FIXME: We probably should use this in more places. +/// This is used to avoid interning the whole `AttrDefId`, so we intern just modules and not everything. +#[salsa_macros::interned(debug, no_lifetime)] +pub struct InternedModuleId { + pub loc: ModuleId, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, salsa_macros::Supertype)] pub enum AttrDefId { - ModuleId(ModuleId), - FieldId(FieldId), + ModuleId(InternedModuleId), AdtId(AdtId), FunctionId(FunctionId), EnumVariantId(EnumVariantId), @@ -969,15 +975,12 @@ pub enum AttrDefId { TypeAliasId(TypeAliasId), MacroId(MacroId), ImplId(ImplId), - GenericParamId(GenericParamId), ExternBlockId(ExternBlockId), ExternCrateId(ExternCrateId), UseId(UseId), } impl_from!( - ModuleId, - FieldId, AdtId(StructId, EnumId, UnionId), EnumVariantId, StaticId, @@ -987,41 +990,11 @@ impl_from!( TypeAliasId, MacroId(Macro2Id, MacroRulesId, ProcMacroId), ImplId, - GenericParamId, ExternCrateId, UseId for AttrDefId ); -impl TryFrom for AttrDefId { - type Error = (); - - fn try_from(value: ModuleDefId) -> Result { - match value { - ModuleDefId::ModuleId(it) => Ok(it.into()), - ModuleDefId::FunctionId(it) => Ok(it.into()), - ModuleDefId::AdtId(it) => Ok(it.into()), - ModuleDefId::EnumVariantId(it) => Ok(it.into()), - ModuleDefId::ConstId(it) => Ok(it.into()), - ModuleDefId::StaticId(it) => Ok(it.into()), - ModuleDefId::TraitId(it) => Ok(it.into()), - ModuleDefId::TypeAliasId(it) => Ok(it.into()), - ModuleDefId::MacroId(id) => Ok(id.into()), - ModuleDefId::BuiltinType(_) => Err(()), - } - } -} - -impl From for AttrDefId { - fn from(acid: ItemContainerId) -> Self { - match acid { - ItemContainerId::ModuleId(mid) => AttrDefId::ModuleId(mid), - ItemContainerId::ImplId(iid) => AttrDefId::ImplId(iid), - ItemContainerId::TraitId(tid) => AttrDefId::TraitId(tid), - ItemContainerId::ExternBlockId(id) => AttrDefId::ExternBlockId(id), - } - } -} impl From for AttrDefId { fn from(assoc: AssocItemId) -> Self { match assoc { @@ -1262,8 +1235,7 @@ impl HasModule for GenericDefId { impl HasModule for AttrDefId { fn module(&self, db: &dyn DefDatabase) -> ModuleId { match self { - AttrDefId::ModuleId(it) => *it, - AttrDefId::FieldId(it) => it.parent.module(db), + AttrDefId::ModuleId(it) => it.loc(db), AttrDefId::AdtId(it) => it.module(db), AttrDefId::FunctionId(it) => it.module(db), AttrDefId::EnumVariantId(it) => it.module(db), @@ -1273,12 +1245,6 @@ impl HasModule for AttrDefId { AttrDefId::TypeAliasId(it) => it.module(db), AttrDefId::ImplId(it) => it.module(db), AttrDefId::ExternBlockId(it) => it.module(db), - AttrDefId::GenericParamId(it) => match it { - GenericParamId::TypeParamId(it) => it.parent(), - GenericParamId::ConstParamId(it) => it.parent(), - GenericParamId::LifetimeParamId(it) => it.parent, - } - .module(db), AttrDefId::MacroId(it) => it.module(db), AttrDefId::ExternCrateId(it) => it.module(db), AttrDefId::UseId(it) => it.module(db), @@ -1402,32 +1368,18 @@ pub enum Complete { } impl Complete { - pub fn extract(is_trait: bool, attrs: &Attrs) -> Complete { - let mut do_not_complete = Complete::Yes; - for ra_attr in attrs.rust_analyzer_tool() { - let segments = ra_attr.path.segments(); - if segments.len() != 2 { - continue; - } - let action = segments[1].symbol(); - if *action == sym::completions { - match ra_attr.token_tree_value().map(|tt| tt.token_trees().flat_tokens()) { - Some([tt::TokenTree::Leaf(tt::Leaf::Ident(ident))]) => { - if ident.sym == sym::ignore_flyimport { - do_not_complete = 
Complete::IgnoreFlyimport; - } else if is_trait { - if ident.sym == sym::ignore_methods { - do_not_complete = Complete::IgnoreMethods; - } else if ident.sym == sym::ignore_flyimport_methods { - do_not_complete = Complete::IgnoreFlyimportMethods; - } - } - } - _ => {} - } + #[inline] + pub fn extract(is_trait: bool, attrs: AttrFlags) -> Complete { + if attrs.contains(AttrFlags::COMPLETE_IGNORE_FLYIMPORT) { + return Complete::IgnoreFlyimport; + } else if is_trait { + if attrs.contains(AttrFlags::COMPLETE_IGNORE_METHODS) { + return Complete::IgnoreMethods; + } else if attrs.contains(AttrFlags::COMPLETE_IGNORE_FLYIMPORT_METHODS) { + return Complete::IgnoreFlyimportMethods; } } - do_not_complete + Complete::Yes } #[inline] diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs index c489c1f7c1dad..115b487b7ac80 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -300,21 +300,21 @@ fn match_by_first_token_literally() { check( r#" macro_rules! m { - ($i:ident) => ( mod $i {} ); + ($i:ident) => ( enum $i {} ); (= $i:ident) => ( fn $i() {} ); (+ $i:ident) => ( struct $i; ) } -m! { foo } +m! { Foo } m! { = bar } m! { + Baz } "#, expect![[r#" macro_rules! m { - ($i:ident) => ( mod $i {} ); + ($i:ident) => ( enum $i {} ); (= $i:ident) => ( fn $i() {} ); (+ $i:ident) => ( struct $i; ) } -mod foo {} +enum Foo {} fn bar() {} struct Baz; "#]], @@ -326,21 +326,21 @@ fn match_by_last_token_literally() { check( r#" macro_rules! m { - ($i:ident) => ( mod $i {} ); + ($i:ident) => ( enum $i {} ); ($i:ident =) => ( fn $i() {} ); ($i:ident +) => ( struct $i; ) } -m! { foo } +m! { Foo } m! { bar = } m! { Baz + } "#, expect![[r#" macro_rules! m { - ($i:ident) => ( mod $i {} ); + ($i:ident) => ( enum $i {} ); ($i:ident =) => ( fn $i() {} ); ($i:ident +) => ( struct $i; ) } -mod foo {} +enum Foo {} fn bar() {} struct Baz; "#]], @@ -352,21 +352,21 @@ fn match_by_ident() { check( r#" macro_rules! m { - ($i:ident) => ( mod $i {} ); + ($i:ident) => ( enum $i {} ); (spam $i:ident) => ( fn $i() {} ); (eggs $i:ident) => ( struct $i; ) } -m! { foo } +m! { Foo } m! { spam bar } m! { eggs Baz } "#, expect![[r#" macro_rules! m { - ($i:ident) => ( mod $i {} ); + ($i:ident) => ( enum $i {} ); (spam $i:ident) => ( fn $i() {} ); (eggs $i:ident) => ( struct $i; ) } -mod foo {} +enum Foo {} fn bar() {} struct Baz; "#]], @@ -378,12 +378,12 @@ fn match_by_separator_token() { check( r#" macro_rules! m { - ($($i:ident),*) => ($(mod $i {} )*); + ($($i:ident),*) => ($(enum $i {} )*); ($($i:ident)#*) => ($(fn $i() {} )*); ($i:ident ,# $ j:ident) => ( struct $i; struct $ j; ) } -m! { foo, bar } +m! { Baz, Qux } m! { foo# bar } @@ -391,13 +391,13 @@ m! { Foo,# Bar } "#, expect![[r#" macro_rules! m { - ($($i:ident),*) => ($(mod $i {} )*); + ($($i:ident),*) => ($(enum $i {} )*); ($($i:ident)#*) => ($(fn $i() {} )*); ($i:ident ,# $ j:ident) => ( struct $i; struct $ j; ) } -mod foo {} -mod bar {} +enum Baz {} +enum Qux {} fn foo() {} fn bar() {} @@ -1114,11 +1114,11 @@ fn test_single_item() { check( r#" macro_rules! m { ($i:item) => ( $i ) } -m! { mod c {} } +m! { struct C {} } "#, expect![[r#" macro_rules! m { ($i:item) => ( $i ) } -mod c {} +struct C {} "#]], ) } @@ -1144,6 +1144,7 @@ m! { type T = u8; } "#, + // The modules are counted twice, once because of the module and once because of the macro call. expect![[r#" macro_rules! 
m { ($($i:item)*) => ($($i )*) } extern crate a; @@ -1161,7 +1162,9 @@ trait J {} fn h() {} extern {} type T = u8; -"#]], + +mod b; +mod c {}"#]], ); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs index e8ae499d27b26..74393411054e9 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -245,6 +245,21 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream } } + for (_, module) in def_map.modules() { + let Some(src) = module.declaration_source(&db) else { + continue; + }; + if let Some(macro_file) = src.file_id.macro_file() { + let pp = pretty_print_macro_expansion( + src.value.syntax().clone(), + db.span_map(macro_file.into()).as_ref(), + false, + false, + ); + format_to!(expanded_text, "\n{}", pp) + } + } + for impl_id in def_map[local_id].scope.impls() { let src = impl_id.lookup(&db).source(&db); if let Some(macro_file) = src.file_id.macro_file() diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs index 6952a9da10139..3f0afe61e0b85 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs @@ -9,37 +9,93 @@ use crate::macro_expansion_tests::{check, check_errors}; #[test] fn attribute_macro_attr_censoring() { - cov_mark::check!(attribute_macro_attr_censoring); check( r#" //- proc_macros: identity -#[attr1] #[proc_macros::identity] #[attr2] +//- minicore: derive +#[attr1] #[derive()] #[proc_macros::identity] #[attr2] struct S; + +/// Foo +#[cfg_attr(false, doc = "abc...", attr1)] +mod foo { + #![cfg_attr(true, cfg_attr(true, foo, cfg_attr(false, bar), proc_macros::identity))] + #![cfg_attr(true, doc = "123...", attr2)] + #![attr3] + + #[cfg_attr(true, cfg(false))] + fn foo() {} + + #[cfg(true)] + fn bar() {} +} "#, - expect![[r#" -#[attr1] #[proc_macros::identity] #[attr2] + expect![[r##" +#[attr1] #[derive()] #[proc_macros::identity] #[attr2] struct S; +/// Foo +#[cfg_attr(false, doc = "abc...", attr1)] +mod foo { + #![cfg_attr(true, cfg_attr(true, foo, cfg_attr(false, bar), proc_macros::identity))] + #![cfg_attr(true, doc = "123...", attr2)] + #![attr3] + + #[cfg_attr(true, cfg(false))] + fn foo() {} + + #[cfg(true)] + fn bar() {} +} + #[attr1] -#[attr2] struct S;"#]], +#[attr2] struct S; +#[doc = " Foo"] mod foo { + # ![foo] + # ![doc = "123..."] + # ![attr2] + # ![attr3] + #[cfg_attr(true , cfg(false ))] fn foo() {} + #[cfg(true )] fn bar() {} +}"##]], ); } #[test] fn derive_censoring() { - cov_mark::check!(derive_censoring); check( r#" //- proc_macros: derive_identity //- minicore:derive +use derive as my_cool_derive; #[attr1] #[derive(Foo)] #[derive(proc_macros::DeriveIdentity)] #[derive(Bar)] #[attr2] struct S; + +#[my_cool_derive()] +#[cfg_attr(true, derive(), attr1, derive(proc_macros::DeriveIdentity))] +#[my_cool_derive()] +struct Foo { + #[cfg_attr(false, cfg(false), attr2)] + v1: i32, + #[cfg_attr(true, cfg(false), attr2)] + v1: i32, + #[cfg_attr(true, attr3)] + v2: fn(#[cfg(false)] param: i32, #[cfg_attr(true, attr4)] param2: u32), + v3: Foo<{ + #[cfg(false)] + let foo = 123; + 456 + }>, + #[cfg(false)] + v4: bool // No comma here +} "#, expect![[r#" +use derive as my_cool_derive; #[attr1] 
#[derive(Foo)] #[derive(proc_macros::DeriveIdentity)] @@ -47,6 +103,32 @@ struct S; #[attr2] struct S; +#[my_cool_derive()] +#[cfg_attr(true, derive(), attr1, derive(proc_macros::DeriveIdentity))] +#[my_cool_derive()] +struct Foo { + #[cfg_attr(false, cfg(false), attr2)] + v1: i32, + #[cfg_attr(true, cfg(false), attr2)] + v1: i32, + #[cfg_attr(true, attr3)] + v2: fn(#[cfg(false)] param: i32, #[cfg_attr(true, attr4)] param2: u32), + v3: Foo<{ + #[cfg(false)] + let foo = 123; + 456 + }>, + #[cfg(false)] + v4: bool // No comma here +} + +#[attr1] +#[my_cool_derive()] struct Foo { + v1: i32, #[attr3]v2: fn(#[attr4]param2: u32), v3: Foo< { + 456 + } + >, +} #[attr1] #[derive(Bar)] #[attr2] struct S;"#]], @@ -87,7 +169,7 @@ fn foo() { bar.; blub } fn foo() { bar.; blub } fn foo() { - bar. ; + bar.; blub }"#]], ); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs index 7d5e627964eb1..e4b95a5a77a55 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs @@ -391,19 +391,14 @@ pub(crate) fn crate_local_def_map(db: &dyn DefDatabase, crate_id: Crate) -> DefM ) .entered(); - let module_data = ModuleData::new( - ModuleOrigin::CrateRoot { definition: krate.root_file_id(db) }, - Visibility::Public, - ); + let root_file_id = crate_id.root_file_id(db); + let module_data = + ModuleData::new(ModuleOrigin::CrateRoot { definition: root_file_id }, Visibility::Public); let def_map = DefMap::empty(crate_id, Arc::new(DefMapCrateData::new(krate.edition)), module_data, None); - let (def_map, local_def_map) = collector::collect_defs( - db, - def_map, - TreeId::new(krate.root_file_id(db).into(), None), - None, - ); + let (def_map, local_def_map) = + collector::collect_defs(db, def_map, TreeId::new(root_file_id.into(), None), None); DefMapPair::new(db, def_map, local_def_map) } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/assoc.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/assoc.rs index 8d2a386de8ecc..b67853347bdef 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/assoc.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/assoc.rs @@ -4,7 +4,8 @@ use std::mem; use cfg::CfgOptions; use hir_expand::{ - AstId, ExpandTo, HirFileId, InFile, Intern, Lookup, MacroCallKind, MacroDefKind, + AstId, AttrMacroAttrIds, ExpandTo, HirFileId, InFile, Intern, Lookup, MacroCallKind, + MacroDefKind, mod_path::ModPath, name::{AsName, Name}, span_map::SpanMap, @@ -21,8 +22,8 @@ use triomphe::Arc; use crate::{ AssocItemId, AstIdWithPath, ConstLoc, FunctionId, FunctionLoc, ImplId, ItemContainerId, ItemLoc, MacroCallId, ModuleId, TraitId, TypeAliasId, TypeAliasLoc, - attr::Attrs, db::DefDatabase, + item_tree::AttrsOrCfg, macro_call_as_call_id, nameres::{ DefMap, LocalDefMap, MacroSubNs, @@ -191,19 +192,22 @@ impl<'a> AssocItemCollector<'a> { fn collect_item(&mut self, item: ast::AssocItem) { let ast_id = self.ast_id_map.ast_id(&item); - let attrs = Attrs::new(self.db, &item, self.span_map.as_ref(), self.cfg_options); - if let Err(cfg) = attrs.is_cfg_enabled(self.cfg_options) { - self.diagnostics.push(DefDiagnostic::unconfigured_code( - self.module_id.local_id, - InFile::new(self.file_id, ast_id.erase()), - cfg, - self.cfg_options.clone(), - )); - return; - } + let attrs = + match AttrsOrCfg::lower(self.db, &item, &|| self.cfg_options, self.span_map.as_ref()) { + AttrsOrCfg::Enabled { attrs } => attrs, + AttrsOrCfg::CfgDisabled(cfg) => { + 
self.diagnostics.push(DefDiagnostic::unconfigured_code( + self.module_id.local_id, + InFile::new(self.file_id, ast_id.erase()), + cfg.0, + self.cfg_options.clone(), + )); + return; + } + }; let ast_id = InFile::new(self.file_id, ast_id.upcast()); - 'attrs: for attr in &*attrs { + 'attrs: for (attr_id, attr) in attrs.as_ref().iter() { let ast_id_with_path = AstIdWithPath { path: attr.path.clone(), ast_id }; match self.def_map.resolve_attr_macro( @@ -212,6 +216,7 @@ impl<'a> AssocItemCollector<'a> { self.module_id.local_id, ast_id_with_path, attr, + attr_id, ) { Ok(ResolvedAttr::Macro(call_id)) => { let loc = self.db.lookup_intern_macro_call(call_id); @@ -240,8 +245,12 @@ impl<'a> AssocItemCollector<'a> { Err(_) => { self.diagnostics.push(DefDiagnostic::unresolved_macro_call( self.module_id.local_id, - MacroCallKind::Attr { ast_id, attr_args: None, invoc_attr_index: attr.id }, - attr.path().clone(), + MacroCallKind::Attr { + ast_id, + attr_args: None, + censored_attr_ids: AttrMacroAttrIds::from_one(attr_id), + }, + (*attr.path).clone(), )); } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs index 2f56d608fcbf4..fb755026c3e08 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs @@ -2,7 +2,7 @@ use base_db::Crate; use hir_expand::{ - MacroCallId, MacroCallKind, MacroDefId, + AttrMacroAttrIds, MacroCallId, MacroCallKind, MacroDefId, attrs::{Attr, AttrId, AttrInput}, inert_attr_macro::find_builtin_attr_idx, mod_path::{ModPath, PathKind}, @@ -28,6 +28,7 @@ pub enum ResolvedAttr { } impl DefMap { + /// This cannot be used to resolve items that allow derives. pub(crate) fn resolve_attr_macro( &self, local_def_map: &LocalDefMap, @@ -35,6 +36,7 @@ impl DefMap { original_module: LocalModuleId, ast_id: AstIdWithPath, attr: &Attr, + attr_id: AttrId, ) -> Result { // NB: does not currently work for derive helpers as they aren't recorded in the `DefMap` @@ -68,6 +70,9 @@ impl DefMap { db, &ast_id, attr, + // There aren't any active attributes before this one, because attribute macros + // replace their input, and derive macros are not allowed in this function. + AttrMacroAttrIds::from_one(attr_id), self.krate, db.macro_def(def), ))) @@ -102,6 +107,7 @@ pub(super) fn attr_macro_as_call_id( db: &dyn DefDatabase, item_attr: &AstIdWithPath, macro_attr: &Attr, + censored_attr_ids: AttrMacroAttrIds, krate: Crate, def: MacroDefId, ) -> MacroCallId { @@ -121,7 +127,7 @@ pub(super) fn attr_macro_as_call_id( MacroCallKind::Attr { ast_id: item_attr.ast_id, attr_args: arg.map(Arc::new), - invoc_attr_index: macro_attr.id, + censored_attr_ids, }, macro_attr.ctxt, ) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs index a2ce538356515..c3b272b403bb9 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs @@ -3,14 +3,14 @@ //! `DefCollector::collect` contains the fixed-point iteration loop which //! resolves imports and expands macros. 
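
As a compilable sketch (not part of the patch), this is the source pattern the `AttrsOrCfg::CfgDisabled` arm in the `assoc.rs` hunk above deals with: a cfg-disabled associated item is skipped entirely and only an "unconfigured code" diagnostic is recorded for it. The feature name is illustrative only.

```rust
// Sketch: the `gated` item below is what ends up in the `CfgDisabled` branch when the
// feature is not enabled; it is not lowered as an associated item, a diagnostic is
// emitted for its range instead, while `always` is collected normally.
struct S;

impl S {
    #[cfg(feature = "some-disabled-feature")] // assumption: this feature is not enabled
    fn gated(&self) {}

    fn always(&self) {}
}

fn main() {
    S.always();
}
```
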
-use std::{cmp::Ordering, iter, mem, ops::Not}; +use std::{cmp::Ordering, iter, mem}; use base_db::{BuiltDependency, Crate, CrateOrigin, LangCrateOrigin}; use cfg::{CfgAtom, CfgExpr, CfgOptions}; use either::Either; use hir_expand::{ - EditionedFileId, ErasedAstId, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, - MacroDefId, MacroDefKind, + AttrMacroAttrIds, EditionedFileId, ErasedAstId, ExpandTo, HirFileId, InFile, MacroCallId, + MacroCallKind, MacroDefId, MacroDefKind, attrs::{Attr, AttrId}, builtin::{find_builtin_attr, find_builtin_derive, find_builtin_macro}, mod_path::{ModPath, PathKind}, @@ -18,9 +18,10 @@ use hir_expand::{ proc_macro::CustomProcMacroExpander, }; use intern::{Interned, sym}; -use itertools::{Itertools, izip}; +use itertools::izip; use la_arena::Idx; use rustc_hash::{FxHashMap, FxHashSet}; +use smallvec::SmallVec; use span::{Edition, FileAstId, SyntaxContext}; use syntax::ast; use triomphe::Arc; @@ -32,12 +33,11 @@ use crate::{ MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ModuleDefId, ModuleId, ProcMacroId, ProcMacroLoc, StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro, UseId, UseLoc, - attr::Attrs, db::DefDatabase, item_scope::{GlobId, ImportId, ImportOrExternCrate, PerNsGlobImports}, item_tree::{ - self, FieldsShape, ImportAlias, ImportKind, ItemTree, ItemTreeAstId, Macro2, MacroCall, - MacroRules, Mod, ModItemId, ModKind, TreeId, + self, Attrs, AttrsOrCfg, FieldsShape, ImportAlias, ImportKind, ItemTree, ItemTreeAstId, + Macro2, MacroCall, MacroRules, Mod, ModItemId, ModKind, TreeId, }, macro_call_as_call_id, nameres::{ @@ -102,6 +102,7 @@ pub(super) fn collect_defs( proc_macros, from_glob_import: Default::default(), skip_attrs: Default::default(), + prev_active_attrs: Default::default(), unresolved_extern_crates: Default::default(), is_proc_macro: krate.is_proc_macro, }; @@ -206,6 +207,7 @@ enum MacroDirectiveKind<'db> { }, Attr { ast_id: AstIdWithPath, + attr_id: AttrId, attr: Attr, mod_item: ModItemId, /* is this needed? */ tree: TreeId, @@ -246,28 +248,27 @@ struct DefCollector<'db> { /// This also stores the attributes to skip when we resolve derive helpers and non-macro /// non-builtin attributes in general. // FIXME: There has to be a better way to do this - skip_attrs: FxHashMap>, AttrId>, + skip_attrs: FxHashMap, AttrId>, + /// When we expand attributes, we need to censor all previous active attributes + /// on the same item. Therefore, this holds all active attributes that we already + /// expanded. + prev_active_attrs: FxHashMap, SmallVec<[AttrId; 1]>>, } impl<'db> DefCollector<'db> { fn seed_with_top_level(&mut self) { let _p = tracing::info_span!("seed_with_top_level").entered(); - let file_id = self.def_map.krate.data(self.db).root_file_id(self.db); + let file_id = self.def_map.krate.root_file_id(self.db); let item_tree = self.db.file_item_tree(file_id.into()); - let attrs = item_tree.top_level_attrs(self.db, self.def_map.krate); + let attrs = match item_tree.top_level_attrs() { + AttrsOrCfg::Enabled { attrs } => attrs.as_ref(), + AttrsOrCfg::CfgDisabled(it) => it.1.as_ref(), + }; let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap(); - let mut process = true; - // Process other crate-level attributes. 
for attr in &*attrs { - if let Some(cfg) = attr.cfg() - && self.cfg_options.check(&cfg) == Some(false) - { - process = false; - break; - } let Some(attr_name) = attr.path.as_ident() else { continue }; match () { @@ -291,7 +292,7 @@ impl<'db> DefCollector<'db> { () if *attr_name == sym::feature => { let features = attr.parse_path_comma_token_tree(self.db).into_iter().flatten().filter_map( - |(feat, _)| match feat.segments() { + |(feat, _, _)| match feat.segments() { [name] => Some(name.symbol().clone()), _ => None, }, @@ -344,7 +345,7 @@ impl<'db> DefCollector<'db> { self.inject_prelude(); - if !process { + if matches!(item_tree.top_level_attrs(), AttrsOrCfg::CfgDisabled(_)) { return; } @@ -362,10 +363,7 @@ impl<'db> DefCollector<'db> { fn seed_with_inner(&mut self, tree_id: TreeId) { let item_tree = tree_id.item_tree(self.db); - let is_cfg_enabled = item_tree - .top_level_attrs(self.db, self.def_map.krate) - .cfg() - .is_none_or(|cfg| self.cfg_options.check(&cfg) != Some(false)); + let is_cfg_enabled = matches!(item_tree.top_level_attrs(), AttrsOrCfg::Enabled { .. }); if is_cfg_enabled { self.inject_prelude(); @@ -456,18 +454,18 @@ impl<'db> DefCollector<'db> { self.unresolved_macros.iter().enumerate().find_map(|(idx, directive)| match &directive .kind { - MacroDirectiveKind::Attr { ast_id, mod_item, attr, tree, item_tree } => { + MacroDirectiveKind::Attr { ast_id, mod_item, attr_id, attr, tree, item_tree } => { self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call( directive.module_id, MacroCallKind::Attr { ast_id: ast_id.ast_id, attr_args: None, - invoc_attr_index: attr.id, + censored_attr_ids: AttrMacroAttrIds::from_one(*attr_id), }, - attr.path().clone(), + (*attr.path).clone(), )); - self.skip_attrs.insert(ast_id.ast_id.with_value(mod_item.ast_id()), attr.id); + self.skip_attrs.insert(ast_id.ast_id.with_value(mod_item.ast_id()), *attr_id); Some((idx, directive, *mod_item, *tree, *item_tree)) } @@ -1350,6 +1348,7 @@ impl<'db> DefCollector<'db> { MacroDirectiveKind::Attr { ast_id: file_ast_id, mod_item, + attr_id, attr, tree, item_tree, @@ -1362,7 +1361,7 @@ impl<'db> DefCollector<'db> { let mod_dir = collector.mod_dirs[&directive.module_id].clone(); collector .skip_attrs - .insert(InFile::new(file_id, mod_item.ast_id()), attr.id); + .insert(InFile::new(file_id, mod_item.ast_id()), *attr_id); ModCollector { def_collector: collector, @@ -1398,7 +1397,6 @@ impl<'db> DefCollector<'db> { // being cfg'ed out). // Ideally we will just expand them to nothing here. But we are only collecting macro calls, // not expanding them, so we have no way to do that. - // If you add an ignored attribute here, also add it to `Semantics::might_be_inside_macro_call()`. if matches!( def.kind, MacroDefKind::BuiltInAttr(_, expander) @@ -1410,8 +1408,18 @@ impl<'db> DefCollector<'db> { } } - let call_id = || { - attr_macro_as_call_id(self.db, file_ast_id, attr, self.def_map.krate, def) + let mut call_id = || { + let active_attrs = self.prev_active_attrs.entry(ast_id).or_default(); + active_attrs.push(*attr_id); + + attr_macro_as_call_id( + self.db, + file_ast_id, + attr, + AttrMacroAttrIds::from_many(active_attrs), + self.def_map.krate, + def, + ) }; if matches!(def, MacroDefId { kind: MacroDefKind::BuiltInAttr(_, exp), .. 
} @@ -1429,7 +1437,7 @@ impl<'db> DefCollector<'db> { let diag = DefDiagnostic::invalid_derive_target( directive.module_id, ast_id, - attr.id, + *attr_id, ); self.def_map.diagnostics.push(diag); return recollect_without(self); @@ -1442,7 +1450,7 @@ impl<'db> DefCollector<'db> { Some(derive_macros) => { let call_id = call_id(); let mut len = 0; - for (idx, (path, call_site)) in derive_macros.enumerate() { + for (idx, (path, call_site, _)) in derive_macros.enumerate() { let ast_id = AstIdWithPath::new( file_id, ast_id.value, @@ -1453,7 +1461,7 @@ impl<'db> DefCollector<'db> { depth: directive.depth + 1, kind: MacroDirectiveKind::Derive { ast_id, - derive_attr: attr.id, + derive_attr: *attr_id, derive_pos: idx, ctxt: call_site.ctx, derive_macro_id: call_id, @@ -1469,13 +1477,13 @@ impl<'db> DefCollector<'db> { // Check the comment in [`builtin_attr_macro`]. self.def_map.modules[directive.module_id] .scope - .init_derive_attribute(ast_id, attr.id, call_id, len + 1); + .init_derive_attribute(ast_id, *attr_id, call_id, len + 1); } None => { let diag = DefDiagnostic::malformed_derive( directive.module_id, ast_id, - attr.id, + *attr_id, ); self.def_map.diagnostics.push(diag); } @@ -1712,16 +1720,17 @@ impl ModCollector<'_, '_> { }; let mut process_mod_item = |item: ModItemId| { - let attrs = self.item_tree.attrs(db, krate, item.ast_id()); - if let Some(cfg) = attrs.cfg() - && !self.is_cfg_enabled(&cfg) - { - let ast_id = item.ast_id().erase(); - self.emit_unconfigured_diagnostic(InFile::new(self.file_id(), ast_id), &cfg); - return; - } + let attrs = match self.item_tree.attrs(item.ast_id()) { + Some(AttrsOrCfg::Enabled { attrs }) => attrs.as_ref(), + None => Attrs::EMPTY, + Some(AttrsOrCfg::CfgDisabled(cfg)) => { + let ast_id = item.ast_id().erase(); + self.emit_unconfigured_diagnostic(InFile::new(self.file_id(), ast_id), &cfg.0); + return; + } + }; - if let Err(()) = self.resolve_attributes(&attrs, item, container) { + if let Err(()) = self.resolve_attributes(attrs, item, container) { // Do not process the item. It has at least one non-builtin attribute, so the // fixed-point algorithm is required to resolve the rest of them. 
return; @@ -1733,7 +1742,7 @@ impl ModCollector<'_, '_> { self.def_collector.crate_local_def_map.unwrap_or(&self.def_collector.local_def_map); match item { - ModItemId::Mod(m) => self.collect_module(m, &attrs), + ModItemId::Mod(m) => self.collect_module(m, attrs), ModItemId::Use(item_tree_id) => { let id = UseLoc { container: module, id: InFile::new(self.file_id(), item_tree_id) } @@ -2006,7 +2015,7 @@ impl ModCollector<'_, '_> { ); return; }; - for (path, _) in paths { + for (path, _, _) in paths { if let Some(name) = path.as_ident() { single_imports.push(name.clone()); } @@ -2020,7 +2029,7 @@ impl ModCollector<'_, '_> { ); } - fn collect_module(&mut self, module_ast_id: ItemTreeAstId, attrs: &Attrs) { + fn collect_module(&mut self, module_ast_id: ItemTreeAstId, attrs: Attrs<'_>) { let path_attr = attrs.by_key(sym::path).string_value_unescape(); let is_macro_use = attrs.by_key(sym::macro_use).exists(); let module = &self.item_tree[module_ast_id]; @@ -2061,23 +2070,18 @@ impl ModCollector<'_, '_> { self.file_id(), &module.name, path_attr.as_deref(), + self.def_collector.def_map.krate, ) { Ok((file_id, is_mod_rs, mod_dir)) => { let item_tree = db.file_item_tree(file_id.into()); - let krate = self.def_collector.def_map.krate; - let is_enabled = item_tree - .top_level_attrs(db, krate) - .cfg() - .and_then(|cfg| self.is_cfg_enabled(&cfg).not().then_some(cfg)) - .map_or(Ok(()), Err); - match is_enabled { - Err(cfg) => { + match item_tree.top_level_attrs() { + AttrsOrCfg::CfgDisabled(cfg) => { self.emit_unconfigured_diagnostic( InFile::new(self.file_id(), module_ast_id.erase()), - &cfg, + &cfg.0, ); } - Ok(()) => { + AttrsOrCfg::Enabled { attrs } => { let module_id = self.push_child_module( module.name.clone(), ast_id.value, @@ -2093,11 +2097,8 @@ impl ModCollector<'_, '_> { mod_dir, } .collect_in_top_module(item_tree.top_level_items()); - let is_macro_use = is_macro_use - || item_tree - .top_level_attrs(db, krate) - .by_key(sym::macro_use) - .exists(); + let is_macro_use = + is_macro_use || attrs.as_ref().by_key(sym::macro_use).exists(); if is_macro_use { self.import_all_legacy_macros(module_id); } @@ -2185,36 +2186,16 @@ impl ModCollector<'_, '_> { /// assumed to be resolved already. fn resolve_attributes( &mut self, - attrs: &Attrs, + attrs: Attrs<'_>, mod_item: ModItemId, container: ItemContainerId, ) -> Result<(), ()> { - let mut ignore_up_to = self + let ignore_up_to = self .def_collector .skip_attrs .get(&InFile::new(self.file_id(), mod_item.ast_id())) .copied(); - let iter = attrs - .iter() - .dedup_by(|a, b| { - // FIXME: this should not be required, all attributes on an item should have a - // unique ID! - // Still, this occurs because `#[cfg_attr]` can "expand" to multiple attributes: - // #[cfg_attr(not(off), unresolved, unresolved)] - // struct S; - // We should come up with a different way to ID attributes. 
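
The removed `dedup_by` above worked around exactly the situation sketched below: a single syntactic attribute can desugar into several attributes, and with the old ast-index based `AttrId` they all shared one id. A minimal, compilable illustration (the feature name and item are not from the patch):

```rust
// Sketch: one `cfg_attr` that expands into three attributes. With item-tree based
// attribute ids each of `derive(Debug)`, `allow(dead_code)` and `must_use` gets its
// own index, so no dedup step is needed anymore.
#[cfg_attr(not(feature = "off"), derive(Debug), allow(dead_code), must_use)]
struct S;

fn main() {
    println!("{:?}", S);
}
```
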
- a.id == b.id - }) - .skip_while(|attr| match ignore_up_to { - Some(id) if attr.id == id => { - ignore_up_to = None; - true - } - Some(_) => true, - None => false, - }); - - for attr in iter { + for (attr_id, attr) in attrs.iter_after(ignore_up_to) { if self.def_collector.def_map.is_builtin_or_registered_attr(&attr.path) { continue; } @@ -2229,6 +2210,7 @@ impl ModCollector<'_, '_> { depth: self.macro_depth + 1, kind: MacroDirectiveKind::Attr { ast_id, + attr_id, attr: attr.clone(), mod_item, tree: self.tree_id, @@ -2244,9 +2226,14 @@ impl ModCollector<'_, '_> { } fn collect_macro_rules(&mut self, ast_id: ItemTreeAstId, module: ModuleId) { - let krate = self.def_collector.def_map.krate; let mac = &self.item_tree[ast_id]; - let attrs = self.item_tree.attrs(self.def_collector.db, krate, ast_id.upcast()); + let attrs = match self.item_tree.attrs(ast_id.upcast()) { + Some(AttrsOrCfg::Enabled { attrs }) => attrs.as_ref(), + None => Attrs::EMPTY, + Some(AttrsOrCfg::CfgDisabled(_)) => { + unreachable!("we only get here if the macro is not cfg'ed out") + } + }; let f_ast_id = InFile::new(self.file_id(), ast_id.upcast()); let export_attr = || attrs.by_key(sym::macro_export); @@ -2326,9 +2313,14 @@ impl ModCollector<'_, '_> { } fn collect_macro_def(&mut self, ast_id: ItemTreeAstId, module: ModuleId) { - let krate = self.def_collector.def_map.krate; let mac = &self.item_tree[ast_id]; - let attrs = self.item_tree.attrs(self.def_collector.db, krate, ast_id.upcast()); + let attrs = match self.item_tree.attrs(ast_id.upcast()) { + Some(AttrsOrCfg::Enabled { attrs }) => attrs.as_ref(), + None => Attrs::EMPTY, + Some(AttrsOrCfg::CfgDisabled(_)) => { + unreachable!("we only get here if the macro is not cfg'ed out") + } + }; let f_ast_id = InFile::new(self.file_id(), ast_id.upcast()); // Case 1: builtin macros @@ -2514,10 +2506,6 @@ impl ModCollector<'_, '_> { Some((a, b)) } - fn is_cfg_enabled(&self, cfg: &CfgExpr) -> bool { - self.def_collector.cfg_options.check(cfg) != Some(false) - } - fn emit_unconfigured_diagnostic(&mut self, ast_id: ErasedAstId, cfg: &CfgExpr) { self.def_collector.def_map.diagnostics.push(DefDiagnostic::unconfigured_code( self.module_id, @@ -2557,6 +2545,7 @@ mod tests { proc_macros: Default::default(), from_glob_import: Default::default(), skip_attrs: Default::default(), + prev_active_attrs: Default::default(), is_proc_macro: false, unresolved_extern_crates: Default::default(), }; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs index c495a07449196..6a07c56aeebef 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs @@ -17,8 +17,8 @@ pub enum DefDiagnosticKind { UnconfiguredCode { ast_id: ErasedAstId, cfg: CfgExpr, opts: CfgOptions }, UnresolvedMacroCall { ast: MacroCallKind, path: ModPath }, UnimplementedBuiltinMacro { ast: AstId }, - InvalidDeriveTarget { ast: AstId, id: usize }, - MalformedDerive { ast: AstId, id: usize }, + InvalidDeriveTarget { ast: AstId, id: AttrId }, + MalformedDerive { ast: AstId, id: AttrId }, MacroDefError { ast: AstId, message: String }, MacroError { ast: AstId, path: ModPath, err: ExpandErrorKind }, } @@ -119,10 +119,7 @@ impl DefDiagnostic { ast: AstId, id: AttrId, ) -> Self { - Self { - in_module: container, - kind: DefDiagnosticKind::InvalidDeriveTarget { ast, id: id.ast_index() }, - } + Self { in_module: container, kind: DefDiagnosticKind::InvalidDeriveTarget 
{ ast, id } } } pub(super) fn malformed_derive( @@ -130,9 +127,6 @@ impl DefDiagnostic { ast: AstId, id: AttrId, ) -> Self { - Self { - in_module: container, - kind: DefDiagnosticKind::MalformedDerive { ast, id: id.ast_index() }, - } + Self { in_module: container, kind: DefDiagnosticKind::MalformedDerive { ast, id } } } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs index 0c50f13edfb6c..140b77ac002f9 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs @@ -1,6 +1,6 @@ //! This module resolves `mod foo;` declaration to file. use arrayvec::ArrayVec; -use base_db::AnchoredPath; +use base_db::{AnchoredPath, Crate}; use hir_expand::{EditionedFileId, name::Name}; use crate::{HirFileId, db::DefDatabase}; @@ -62,6 +62,7 @@ impl ModDir { file_id: HirFileId, name: &Name, attr_path: Option<&str>, + krate: Crate, ) -> Result<(EditionedFileId, bool, ModDir), Box<[String]>> { let name = name.as_str(); @@ -91,7 +92,7 @@ impl ModDir { if let Some(mod_dir) = self.child(dir_path, !root_dir_owner) { return Ok(( // FIXME: Edition, is this rightr? - EditionedFileId::new(db, file_id, orig_file_id.edition(db)), + EditionedFileId::new(db, file_id, orig_file_id.edition(db), krate), is_mod_rs, mod_dir, )); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs index cd8882183bb4d..cd45afe57d7cd 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs @@ -3,8 +3,10 @@ use hir_expand::name::{AsName, Name}; use intern::sym; -use crate::attr::Attrs; -use crate::tt::{Leaf, TokenTree, TopSubtree, TtElement}; +use crate::{ + item_tree::Attrs, + tt::{Leaf, TokenTree, TopSubtree, TtElement}, +}; #[derive(Debug, PartialEq, Eq)] pub struct ProcMacroDef { @@ -29,8 +31,8 @@ impl ProcMacroKind { } } -impl Attrs { - pub fn parse_proc_macro_decl(&self, func_name: &Name) -> Option { +impl Attrs<'_> { + pub(crate) fn parse_proc_macro_decl(&self, func_name: &Name) -> Option { if self.is_proc_macro() { Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::Bang }) } else if self.is_proc_macro_attribute() { @@ -51,15 +53,10 @@ impl Attrs { } } - pub fn parse_proc_macro_derive(&self) -> Option<(Name, Box<[Name]>)> { + pub(crate) fn parse_proc_macro_derive(&self) -> Option<(Name, Box<[Name]>)> { let derive = self.by_key(sym::proc_macro_derive).tt_values().next()?; parse_macro_name_and_helper_attrs(derive) } - - pub fn parse_rustc_builtin_macro(&self) -> Option<(Name, Box<[Name]>)> { - let derive = self.by_key(sym::rustc_builtin_macro).tt_values().next()?; - parse_macro_name_and_helper_attrs(derive) - } } // This fn is intended for `#[proc_macro_derive(..)]` and `#[rustc_builtin_macro(..)]`, which have @@ -84,14 +81,11 @@ pub(crate) fn parse_macro_name_and_helper_attrs(tt: &TopSubtree) -> Option<(Name let helpers = tt::TokenTreesView::new(&tt.token_trees().flat_tokens()[3..]).try_into_subtree()?; let helpers = helpers .iter() - .filter( - |tt| !matches!(tt, TtElement::Leaf(Leaf::Punct(comma)) if comma.char == ','), - ) - .map(|tt| match tt { + .filter_map(|tt| match tt { TtElement::Leaf(Leaf::Ident(helper)) => Some(helper.as_name()), _ => None, }) - .collect::>>()?; + .collect::>(); Some((trait_name.as_name(), helpers)) } diff --git 
a/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs b/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs index ebbf87cad668b..c9e8955ad68c6 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs @@ -21,7 +21,7 @@ use triomphe::Arc; use crate::{ ConstId, EnumId, EnumVariantId, EnumVariantLoc, ExternBlockId, FunctionId, HasModule, ImplId, ItemContainerId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, UnionId, VariantId, - attr::Attrs, + attrs::AttrFlags, db::DefDatabase, expr_store::{ ExpressionStore, ExpressionStoreSourceMap, @@ -48,12 +48,13 @@ pub struct StructSignature { pub store: Arc, pub flags: StructFlags, pub shape: FieldsShape, - pub repr: Option, } bitflags! { #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub struct StructFlags: u8 { + /// Indicates whether this struct has `#[repr]`. + const HAS_REPR = 1 << 0; /// Indicates whether the struct has a `#[rustc_has_incoherent_inherent_impls]` attribute. const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS = 1 << 1; /// Indicates whether the struct has a `#[fundamental]` attribute. @@ -75,16 +76,19 @@ impl StructSignature { pub fn query(db: &dyn DefDatabase, id: StructId) -> (Arc, Arc) { let loc = id.lookup(db); let InFile { file_id, value: source } = loc.source(db); - let attrs = db.attrs(id.into()); + let attrs = AttrFlags::query(db, id.into()); let mut flags = StructFlags::empty(); - if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() { + if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) { flags |= StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS; } - if attrs.by_key(sym::fundamental).exists() { + if attrs.contains(AttrFlags::FUNDAMENTAL) { flags |= StructFlags::FUNDAMENTAL; } - if let Some(lang) = attrs.lang_item() { + if attrs.contains(AttrFlags::HAS_REPR) { + flags |= StructFlags::HAS_REPR; + } + if let Some(lang) = attrs.lang_item_with_attrs(db, id.into()) { match lang { LangItem::PhantomData => flags |= StructFlags::IS_PHANTOM_DATA, LangItem::OwnedBox => flags |= StructFlags::IS_BOX, @@ -94,7 +98,6 @@ impl StructSignature { _ => (), } } - let repr = attrs.repr(); let shape = adt_shape(source.kind()); let (store, generic_params, source_map) = lower_generic_params( @@ -112,11 +115,19 @@ impl StructSignature { flags, shape, name: as_name_opt(source.name()), - repr, }), Arc::new(source_map), ) } + + #[inline] + pub fn repr(&self, db: &dyn DefDatabase, id: StructId) -> Option { + if self.flags.contains(StructFlags::HAS_REPR) { + AttrFlags::repr(db, id.into()) + } else { + None + } + } } #[inline] @@ -134,22 +145,22 @@ pub struct UnionSignature { pub generic_params: Arc, pub store: Arc, pub flags: StructFlags, - pub repr: Option, } impl UnionSignature { pub fn query(db: &dyn DefDatabase, id: UnionId) -> (Arc, Arc) { let loc = id.lookup(db); - let attrs = db.attrs(id.into()); + let attrs = AttrFlags::query(db, id.into()); let mut flags = StructFlags::empty(); - if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() { + if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) { flags |= StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS; } - if attrs.by_key(sym::fundamental).exists() { + if attrs.contains(AttrFlags::FUNDAMENTAL) { flags |= StructFlags::FUNDAMENTAL; } - - let repr = attrs.repr(); + if attrs.contains(AttrFlags::HAS_REPR) { + flags |= StructFlags::HAS_REPR; + } let InFile { file_id, value: source } = loc.source(db); let (store, generic_params, source_map) = lower_generic_params( @@ -165,7 
+176,6 @@ impl UnionSignature { generic_params, store, flags, - repr, name: as_name_opt(source.name()), }), Arc::new(source_map), @@ -186,20 +196,17 @@ pub struct EnumSignature { pub generic_params: Arc, pub store: Arc, pub flags: EnumFlags, - pub repr: Option, } impl EnumSignature { pub fn query(db: &dyn DefDatabase, id: EnumId) -> (Arc, Arc) { let loc = id.lookup(db); - let attrs = db.attrs(id.into()); + let attrs = AttrFlags::query(db, id.into()); let mut flags = EnumFlags::empty(); - if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() { + if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) { flags |= EnumFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS; } - let repr = attrs.repr(); - let InFile { file_id, value: source } = loc.source(db); let (store, generic_params, source_map) = lower_generic_params( db, @@ -215,15 +222,14 @@ impl EnumSignature { generic_params, store, flags, - repr, name: as_name_opt(source.name()), }), Arc::new(source_map), ) } - pub fn variant_body_type(&self) -> IntegerType { - match self.repr { + pub fn variant_body_type(db: &dyn DefDatabase, id: EnumId) -> IntegerType { + match AttrFlags::repr(db, id.into()) { Some(ReprOptions { int: Some(builtin), .. }) => builtin, _ => IntegerType::Pointer(true), } @@ -251,9 +257,9 @@ impl ConstSignature { let loc = id.lookup(db); let module = loc.container.module(db); - let attrs = db.attrs(id.into()); + let attrs = AttrFlags::query(db, id.into()); let mut flags = ConstFlags::empty(); - if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() { + if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) { flags |= ConstFlags::RUSTC_ALLOW_INCOHERENT_IMPL; } let source = loc.source(db); @@ -306,9 +312,9 @@ impl StaticSignature { let loc = id.lookup(db); let module = loc.container.module(db); - let attrs = db.attrs(id.into()); + let attrs = AttrFlags::query(db, id.into()); let mut flags = StaticFlags::empty(); - if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() { + if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) { flags |= StaticFlags::RUSTC_ALLOW_INCOHERENT_IMPL; } @@ -433,7 +439,7 @@ impl TraitSignature { let loc = id.lookup(db); let mut flags = TraitFlags::empty(); - let attrs = db.attrs(id.into()); + let attrs = AttrFlags::query(db, id.into()); let source = loc.source(db); if source.value.auto_token().is_some() { flags.insert(TraitFlags::AUTO); @@ -444,34 +450,23 @@ impl TraitSignature { if source.value.eq_token().is_some() { flags.insert(TraitFlags::ALIAS); } - if attrs.by_key(sym::fundamental).exists() { + if attrs.contains(AttrFlags::FUNDAMENTAL) { flags |= TraitFlags::FUNDAMENTAL; } - if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() { + if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) { flags |= TraitFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS; } - if attrs.by_key(sym::rustc_paren_sugar).exists() { + if attrs.contains(AttrFlags::RUSTC_PAREN_SUGAR) { flags |= TraitFlags::RUSTC_PAREN_SUGAR; } - if attrs.by_key(sym::rustc_coinductive).exists() { + if attrs.contains(AttrFlags::RUSTC_COINDUCTIVE) { flags |= TraitFlags::COINDUCTIVE; } - let mut skip_array_during_method_dispatch = - attrs.by_key(sym::rustc_skip_array_during_method_dispatch).exists(); - let mut skip_boxed_slice_during_method_dispatch = false; - for tt in attrs.by_key(sym::rustc_skip_during_method_dispatch).tt_values() { - for tt in tt.iter() { - if let tt::iter::TtElement::Leaf(tt::Leaf::Ident(ident)) = tt { - skip_array_during_method_dispatch |= ident.sym == sym::array; - 
skip_boxed_slice_during_method_dispatch |= ident.sym == sym::boxed_slice; - } - } - } - if skip_array_during_method_dispatch { + if attrs.contains(AttrFlags::RUSTC_SKIP_ARRAY_DURING_METHOD_DISPATCH) { flags |= TraitFlags::SKIP_ARRAY_DURING_METHOD_DISPATCH; } - if skip_boxed_slice_during_method_dispatch { + if attrs.contains(AttrFlags::RUSTC_SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH) { flags |= TraitFlags::SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH; } @@ -503,7 +498,8 @@ bitflags! { const HAS_TARGET_FEATURE = 1 << 9; const DEPRECATED_SAFE_2024 = 1 << 10; const EXPLICIT_SAFE = 1 << 11; - const RUSTC_INTRINSIC = 1 << 12; + const HAS_LEGACY_CONST_GENERICS = 1 << 12; + const RUSTC_INTRINSIC = 1 << 13; } } @@ -516,8 +512,6 @@ pub struct FunctionSignature { pub ret_type: Option, pub abi: Option, pub flags: FnFlags, - // FIXME: we should put this behind a fn flags + query to avoid bloating the struct - pub legacy_const_generics_indices: Option>>, } impl FunctionSignature { @@ -529,23 +523,26 @@ impl FunctionSignature { let module = loc.container.module(db); let mut flags = FnFlags::empty(); - let attrs = db.attrs(id.into()); - if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() { + let attrs = AttrFlags::query(db, id.into()); + if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) { flags.insert(FnFlags::RUSTC_ALLOW_INCOHERENT_IMPL); } - if attrs.by_key(sym::target_feature).exists() { + if attrs.contains(AttrFlags::HAS_TARGET_FEATURE) { flags.insert(FnFlags::HAS_TARGET_FEATURE); } - if attrs.by_key(sym::rustc_intrinsic).exists() { + + if attrs.contains(AttrFlags::RUSTC_INTRINSIC) { flags.insert(FnFlags::RUSTC_INTRINSIC); } - let legacy_const_generics_indices = attrs.rustc_legacy_const_generics(); + if attrs.contains(AttrFlags::HAS_LEGACY_CONST_GENERICS) { + flags.insert(FnFlags::HAS_LEGACY_CONST_GENERICS); + } let source = loc.source(db); if source.value.unsafe_token().is_some() { - if attrs.by_key(sym::rustc_deprecated_safe_2024).exists() { + if attrs.contains(AttrFlags::RUSTC_DEPRECATED_SAFE_2024) { flags.insert(FnFlags::DEPRECATED_SAFE_2024); } else { flags.insert(FnFlags::UNSAFE); @@ -587,7 +584,6 @@ impl FunctionSignature { ret_type, abi, flags, - legacy_const_generics_indices, name, }), Arc::new(source_map), @@ -636,6 +632,19 @@ impl FunctionSignature { self.flags.contains(FnFlags::HAS_TARGET_FEATURE) } + #[inline] + pub fn legacy_const_generics_indices<'db>( + &self, + db: &'db dyn DefDatabase, + id: FunctionId, + ) -> Option<&'db [u32]> { + if !self.flags.contains(FnFlags::HAS_LEGACY_CONST_GENERICS) { + return None; + } + + AttrFlags::legacy_const_generic_indices(db, id).as_deref() + } + pub fn is_intrinsic(db: &dyn DefDatabase, id: FunctionId) -> bool { let data = db.function_signature(id); data.flags.contains(FnFlags::RUSTC_INTRINSIC) @@ -679,11 +688,11 @@ impl TypeAliasSignature { let loc = id.lookup(db); let mut flags = TypeAliasFlags::empty(); - let attrs = db.attrs(id.into()); - if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() { + let attrs = AttrFlags::query(db, id.into()); + if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) { flags.insert(TypeAliasFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPL); } - if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() { + if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) { flags.insert(TypeAliasFlags::RUSTC_ALLOW_INCOHERENT_IMPL); } if matches!(loc.container, ItemContainerId::ExternBlockId(_)) { @@ -866,7 +875,7 @@ fn lower_fields( let mut has_fields = false; for (ty, field) in fields.value { 
has_fields = true; - match Attrs::is_cfg_enabled_for(db, &field, col.span_map(), cfg_options) { + match AttrFlags::is_cfg_enabled_for(&field, cfg_options) { Ok(()) => { let type_ref = col.lower_type_ref_opt(ty, &mut ExprCollector::impl_trait_error_allocator); @@ -928,7 +937,6 @@ impl EnumVariants { let loc = e.lookup(db); let source = loc.source(db); let ast_id_map = db.ast_id_map(source.file_id); - let span_map = db.span_map(source.file_id); let mut diagnostics = ThinVec::new(); let cfg_options = loc.container.krate.cfg_options(db); @@ -940,7 +948,7 @@ impl EnumVariants { .variants() .filter_map(|variant| { let ast_id = ast_id_map.ast_id(&variant); - match Attrs::is_cfg_enabled_for(db, &variant, span_map.as_ref(), cfg_options) { + match AttrFlags::is_cfg_enabled_for(&variant, cfg_options) { Ok(()) => { let enum_variant = EnumVariantLoc { id: source.with_value(ast_id), parent: e, index } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/src.rs b/src/tools/rust-analyzer/crates/hir-def/src/src.rs index 367b543cf9080..153fd195f0ad8 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/src.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/src.rs @@ -7,7 +7,7 @@ use syntax::{AstNode, AstPtr, ast}; use crate::{ AstIdLoc, GenericDefId, LocalFieldId, LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, - UseId, VariantId, attr::Attrs, db::DefDatabase, + UseId, VariantId, attrs::AttrFlags, db::DefDatabase, }; pub trait HasSource { @@ -145,15 +145,13 @@ impl HasChildSource for VariantId { (lookup.source(db).map(|it| it.kind()), lookup.container) } }; - let span_map = db.span_map(src.file_id); let mut map = ArenaMap::new(); match &src.value { ast::StructKind::Tuple(fl) => { let cfg_options = container.krate.cfg_options(db); let mut idx = 0; for fd in fl.fields() { - let enabled = - Attrs::is_cfg_enabled_for(db, &fd, span_map.as_ref(), cfg_options).is_ok(); + let enabled = AttrFlags::is_cfg_enabled_for(&fd, cfg_options).is_ok(); if !enabled { continue; } @@ -168,8 +166,7 @@ impl HasChildSource for VariantId { let cfg_options = container.krate.cfg_options(db); let mut idx = 0; for fd in fl.fields() { - let enabled = - Attrs::is_cfg_enabled_for(db, &fd, span_map.as_ref(), cfg_options).is_ok(); + let enabled = AttrFlags::is_cfg_enabled_for(&fd, cfg_options).is_ok(); if !enabled { continue; } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs index 12a1c1554cc12..3bb9c361b3c80 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs @@ -190,7 +190,15 @@ impl TestDB { let mut res = DefMap::ROOT; for (module, data) in def_map.modules() { let src = data.definition_source(self); - if src.file_id != position.file_id { + // We're not comparing the `base_db::EditionedFileId`, but rather the VFS `FileId`, because + // `position.file_id` is created before the def map, causing it to have to wrong crate + // attached often, which means it won't compare equal. This should not be a problem in real + // r-a session, only in tests, because in real r-a we only guess the crate on syntactic-only + // (e.g. on-enter) handlers. The rest pick the `EditionedFileId` from the def map. 
+ let Some(file_id) = src.file_id.file_id() else { + continue; + }; + if file_id.file_id(self) != position.file_id.file_id(self) { continue; } @@ -230,7 +238,15 @@ impl TestDB { let mut fn_def = None; for (_, module) in def_map.modules() { let file_id = module.definition_source(self).file_id; - if file_id != position.file_id { + // We're not comparing the `base_db::EditionedFileId`, but rather the VFS `FileId`, because + // `position.file_id` is created before the def map, causing it to have to wrong crate + // attached often, which means it won't compare equal. This should not be a problem in real + // r-a session, only in tests, because in real r-a we only guess the crate on syntactic-only + // (e.g. on-enter) handlers. The rest pick the `EditionedFileId` from the def map. + let Some(file_id) = file_id.file_id() else { + continue; + }; + if file_id.file_id(self) != position.file_id.file_id(self) { continue; } for decl in module.scope.declarations() { @@ -253,26 +269,25 @@ impl TestDB { }; if size != Some(new_size) { size = Some(new_size); - fn_def = Some(it); + fn_def = Some((it, file_id)); } } } } // Find the innermost block expression that has a `DefMap`. - let def_with_body = fn_def?.into(); + let (def_with_body, file_id) = fn_def?; + let def_with_body = def_with_body.into(); let source_map = self.body_with_source_map(def_with_body).1; let scopes = self.expr_scopes(def_with_body); - let root_syntax_node = self.parse(position.file_id).syntax_node(); + let root_syntax_node = self.parse(file_id).syntax_node(); let scope_iter = algo::ancestors_at_offset(&root_syntax_node, position.offset).filter_map(|node| { let block = ast::BlockExpr::cast(node)?; let expr = ast::Expr::from(block); - let expr_id = source_map - .node_expr(InFile::new(position.file_id.into(), &expr))? - .as_expr() - .unwrap(); + let expr_id = + source_map.node_expr(InFile::new(file_id.into(), &expr))?.as_expr().unwrap(); let scope = scopes.scope_for(expr_id).unwrap(); Some(scope) }); diff --git a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml index 80a3c08486531..4fa476afb64a3 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml @@ -23,6 +23,8 @@ triomphe.workspace = true query-group.workspace = true salsa.workspace = true salsa-macros.workspace = true +arrayvec.workspace = true +thin-vec.workspace = true # local deps stdx.workspace = true diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs index 986f8764f5c9e..e1807cd2e1e9d 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs @@ -1,200 +1,397 @@ -//! A higher level attributes based on TokenTree, with also some shortcuts. -use std::iter; -use std::{borrow::Cow, fmt, ops}; +//! Defines the basics of attributes lowering. +//! +//! The heart and soul of this module is [`expand_cfg_attr()`], alongside its sibling +//! [`expand_cfg_attr_with_doc_comments()`]. It is used to implement all attribute lowering +//! in r-a. Its basic job is to list attributes; however, attributes do not necessarily map +//! into [`ast::Attr`], because `cfg_attr` can map to zero, one, or more attributes +//! (`#[cfg_attr(predicate, attr1, attr2, ...)]`). To bridge this gap, this module defines +//! [`Meta`], which represents a desugared attribute. Various bits of r-a need different +//! 
things from [`Meta`], therefore it contains many parts. The basic idea is: +//! +//! - There are three kinds of attributes, `path = value`, `path`, and `path(token_tree)`. +//! - Most bits of rust-analyzer only need to deal with some paths. Therefore, we keep +//! the path only if it has up to 2 segments, or one segment for `path = value`. +//! We also only keep the value in `path = value` if it is a literal. However, we always +//! save the all relevant ranges of attributes (the path range, and the full attribute range) +//! for parts of r-a (e.g. name resolution) that need a faithful representation of the +//! attribute. +//! +//! [`expand_cfg_attr()`] expands `cfg_attr`s as it goes (as its name implies), to list +//! all attributes. +//! +//! Another thing to note is that we need to be able to map an attribute back to a range +//! (for diagnostic purposes etc.). This is only ever needed for attributes that participate +//! in name resolution. An attribute is mapped back by its [`AttrId`], which is just an +//! index into the item tree attributes list. To minimize the risk of bugs, we have one +//! place (here) and one function ([`is_item_tree_filtered_attr()`]) that decides whether +//! an attribute participate in name resolution. + +use std::{ + borrow::Cow, cell::OnceCell, convert::Infallible, fmt, iter::Peekable, ops::ControlFlow, +}; +use ::tt::{TextRange, TextSize}; +use arrayvec::ArrayVec; use base_db::Crate; use cfg::{CfgExpr, CfgOptions}; use either::Either; -use intern::{Interned, Symbol, sym}; - +use intern::{Interned, Symbol}; use mbe::{DelimiterKind, Punct}; -use smallvec::{SmallVec, smallvec}; -use span::{Span, SyntaxContext}; -use syntax::unescape; -use syntax::{AstNode, AstToken, SyntaxNode, ast, match_ast}; -use syntax_bridge::{DocCommentDesugarMode, desugar_doc_comment_text, syntax_node_to_token_tree}; -use triomphe::ThinArc; +use parser::T; +use smallvec::SmallVec; +use span::{RealSpanMap, Span, SyntaxContext}; +use syntax::{ + AstNode, NodeOrToken, SyntaxNode, SyntaxToken, + ast::{self, TokenTreeChildren}, + unescape, +}; +use syntax_bridge::DocCommentDesugarMode; use crate::{ + AstId, db::ExpandDatabase, mod_path::ModPath, - name::Name, span_map::SpanMapRef, - tt::{self, TopSubtree, token_to_literal}, + tt::{self, TopSubtree}, }; -/// Syntactical attributes, without filtering of `cfg_attr`s. -#[derive(Default, Debug, Clone, PartialEq, Eq)] -pub struct RawAttrs { - // FIXME: This can become `Box<[Attr]>` if https://internals.rust-lang.org/t/layout-of-dst-box/21728?u=chrefr is accepted. - entries: Option>, -} - -impl ops::Deref for RawAttrs { - type Target = [Attr]; - - fn deref(&self) -> &[Attr] { - match &self.entries { - Some(it) => &it.slice, - None => &[], - } - } +#[derive(Debug)] +pub struct AttrPath { + /// This can be empty if the path is not of 1 or 2 segments exactly. + pub segments: ArrayVec, + pub range: TextRange, + // FIXME: This shouldn't be textual, `#[test]` needs name resolution. + // And if textual, it shouldn't be here, it should be in hir-def/src/attrs.rs. But some macros + // fully qualify `test` as `core::prelude::vX::test`, and this is more than 2 segments, so hir-def + // attrs can't find it. But this will mean we have to push every up-to-4-segments path, which + // may impact perf. So it was easier to just hack it here. 
+ pub is_test: bool, } -impl RawAttrs { - pub const EMPTY: Self = Self { entries: None }; - - pub fn new( - db: &dyn ExpandDatabase, - owner: &dyn ast::HasAttrs, - span_map: SpanMapRef<'_>, - ) -> Self { - let entries: Vec<_> = Self::attrs_iter::(db, owner, span_map).collect(); - - let entries = if entries.is_empty() { - None - } else { - Some(ThinArc::from_header_and_iter((), entries.into_iter())) - }; - - RawAttrs { entries } - } - - /// A [`RawAttrs`] that has its `#[cfg_attr(...)]` attributes expanded. - pub fn new_expanded( - db: &dyn ExpandDatabase, - owner: &dyn ast::HasAttrs, - span_map: SpanMapRef<'_>, - cfg_options: &CfgOptions, - ) -> Self { - let entries: Vec<_> = - Self::attrs_iter_expanded::(db, owner, span_map, cfg_options).collect(); - - let entries = if entries.is_empty() { - None - } else { - Some(ThinArc::from_header_and_iter((), entries.into_iter())) - }; - - RawAttrs { entries } - } - - pub fn attrs_iter( - db: &dyn ExpandDatabase, - owner: &dyn ast::HasAttrs, - span_map: SpanMapRef<'_>, - ) -> impl Iterator { - collect_attrs(owner).filter_map(move |(id, attr)| match attr { - Either::Left(attr) => { - attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id)) +impl AttrPath { + #[inline] + fn extract(path: &ast::Path) -> Self { + let mut is_test = false; + let segments = (|| { + let mut segments = ArrayVec::new(); + let segment2 = path.segment()?.name_ref()?.syntax().first_token()?; + if segment2.text() == "test" { + // `#[test]` or `#[core::prelude::vX::test]`. + is_test = true; } - Either::Right(comment) if DESUGAR_COMMENTS => comment.doc_comment().map(|doc| { - let span = span_map.span_for_range(comment.syntax().text_range()); - let (text, kind) = desugar_doc_comment_text(doc, DocCommentDesugarMode::ProcMacro); - Attr { - id, - input: Some(Box::new(AttrInput::Literal(tt::Literal { - symbol: text, - span, - kind, - suffix: None, - }))), - path: Interned::new(ModPath::from(Name::new_symbol(sym::doc, span.ctx))), - ctxt: span.ctx, + let segment1 = path.qualifier(); + if let Some(segment1) = segment1 { + if segment1.qualifier().is_some() { + None + } else { + let segment1 = segment1.segment()?.name_ref()?.syntax().first_token()?; + segments.push(segment1); + segments.push(segment2); + Some(segments) } - }), - Either::Right(_) => None, - }) + } else { + segments.push(segment2); + Some(segments) + } + })(); + AttrPath { + segments: segments.unwrap_or(ArrayVec::new()), + range: path.syntax().text_range(), + is_test, + } } - pub fn attrs_iter_expanded( - db: &dyn ExpandDatabase, - owner: &dyn ast::HasAttrs, - span_map: SpanMapRef<'_>, - cfg_options: &CfgOptions, - ) -> impl Iterator { - Self::attrs_iter::(db, owner, span_map) - .flat_map(|attr| attr.expand_cfg_attr(db, cfg_options)) + #[inline] + pub fn is1(&self, segment: &str) -> bool { + self.segments.len() == 1 && self.segments[0].text() == segment } +} - pub fn merge(&self, other: Self) -> Self { - match (&self.entries, other.entries) { - (None, None) => Self::EMPTY, - (None, entries @ Some(_)) => Self { entries }, - (Some(entries), None) => Self { entries: Some(entries.clone()) }, - (Some(a), Some(b)) => { - let last_ast_index = a.slice.last().map_or(0, |it| it.id.ast_index() + 1); - let items = a - .slice - .iter() - .cloned() - .chain(b.slice.iter().map(|it| { - let mut it = it.clone(); - let id = it.id.ast_index() + last_ast_index; - it.id = AttrId::new(id, it.id.is_inner_attr()); - it - })) - .collect::>(); - Self { entries: Some(ThinArc::from_header_and_iter((), items.into_iter())) } - } 
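
Before the `Meta` enum below, a compilable sketch of the three surface shapes it models, as described in the module docs above (the item names are illustrative, not from the patch):

```rust
// Sketch: the three attribute shapes `Meta` distinguishes.
#[doc = "named key-value form: `path = value` (the value is kept only if it is a literal)"]
#[inline] // bare path form
#[must_use] // also a bare path
fn f() -> i32 {
    1
}

#[derive(Debug)] // path followed by a token tree
struct S;

fn main() {
    println!("{} {:?}", f(), S);
}
```
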
+#[derive(Debug)] +pub enum Meta { + /// `name` is `None` if not a single token. `value` is a literal or `None`. + NamedKeyValue { + path_range: TextRange, + name: Option, + value: Option, + }, + TokenTree { + path: AttrPath, + tt: ast::TokenTree, + }, + Path { + path: AttrPath, + }, +} + +impl Meta { + #[inline] + pub fn path_range(&self) -> TextRange { + match self { + Meta::NamedKeyValue { path_range, .. } => *path_range, + Meta::TokenTree { path, .. } | Meta::Path { path } => path.range, } } - /// Processes `cfg_attr`s - pub fn expand_cfg_attr(self, db: &dyn ExpandDatabase, krate: Crate) -> RawAttrs { - let has_cfg_attrs = - self.iter().any(|attr| attr.path.as_ident().is_some_and(|name| *name == sym::cfg_attr)); - if !has_cfg_attrs { - return self; + fn extract(iter: &mut Peekable) -> Option<(Self, TextSize)> { + let mut start_offset = None; + if let Some(NodeOrToken::Token(colon1)) = iter.peek() + && colon1.kind() == T![:] + { + start_offset = Some(colon1.text_range().start()); + iter.next(); + iter.next_if(|it| it.as_token().is_some_and(|it| it.kind() == T![:])); + } + let first_segment = iter + .next_if(|it| it.as_token().is_some_and(|it| it.kind().is_any_identifier()))? + .into_token()?; + let mut is_test = first_segment.text() == "test"; + let start_offset = start_offset.unwrap_or_else(|| first_segment.text_range().start()); + + let mut segments_len = 1; + let mut second_segment = None; + let mut path_range = first_segment.text_range(); + while iter.peek().and_then(NodeOrToken::as_token).is_some_and(|it| it.kind() == T![:]) + && let _ = iter.next() + && iter.peek().and_then(NodeOrToken::as_token).is_some_and(|it| it.kind() == T![:]) + && let _ = iter.next() + && let Some(NodeOrToken::Token(segment)) = iter.peek() + && segment.kind().is_any_identifier() + { + segments_len += 1; + is_test = segment.text() == "test"; + second_segment = Some(segment.clone()); + path_range = TextRange::new(path_range.start(), segment.text_range().end()); + iter.next(); } - let cfg_options = krate.cfg_options(db); - let new_attrs = self - .iter() - .cloned() - .flat_map(|attr| attr.expand_cfg_attr(db, cfg_options)) - .collect::>(); - let entries = if new_attrs.is_empty() { - None - } else { - Some(ThinArc::from_header_and_iter((), new_attrs.into_iter())) + let segments = |first, second| { + let mut segments = ArrayVec::new(); + if segments_len <= 2 { + segments.push(first); + if let Some(second) = second { + segments.push(second); + } + } + segments + }; + let meta = match iter.peek() { + Some(NodeOrToken::Token(eq)) if eq.kind() == T![=] => { + iter.next(); + let value = match iter.peek() { + Some(NodeOrToken::Token(token)) if token.kind().is_literal() => { + // No need to consume it, it will be consumed by `extract_and_eat_comma()`. 
+ Some(token.clone()) + } + _ => None, + }; + let name = if second_segment.is_none() { Some(first_segment) } else { None }; + Meta::NamedKeyValue { path_range, name, value } + } + Some(NodeOrToken::Node(tt)) => Meta::TokenTree { + path: AttrPath { + segments: segments(first_segment, second_segment), + range: path_range, + is_test, + }, + tt: tt.clone(), + }, + _ => Meta::Path { + path: AttrPath { + segments: segments(first_segment, second_segment), + range: path_range, + is_test, + }, + }, }; - RawAttrs { entries } + Some((meta, start_offset)) } - pub fn is_empty(&self) -> bool { - self.entries.is_none() + fn extract_possibly_unsafe( + iter: &mut Peekable, + container: &ast::TokenTree, + ) -> Option<(Self, TextRange)> { + if iter.peek().is_some_and(|it| it.as_token().is_some_and(|it| it.kind() == T![unsafe])) { + iter.next(); + let tt = iter.next()?.into_node()?; + let result = Self::extract(&mut TokenTreeChildren::new(&tt).peekable()).map( + |(meta, start_offset)| (meta, TextRange::new(start_offset, tt_end_offset(&tt))), + ); + while iter.next().is_some_and(|it| it.as_token().is_none_or(|it| it.kind() != T![,])) {} + result + } else { + Self::extract(iter).map(|(meta, start_offset)| { + let end_offset = 'find_end_offset: { + for it in iter { + if let NodeOrToken::Token(it) = it + && it.kind() == T![,] + { + break 'find_end_offset it.text_range().start(); + } + } + tt_end_offset(container) + }; + (meta, TextRange::new(start_offset, end_offset)) + }) + } } } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct AttrId { - id: u32, +fn tt_end_offset(tt: &ast::TokenTree) -> TextSize { + tt.syntax().last_token().unwrap().text_range().start() } -// FIXME: This only handles a single level of cfg_attr nesting -// that is `#[cfg_attr(all(), cfg_attr(all(), cfg(any())))]` breaks again -impl AttrId { - const INNER_ATTR_SET_BIT: u32 = 1 << 31; +/// The callback is passed a desugared form of the attribute ([`Meta`]), a [`SyntaxNode`] fully containing it +/// (note: it may not be the direct parent), the range within the [`SyntaxNode`] bounding the attribute, +/// and the outermost `ast::Attr`. Note that one node may map to multiple [`Meta`]s due to `cfg_attr`. 
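
The stack-based traversal in `expand_cfg_attr` below is what lifts the old limitation noted in the removed `AttrId` comment ("only handles a single level of `cfg_attr` nesting"). A minimal, compilable sketch of the nested case, not taken from the patch:

```rust
// Sketch: nested `cfg_attr`. Both predicates are true, so the callback is invoked with
// two `Meta`s (`derive(Debug)` and `allow(dead_code)`) for this single `ast::Attr` node.
#[cfg_attr(all(), cfg_attr(all(), derive(Debug), allow(dead_code)))]
struct S;

fn main() {
    println!("{:?}", S);
}
```
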
+#[inline] +pub fn expand_cfg_attr<'a, BreakValue>( + attrs: impl Iterator, + cfg_options: impl FnMut() -> &'a CfgOptions, + mut callback: impl FnMut(Meta, &SyntaxNode, TextRange, &ast::Attr) -> ControlFlow, +) -> Option { + expand_cfg_attr_with_doc_comments::( + attrs.map(Either::Left), + cfg_options, + move |Either::Left((meta, container, range, top_attr))| { + callback(meta, container, range, top_attr) + }, + ) +} - pub fn new(id: usize, is_inner: bool) -> Self { - assert!(id <= !Self::INNER_ATTR_SET_BIT as usize); - let id = id as u32; - Self { id: if is_inner { id | Self::INNER_ATTR_SET_BIT } else { id } } - } +#[inline] +pub fn expand_cfg_attr_with_doc_comments<'a, DocComment, BreakValue>( + mut attrs: impl Iterator>, + mut cfg_options: impl FnMut() -> &'a CfgOptions, + mut callback: impl FnMut( + Either<(Meta, &SyntaxNode, TextRange, &ast::Attr), DocComment>, + ) -> ControlFlow, +) -> Option { + let mut stack = SmallVec::<[_; 1]>::new(); + let result = attrs.try_for_each(|top_attr| { + let top_attr = match top_attr { + Either::Left(it) => it, + Either::Right(comment) => return callback(Either::Right(comment)), + }; + if let Some((attr_name, tt)) = top_attr.as_simple_call() + && attr_name == "cfg_attr" + { + let mut tt_iter = TokenTreeChildren::new(&tt).peekable(); + let cfg = cfg::CfgExpr::parse_from_ast(&mut tt_iter); + if cfg_options().check(&cfg) != Some(false) { + stack.push((tt_iter, tt)); + while let Some((tt_iter, tt)) = stack.last_mut() { + let Some((attr, range)) = Meta::extract_possibly_unsafe(tt_iter, tt) else { + stack.pop(); + continue; + }; + if let Meta::TokenTree { path, tt: nested_tt } = &attr + && path.is1("cfg_attr") + { + let mut nested_tt_iter = TokenTreeChildren::new(nested_tt).peekable(); + let cfg = cfg::CfgExpr::parse_from_ast(&mut nested_tt_iter); + if cfg_options().check(&cfg) != Some(false) { + stack.push((nested_tt_iter, nested_tt.clone())); + } + } else { + callback(Either::Left((attr, tt.syntax(), range, &top_attr)))?; + } + } + } + } else if let Some(ast_meta) = top_attr.meta() + && let Some(path) = ast_meta.path() + { + let path = AttrPath::extract(&path); + let meta = if let Some(tt) = ast_meta.token_tree() { + Meta::TokenTree { path, tt } + } else if let Some(value) = ast_meta.expr() { + let value = + if let ast::Expr::Literal(value) = value { Some(value.token()) } else { None }; + let name = + if path.segments.len() == 1 { Some(path.segments[0].clone()) } else { None }; + Meta::NamedKeyValue { name, value, path_range: path.range } + } else { + Meta::Path { path } + }; + callback(Either::Left(( + meta, + ast_meta.syntax(), + ast_meta.syntax().text_range(), + &top_attr, + )))?; + } + ControlFlow::Continue(()) + }); + result.break_value() +} - pub fn ast_index(&self) -> usize { - (self.id & !Self::INNER_ATTR_SET_BIT) as usize - } +#[inline] +pub(crate) fn is_item_tree_filtered_attr(name: &str) -> bool { + matches!( + name, + "doc" + | "stable" + | "unstable" + | "target_feature" + | "allow" + | "expect" + | "warn" + | "deny" + | "forbid" + | "repr" + | "inline" + | "track_caller" + | "must_use" + ) +} - pub fn is_inner_attr(&self) -> bool { - self.id & Self::INNER_ATTR_SET_BIT != 0 - } +/// This collects attributes exactly as the item tree needs them. This is used for the item tree, +/// as well as for resolving [`AttrId`]s. 
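
For illustration, a compilable sketch (not part of the patch) of how the filter in `collect_item_tree_attrs` treats a typical attribute list. The names listed in `is_item_tree_filtered_attr` above (`doc`, `allow`, `inline`, `must_use`, ...) are dropped to save memory, `cfg` is evaluated on the spot, and anything that could resolve to a macro is kept. The feature name below is an assumption.

```rust
// Sketch: which attributes survive item-tree collection.
#[doc = "dropped: `doc` is in the filter list"]
#[allow(dead_code)] // dropped: builtin, never shadowable by a macro
#[derive(Debug, Clone)] // kept: participates in name resolution
#[cfg(not(feature = "never-enabled"))] // checked here; a false `cfg` short-circuits with the failing predicate
struct S;

fn main() {
    println!("{:?}", S.clone());
}
```
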
+pub fn collect_item_tree_attrs<'a, BreakValue>( + owner: &dyn ast::HasAttrs, + cfg_options: impl Fn() -> &'a CfgOptions, + mut on_attr: impl FnMut(Meta, &SyntaxNode, &ast::Attr, TextRange) -> ControlFlow, +) -> Option> { + let attrs = ast::attrs_including_inner(owner); + expand_cfg_attr( + attrs, + || cfg_options(), + |attr, container, range, top_attr| { + // We filter builtin attributes that we don't need for nameres, because this saves memory. + // I only put the most common attributes, but if some attribute becomes common feel free to add it. + // Notice, however: for an attribute to be filtered out, it *must* not be shadowable with a macro! + let filter = match &attr { + Meta::NamedKeyValue { name: Some(name), .. } => { + is_item_tree_filtered_attr(name.text()) + } + Meta::TokenTree { path, tt } if path.segments.len() == 1 => { + let name = path.segments[0].text(); + if name == "cfg" { + let cfg = + CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(tt).peekable()); + if cfg_options().check(&cfg) == Some(false) { + return ControlFlow::Break(Either::Right(cfg)); + } + true + } else { + is_item_tree_filtered_attr(name) + } + } + Meta::Path { path } => { + path.segments.len() == 1 && is_item_tree_filtered_attr(path.segments[0].text()) + } + _ => false, + }; + if !filter && let ControlFlow::Break(v) = on_attr(attr, container, top_attr, range) { + return ControlFlow::Break(Either::Left(v)); + } + ControlFlow::Continue(()) + }, + ) } #[derive(Debug, Clone, PartialEq, Eq)] pub struct Attr { - pub id: AttrId, pub path: Interned, pub input: Option>, pub ctxt: SyntaxContext, @@ -217,131 +414,6 @@ impl fmt::Display for AttrInput { } } -impl Attr { - fn from_src( - db: &dyn ExpandDatabase, - ast: ast::Meta, - span_map: SpanMapRef<'_>, - id: AttrId, - ) -> Option { - let path = ast.path()?; - let range = path.syntax().text_range(); - let path = Interned::new(ModPath::from_src(db, path, &mut |range| { - span_map.span_for_range(range).ctx - })?); - let span = span_map.span_for_range(range); - let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() { - let token = lit.token(); - Some(Box::new(AttrInput::Literal(token_to_literal(token.text(), span)))) - } else if let Some(tt) = ast.token_tree() { - let tree = syntax_node_to_token_tree( - tt.syntax(), - span_map, - span, - DocCommentDesugarMode::ProcMacro, - ); - Some(Box::new(AttrInput::TokenTree(tree))) - } else { - None - }; - Some(Attr { id, path, input, ctxt: span.ctx }) - } - - fn from_tt( - db: &dyn ExpandDatabase, - mut tt: tt::TokenTreesView<'_>, - id: AttrId, - ) -> Option { - if matches!(tt.flat_tokens(), - [tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { sym, .. })), ..] - if *sym == sym::unsafe_ - ) { - match tt.iter().nth(1) { - Some(tt::TtElement::Subtree(_, iter)) => tt = iter.remaining(), - _ => return None, - } - } - let first = tt.flat_tokens().first()?; - let ctxt = first.first_span().ctx; - let (path, input) = { - let mut iter = tt.iter(); - let start = iter.savepoint(); - let mut input = tt::TokenTreesView::new(&[]); - let mut path = iter.from_savepoint(start); - let mut path_split_savepoint = iter.savepoint(); - while let Some(tt) = iter.next() { - path = iter.from_savepoint(start); - if !matches!( - tt, - tt::TtElement::Leaf( - tt::Leaf::Punct(tt::Punct { char: ':' | '$', .. 
}) | tt::Leaf::Ident(_), - ) - ) { - input = path_split_savepoint.remaining(); - break; - } - path_split_savepoint = iter.savepoint(); - } - (path, input) - }; - - let path = Interned::new(ModPath::from_tt(db, path)?); - - let input = match (input.flat_tokens().first(), input.try_into_subtree()) { - (_, Some(tree)) => { - Some(Box::new(AttrInput::TokenTree(tt::TopSubtree::from_subtree(tree)))) - } - (Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. }))), _) => { - match input.flat_tokens().get(1) { - Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) => { - Some(Box::new(AttrInput::Literal(lit.clone()))) - } - _ => None, - } - } - _ => None, - }; - Some(Attr { id, path, input, ctxt }) - } - - pub fn path(&self) -> &ModPath { - &self.path - } - - pub fn expand_cfg_attr( - self, - db: &dyn ExpandDatabase, - cfg_options: &CfgOptions, - ) -> impl IntoIterator { - let is_cfg_attr = self.path.as_ident().is_some_and(|name| *name == sym::cfg_attr); - if !is_cfg_attr { - return smallvec![self]; - } - - let subtree = match self.token_tree_value() { - Some(it) => it, - _ => return smallvec![self.clone()], - }; - - let (cfg, parts) = match parse_cfg_attr_input(subtree) { - Some(it) => it, - None => return smallvec![self.clone()], - }; - let index = self.id; - let attrs = parts.filter_map(|attr| Attr::from_tt(db, attr, index)); - - let cfg = TopSubtree::from_token_trees(subtree.top_subtree().delimiter, cfg); - let cfg = CfgExpr::parse(&cfg); - if cfg_options.check(&cfg) == Some(false) { - smallvec![] - } else { - cov_mark::hit!(cfg_attr_active); - - attrs.collect::>() - } - } -} - impl Attr { /// #[path = "string"] pub fn string_value(&self) -> Option<&Symbol> { @@ -403,30 +475,26 @@ impl Attr { pub fn parse_path_comma_token_tree<'a>( &'a self, db: &'a dyn ExpandDatabase, - ) -> Option + 'a> { + ) -> Option)> + 'a> { let args = self.token_tree_value()?; if args.top_subtree().delimiter.kind != DelimiterKind::Parenthesis { return None; } - let paths = args - .token_trees() - .split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(Punct { char: ',', .. })))) - .filter_map(move |tts| { - let span = tts.flat_tokens().first()?.first_span(); - Some((ModPath::from_tt(db, tts)?, span)) - }); - - Some(paths) + Some(parse_path_comma_token_tree(db, args)) } +} - pub fn cfg(&self) -> Option { - if *self.path.as_ident()? == sym::cfg { - self.token_tree_value().map(CfgExpr::parse) - } else { - None - } - } +fn parse_path_comma_token_tree<'a>( + db: &'a dyn ExpandDatabase, + args: &'a tt::TopSubtree, +) -> impl Iterator)> { + args.token_trees() + .split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(Punct { char: ',', .. })))) + .filter_map(move |tts| { + let span = tts.flat_tokens().first()?.first_span(); + Some((ModPath::from_tt(db, tts)?, span, tts)) + }) } fn unescape(s: &str) -> Option> { @@ -455,58 +523,104 @@ fn unescape(s: &str) -> Option> { } } -pub fn collect_attrs( - owner: &dyn ast::HasAttrs, -) -> impl Iterator)> { - let inner_attrs = - inner_attributes(owner.syntax()).into_iter().flatten().zip(iter::repeat(true)); - let outer_attrs = ast::AttrDocCommentIter::from_syntax_node(owner.syntax()) - .filter(|el| match el { - Either::Left(attr) => attr.kind().is_outer(), - Either::Right(comment) => comment.is_outer(), - }) - .zip(iter::repeat(false)); - outer_attrs - .chain(inner_attrs) - .enumerate() - .map(|(id, (attr, is_inner))| (AttrId::new(id, is_inner), attr)) +/// This is an index of an attribute *that always points to the item tree attributes*. 
+/// +/// Outer attributes are counted first, then inner attributes. This does not support +/// out-of-line modules, which may have attributes spread across 2 files! +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct AttrId { + id: u32, } -fn inner_attributes( - syntax: &SyntaxNode, -) -> Option>> { - let node = match_ast! { - match syntax { - ast::SourceFile(_) => syntax.clone(), - ast::ExternBlock(it) => it.extern_item_list()?.syntax().clone(), - ast::Fn(it) => it.body()?.stmt_list()?.syntax().clone(), - ast::Impl(it) => it.assoc_item_list()?.syntax().clone(), - ast::Module(it) => it.item_list()?.syntax().clone(), - ast::BlockExpr(it) => { - if !it.may_carry_attributes() { - return None +impl AttrId { + #[inline] + pub fn from_item_tree_index(id: u32) -> Self { + Self { id } + } + + #[inline] + pub fn item_tree_index(self) -> u32 { + self.id + } + + /// Returns the containing `ast::Attr` (note that it may contain other attributes as well due + /// to `cfg_attr`), a `SyntaxNode` guaranteed to contain the attribute, the full range of the + /// attribute, and its desugared [`Meta`]. + pub fn find_attr_range( + self, + db: &dyn ExpandDatabase, + krate: Crate, + owner: AstId, + ) -> (ast::Attr, SyntaxNode, TextRange, Meta) { + self.find_attr_range_with_source(db, krate, &owner.to_node(db)) + } + + /// Returns the containing `ast::Attr` (note that it may contain other attributes as well due + /// to `cfg_attr`), a `SyntaxNode` guaranteed to contain the attribute, the full range of the + /// attribute, and its desugared [`Meta`]. + pub fn find_attr_range_with_source( + self, + db: &dyn ExpandDatabase, + krate: Crate, + owner: &dyn ast::HasAttrs, + ) -> (ast::Attr, SyntaxNode, TextRange, Meta) { + let cfg_options = OnceCell::new(); + let mut index = 0; + let result = collect_item_tree_attrs( + owner, + || cfg_options.get_or_init(|| krate.cfg_options(db)), + |meta, container, top_attr, range| { + if index == self.id { + return ControlFlow::Break((top_attr.clone(), container.clone(), range, meta)); } - syntax.clone() + index += 1; + ControlFlow::Continue(()) }, - _ => return None, + ); + match result { + Some(Either::Left(it)) => it, + _ => { + panic!("used an incorrect `AttrId`; crate={krate:?}, attr_id={self:?}"); + } } - }; - - let attrs = ast::AttrDocCommentIter::from_syntax_node(&node).filter(|el| match el { - Either::Left(attr) => attr.kind().is_inner(), - Either::Right(comment) => comment.is_inner(), - }); - Some(attrs) -} + } -// Input subtree is: `(cfg, $(attr),+)` -// Split it up into a `cfg` subtree and the `attr` subtrees. -fn parse_cfg_attr_input( - subtree: &TopSubtree, -) -> Option<(tt::TokenTreesView<'_>, impl Iterator>)> { - let mut parts = subtree - .token_trees() - .split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(Punct { char: ',', .. })))); - let cfg = parts.next()?; - Some((cfg, parts.filter(|it| !it.is_empty()))) + pub fn find_derive_range( + self, + db: &dyn ExpandDatabase, + krate: Crate, + owner: AstId, + derive_index: u32, + ) -> TextRange { + let (_, _, derive_attr_range, derive_attr) = self.find_attr_range(db, krate, owner); + let Meta::TokenTree { tt, .. } = derive_attr else { + return derive_attr_range; + }; + // Fake the span map, as we don't really need spans here, just the offsets of the node in the file. 
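
The rest of `find_derive_range` boils down to splitting the `#[derive(...)]` argument list on commas and taking the `derive_index`-th chunk, falling back to the whole attribute range when that fails. A simplified, string-based sketch of that splitting step (plain byte offsets stand in for `TextRange`, no token trees involved):

```rust
use std::ops::Range;

/// Byte range of the `index`-th element of a comma-separated derive list
/// such as `Clone, Debug, serde::Serialize`, with surrounding spaces trimmed.
fn nth_derive_range(derive_list: &str, index: usize) -> Option<Range<usize>> {
    let mut start = 0;
    for (i, part) in derive_list.split(',').enumerate() {
        let end = start + part.len();
        if i == index {
            // Trim whitespace without losing the absolute offsets.
            let trimmed_start = start + (part.len() - part.trim_start().len());
            let trimmed_end = end - (part.len() - part.trim_end().len());
            return Some(trimmed_start..trimmed_end);
        }
        start = end + 1; // step over the comma
    }
    None
}

fn main() {
    let list = "Clone, Debug, serde::Serialize";
    let range = nth_derive_range(list, 1).unwrap();
    assert_eq!(&list[range], "Debug");
}
```
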
+ let span_map = RealSpanMap::absolute(span::EditionedFileId::current_edition( + span::FileId::from_raw(0), + )); + let tt = syntax_bridge::syntax_node_to_token_tree( + tt.syntax(), + SpanMapRef::RealSpanMap(&span_map), + span_map.span_for_range(tt.syntax().text_range()), + DocCommentDesugarMode::ProcMacro, + ); + let Some((_, _, derive_tts)) = + parse_path_comma_token_tree(db, &tt).nth(derive_index as usize) + else { + return derive_attr_range; + }; + let (Some(first_tt), Some(last_tt)) = + (derive_tts.flat_tokens().first(), derive_tts.flat_tokens().last()) + else { + return derive_attr_range; + }; + let start = first_tt.first_span().range.start(); + let end = match last_tt { + tt::TokenTree::Leaf(it) => it.span().range.end(), + tt::TokenTree::Subtree(it) => it.delimiter.close.range.end(), + }; + TextRange::new(start, end) + } } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs index 6fe63f249cd4a..92bcd378149ed 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs @@ -772,7 +772,7 @@ fn relative_file( if res == call_site && !allow_recursion { Err(ExpandError::other(err_span, format!("recursive inclusion of `{path_str}`"))) } else { - Ok(EditionedFileId::new(db, res, lookup.krate.data(db).edition)) + Ok(EditionedFileId::new(db, res, lookup.krate.data(db).edition, lookup.krate)) } } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs b/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs index d5ebd6ee19f5c..8b82671ed4a08 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs @@ -1,373 +1,343 @@ //! Processes out #[cfg] and #[cfg_attr] attributes from the input for the derive macro -use std::iter::Peekable; +use std::{cell::OnceCell, ops::ControlFlow}; +use ::tt::TextRange; use base_db::Crate; -use cfg::{CfgAtom, CfgExpr}; -use intern::{Symbol, sym}; -use rustc_hash::FxHashSet; +use cfg::CfgExpr; +use parser::T; +use smallvec::SmallVec; use syntax::{ - AstNode, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, T, - ast::{self, Attr, HasAttrs, Meta, TokenTree, VariantList}, + AstNode, PreorderWithTokens, SyntaxElement, SyntaxNode, SyntaxToken, WalkEvent, + ast::{self, HasAttrs, TokenTreeChildren}, }; -use tracing::{debug, warn}; +use syntax_bridge::DocCommentDesugarMode; -use crate::{MacroCallLoc, MacroDefKind, db::ExpandDatabase, proc_macro::ProcMacroKind}; +use crate::{ + attrs::{AttrId, Meta, expand_cfg_attr, is_item_tree_filtered_attr}, + db::ExpandDatabase, + fixup::{self, SyntaxFixupUndoInfo}, + span_map::SpanMapRef, + tt::{self, DelimSpan, Span}, +}; -fn check_cfg(db: &dyn ExpandDatabase, attr: &Attr, krate: Crate) -> Option { - if !attr.simple_name().as_deref().map(|v| v == "cfg")? { - return None; - } - let cfg = parse_from_attr_token_tree(&attr.meta()?.token_tree()?)?; - let enabled = krate.cfg_options(db).check(&cfg) != Some(false); - Some(enabled) +struct ItemIsCfgedOut; + +#[derive(Debug)] +struct ExpandedAttrToProcess { + range: TextRange, } -fn check_cfg_attr(db: &dyn ExpandDatabase, attr: &Attr, krate: Crate) -> Option { - if !attr.simple_name().as_deref().map(|v| v == "cfg_attr")? 
{ - return None; - } - check_cfg_attr_value(db, &attr.token_tree()?, krate) +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum NextExpandedAttrState { + NotStarted, + InTheMiddle, } -pub fn check_cfg_attr_value( - db: &dyn ExpandDatabase, - attr: &TokenTree, - krate: Crate, -) -> Option { - let cfg_expr = parse_from_attr_token_tree(attr)?; - let enabled = krate.cfg_options(db).check(&cfg_expr) != Some(false); - Some(enabled) +#[derive(Debug)] +struct AstAttrToProcess { + range: TextRange, + expanded_attrs: SmallVec<[ExpandedAttrToProcess; 1]>, + expanded_attrs_idx: usize, + next_expanded_attr: NextExpandedAttrState, + pound_span: Span, + brackets_span: DelimSpan, + /// If `Some`, this is an inner attribute. + excl_span: Option, } -fn process_has_attrs_with_possible_comma( +fn macro_input_callback( db: &dyn ExpandDatabase, - items: impl Iterator, + is_derive: bool, + censor_item_tree_attr_ids: &[AttrId], krate: Crate, - remove: &mut FxHashSet, -) -> Option<()> { - for item in items { - let field_attrs = item.attrs(); - 'attrs: for attr in field_attrs { - if let Some(enabled) = check_cfg(db, &attr, krate) { - if enabled { - debug!("censoring {:?}", attr.syntax()); - remove.insert(attr.syntax().clone().into()); - } else { - debug!("censoring {:?}", item.syntax()); - remove.insert(item.syntax().clone().into()); - // We need to remove the , as well - remove_possible_comma(&item, remove); - break 'attrs; - } - } + default_span: Span, + span_map: SpanMapRef<'_>, +) -> impl FnMut(&mut PreorderWithTokens, &WalkEvent) -> (bool, Vec) { + let cfg_options = OnceCell::new(); + let cfg_options = move || *cfg_options.get_or_init(|| krate.cfg_options(db)); - if let Some(enabled) = check_cfg_attr(db, &attr, krate) { - if enabled { - debug!("Removing cfg_attr tokens {:?}", attr); - let meta = attr.meta()?; - let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?; - remove.extend(removes_from_cfg_attr); - } else { - debug!("censoring type cfg_attr {:?}", item.syntax()); - remove.insert(attr.syntax().clone().into()); - } + let mut should_strip_attr = { + let mut item_tree_attr_id = 0; + let mut censor_item_tree_attr_ids_index = 0; + move || { + let mut result = false; + if let Some(&next_censor_attr_id) = + censor_item_tree_attr_ids.get(censor_item_tree_attr_ids_index) + && next_censor_attr_id.item_tree_index() == item_tree_attr_id + { + censor_item_tree_attr_ids_index += 1; + result = true; } + item_tree_attr_id += 1; + result } - } - Some(()) -} + }; -#[derive(Debug, PartialEq, Eq, Clone, Copy)] -enum CfgExprStage { - /// Stripping the CFGExpr part of the attribute - StrippigCfgExpr, - /// Found the comma after the CFGExpr. Will keep all tokens until the next comma or the end of the attribute - FoundComma, - /// Everything following the attribute. This could be another attribute or the end of the attribute. - // FIXME: cfg_attr with multiple attributes will not be handled correctly. We will only keep the first attribute - // Related Issue: https://github.com/rust-lang/rust-analyzer/issues/10110 - EverythingElse, -} + let mut attrs = Vec::new(); + let mut attrs_idx = 0; + let mut has_inner_attrs_owner = false; + let mut in_attr = false; + let mut done_with_attrs = false; + let mut did_top_attrs = false; + move |preorder, event| { + match event { + WalkEvent::Enter(SyntaxElement::Node(node)) => { + if done_with_attrs { + return (true, Vec::new()); + } -/// This function creates its own set of tokens to remove. To help prevent malformed syntax as input. 
-fn remove_tokens_within_cfg_attr(meta: Meta) -> Option> { - let mut remove: FxHashSet = FxHashSet::default(); - debug!("Enabling attribute {}", meta); - let meta_path = meta.path()?; - debug!("Removing {:?}", meta_path.syntax()); - remove.insert(meta_path.syntax().clone().into()); + if ast::Attr::can_cast(node.kind()) { + in_attr = true; + let node_range = node.text_range(); + while attrs + .get(attrs_idx) + .is_some_and(|it: &AstAttrToProcess| it.range != node_range) + { + attrs_idx += 1; + } + } else if let Some(has_attrs) = ast::AnyHasAttrs::cast(node.clone()) { + if has_inner_attrs_owner { + has_inner_attrs_owner = false; + return (true, Vec::new()); + } - let meta_tt = meta.token_tree()?; - debug!("meta_tt {}", meta_tt); - let mut stage = CfgExprStage::StrippigCfgExpr; - for tt in meta_tt.token_trees_and_tokens() { - debug!("Checking {:?}. Stage: {:?}", tt, stage); - match (stage, tt) { - (CfgExprStage::StrippigCfgExpr, syntax::NodeOrToken::Node(node)) => { - remove.insert(node.syntax().clone().into()); - } - (CfgExprStage::StrippigCfgExpr, syntax::NodeOrToken::Token(token)) => { - if token.kind() == T![,] { - stage = CfgExprStage::FoundComma; - } - remove.insert(token.into()); - } - (CfgExprStage::FoundComma, syntax::NodeOrToken::Token(token)) - if (token.kind() == T![,] || token.kind() == T![')']) => - { - // The end of the attribute or separator for the next attribute - stage = CfgExprStage::EverythingElse; - remove.insert(token.into()); - } - (CfgExprStage::EverythingElse, syntax::NodeOrToken::Node(node)) => { - remove.insert(node.syntax().clone().into()); - } - (CfgExprStage::EverythingElse, syntax::NodeOrToken::Token(token)) => { - remove.insert(token.into()); - } - // This is an actual attribute - _ => {} - } - } - if stage != CfgExprStage::EverythingElse { - warn!("Invalid cfg_attr attribute. {:?}", meta_tt); - return None; - } - Some(remove) -} -/// Removes a possible comma after the [AstNode] -fn remove_possible_comma(item: &impl AstNode, res: &mut FxHashSet) { - if let Some(comma) = item.syntax().next_sibling_or_token().filter(|it| it.kind() == T![,]) { - res.insert(comma); - } -} -fn process_enum( - db: &dyn ExpandDatabase, - variants: VariantList, - krate: Crate, - remove: &mut FxHashSet, -) -> Option<()> { - 'variant: for variant in variants.variants() { - for attr in variant.attrs() { - if let Some(enabled) = check_cfg(db, &attr, krate) { - if enabled { - debug!("censoring {:?}", attr.syntax()); - remove.insert(attr.syntax().clone().into()); - } else { - // Rustc does not strip the attribute if it is enabled. So we will leave it - debug!("censoring type {:?}", variant.syntax()); - remove.insert(variant.syntax().clone().into()); - // We need to remove the , as well - remove_possible_comma(&variant, remove); - continue 'variant; - } - } + if did_top_attrs && !is_derive { + // Derives need all attributes handled, but attribute macros need only the top attributes handled. 
+ done_with_attrs = true; + return (true, Vec::new()); + } + did_top_attrs = true; + + if let Some(inner_attrs_node) = has_attrs.inner_attributes_node() + && inner_attrs_node != *node + { + has_inner_attrs_owner = true; + } + + let node_attrs = ast::attrs_including_inner(&has_attrs); + + attrs.clear(); + node_attrs.clone().for_each(|attr| { + let span_for = |token: Option| { + token + .map(|token| span_map.span_for_range(token.text_range())) + .unwrap_or(default_span) + }; + attrs.push(AstAttrToProcess { + range: attr.syntax().text_range(), + pound_span: span_for(attr.pound_token()), + brackets_span: DelimSpan { + open: span_for(attr.l_brack_token()), + close: span_for(attr.r_brack_token()), + }, + excl_span: attr + .excl_token() + .map(|token| span_map.span_for_range(token.text_range())), + expanded_attrs: SmallVec::new(), + expanded_attrs_idx: 0, + next_expanded_attr: NextExpandedAttrState::NotStarted, + }); + }); + + attrs_idx = 0; + let strip_current_item = expand_cfg_attr( + node_attrs, + &cfg_options, + |attr, _container, range, top_attr| { + // Find the attr. + while attrs[attrs_idx].range != top_attr.syntax().text_range() { + attrs_idx += 1; + } + + let mut strip_current_attr = false; + match attr { + Meta::NamedKeyValue { name, .. } => { + if name + .is_none_or(|name| !is_item_tree_filtered_attr(name.text())) + { + strip_current_attr = should_strip_attr(); + } + } + Meta::TokenTree { path, tt } => { + if path.segments.len() != 1 + || !is_item_tree_filtered_attr(path.segments[0].text()) + { + strip_current_attr = should_strip_attr(); + } + + if path.segments.len() == 1 { + let name = path.segments[0].text(); + + if name == "cfg" { + let cfg_expr = CfgExpr::parse_from_ast( + &mut TokenTreeChildren::new(&tt).peekable(), + ); + if cfg_options().check(&cfg_expr) == Some(false) { + return ControlFlow::Break(ItemIsCfgedOut); + } + strip_current_attr = true; + } + } + } + Meta::Path { path } => { + if path.segments.len() != 1 + || !is_item_tree_filtered_attr(path.segments[0].text()) + { + strip_current_attr = should_strip_attr(); + } + } + } + + if !strip_current_attr { + attrs[attrs_idx] + .expanded_attrs + .push(ExpandedAttrToProcess { range }); + } - if let Some(enabled) = check_cfg_attr(db, &attr, krate) { - if enabled { - debug!("Removing cfg_attr tokens {:?}", attr); - let meta = attr.meta()?; - let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?; - remove.extend(removes_from_cfg_attr); - } else { - debug!("censoring type cfg_attr {:?}", variant.syntax()); - remove.insert(attr.syntax().clone().into()); + ControlFlow::Continue(()) + }, + ); + attrs_idx = 0; + + if strip_current_item.is_some() { + preorder.skip_subtree(); + attrs.clear(); + + 'eat_comma: { + // If there is a comma after this node, eat it too. 
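
The comma handling that follows exists because a cfg'd-out item can sit in a comma-separated position (an enum variant or a struct field); dropping the node but keeping its trailing comma would leave syntactically broken output. A standalone sketch of that invariant with toy string tokens:

```rust
/// Drop the entry at `remove_idx` from a comma-separated token list, also
/// eating the comma that follows it so the remaining list stays well-formed.
fn remove_with_trailing_comma(tokens: &[&str], remove_idx: usize) -> Vec<String> {
    let mut out = Vec::new();
    let mut skip_next_comma = false;
    for (i, &tok) in tokens.iter().enumerate() {
        if i == remove_idx {
            skip_next_comma = true;
            continue;
        }
        if skip_next_comma && tok == "," {
            skip_next_comma = false;
            continue;
        }
        skip_next_comma = false;
        out.push(tok.to_owned());
    }
    out
}

fn main() {
    let variants = ["A", ",", "B", ",", "C"];
    // Removing `B` must also remove the comma after it, not leave `A , , C`.
    assert_eq!(remove_with_trailing_comma(&variants, 2), ["A", ",", "C"]);
}
```
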
+ let mut events_until_comma = 0; + for event in preorder.clone() { + match event { + WalkEvent::Enter(SyntaxElement::Node(_)) + | WalkEvent::Leave(_) => {} + WalkEvent::Enter(SyntaxElement::Token(token)) => { + let kind = token.kind(); + if kind == T![,] { + break; + } else if !kind.is_trivia() { + break 'eat_comma; + } + } + } + events_until_comma += 1; + } + preorder.nth(events_until_comma); + } + + return (false, Vec::new()); + } } } - } - if let Some(fields) = variant.field_list() { - match fields { - ast::FieldList::RecordFieldList(fields) => { - process_has_attrs_with_possible_comma(db, fields.fields(), krate, remove)?; - } - ast::FieldList::TupleFieldList(fields) => { - process_has_attrs_with_possible_comma(db, fields.fields(), krate, remove)?; + WalkEvent::Leave(SyntaxElement::Node(node)) => { + if ast::Attr::can_cast(node.kind()) { + in_attr = false; + attrs_idx += 1; } } - } - } - Some(()) -} + WalkEvent::Enter(SyntaxElement::Token(token)) => { + if !in_attr { + return (true, Vec::new()); + } -pub(crate) fn process_cfg_attrs( - db: &dyn ExpandDatabase, - node: &SyntaxNode, - loc: &MacroCallLoc, -) -> Option> { - // FIXME: #[cfg_eval] is not implemented. But it is not stable yet - let is_derive = match loc.def.kind { - MacroDefKind::BuiltInDerive(..) - | MacroDefKind::ProcMacro(_, _, ProcMacroKind::CustomDerive) => true, - MacroDefKind::BuiltInAttr(_, expander) => expander.is_derive(), - _ => false, - }; - let mut remove = FxHashSet::default(); + let Some(ast_attr) = attrs.get_mut(attrs_idx) else { + return (true, Vec::new()); + }; + let token_range = token.text_range(); + let Some(expanded_attr) = ast_attr.expanded_attrs.get(ast_attr.expanded_attrs_idx) + else { + // No expanded attributes in this `ast::Attr`, or we finished them all already, either way + // the remaining tokens should be discarded. + return (false, Vec::new()); + }; + match ast_attr.next_expanded_attr { + NextExpandedAttrState::NotStarted => { + if token_range.start() >= expanded_attr.range.start() { + // We started the next attribute. + let mut insert_tokens = Vec::with_capacity(3); + insert_tokens.push(tt::Leaf::Punct(tt::Punct { + char: '#', + spacing: tt::Spacing::Alone, + span: ast_attr.pound_span, + })); + if let Some(span) = ast_attr.excl_span { + insert_tokens.push(tt::Leaf::Punct(tt::Punct { + char: '!', + spacing: tt::Spacing::Alone, + span, + })); + } + insert_tokens.push(tt::Leaf::Punct(tt::Punct { + char: '[', + spacing: tt::Spacing::Alone, + span: ast_attr.brackets_span.open, + })); - let item = ast::Item::cast(node.clone())?; - for attr in item.attrs() { - if let Some(enabled) = check_cfg_attr(db, &attr, loc.krate) { - if enabled { - debug!("Removing cfg_attr tokens {:?}", attr); - let meta = attr.meta()?; - let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?; - remove.extend(removes_from_cfg_attr); - } else { - debug!("Removing type cfg_attr {:?}", item.syntax()); - remove.insert(attr.syntax().clone().into()); - } - } - } + ast_attr.next_expanded_attr = NextExpandedAttrState::InTheMiddle; - if is_derive { - // Only derives get their code cfg-clean, normal attribute macros process only the cfg at their level - // (cfg_attr is handled above, cfg is handled in the def map). - match item { - ast::Item::Struct(it) => match it.field_list()? 
{ - ast::FieldList::RecordFieldList(fields) => { - process_has_attrs_with_possible_comma( - db, - fields.fields(), - loc.krate, - &mut remove, - )?; - } - ast::FieldList::TupleFieldList(fields) => { - process_has_attrs_with_possible_comma( - db, - fields.fields(), - loc.krate, - &mut remove, - )?; - } - }, - ast::Item::Enum(it) => { - process_enum(db, it.variant_list()?, loc.krate, &mut remove)?; - } - ast::Item::Union(it) => { - process_has_attrs_with_possible_comma( - db, - it.record_field_list()?.fields(), - loc.krate, - &mut remove, - )?; - } - // FIXME: Implement for other items if necessary. As we do not support #[cfg_eval] yet, we do not need to implement it for now - _ => {} - } - } - Some(remove) -} -/// Parses a `cfg` attribute from the meta -fn parse_from_attr_token_tree(tt: &TokenTree) -> Option { - let mut iter = tt - .token_trees_and_tokens() - .filter(is_not_whitespace) - .skip(1) - .take_while(is_not_closing_paren) - .peekable(); - next_cfg_expr_from_syntax(&mut iter) -} + return (true, insert_tokens); + } else { + // Before any attribute or between the attributes. + return (false, Vec::new()); + } + } + NextExpandedAttrState::InTheMiddle => { + if token_range.start() >= expanded_attr.range.end() { + // Finished the current attribute. + let insert_tokens = vec![tt::Leaf::Punct(tt::Punct { + char: ']', + spacing: tt::Spacing::Alone, + span: ast_attr.brackets_span.close, + })]; -fn is_not_closing_paren(element: &NodeOrToken) -> bool { - !matches!(element, NodeOrToken::Token(token) if (token.kind() == syntax::T![')'])) -} -fn is_not_whitespace(element: &NodeOrToken) -> bool { - !matches!(element, NodeOrToken::Token(token) if (token.kind() == SyntaxKind::WHITESPACE)) -} + ast_attr.next_expanded_attr = NextExpandedAttrState::NotStarted; + ast_attr.expanded_attrs_idx += 1; -fn next_cfg_expr_from_syntax(iter: &mut Peekable) -> Option -where - I: Iterator>, -{ - let name = match iter.next() { - None => return None, - Some(NodeOrToken::Token(element)) => match element.kind() { - syntax::T![ident] => Symbol::intern(element.text()), - _ => return Some(CfgExpr::Invalid), - }, - Some(_) => return Some(CfgExpr::Invalid), - }; - let result = match &name { - s if [&sym::all, &sym::any, &sym::not].contains(&s) => { - let mut preds = Vec::new(); - let Some(NodeOrToken::Node(tree)) = iter.next() else { - return Some(CfgExpr::Invalid); - }; - let mut tree_iter = tree - .token_trees_and_tokens() - .filter(is_not_whitespace) - .skip(1) - .take_while(is_not_closing_paren) - .peekable(); - while tree_iter.peek().is_some() { - let pred = next_cfg_expr_from_syntax(&mut tree_iter); - if let Some(pred) = pred { - preds.push(pred); - } - } - let group = match &name { - s if *s == sym::all => CfgExpr::All(preds.into_boxed_slice()), - s if *s == sym::any => CfgExpr::Any(preds.into_boxed_slice()), - s if *s == sym::not => { - CfgExpr::Not(Box::new(preds.pop().unwrap_or(CfgExpr::Invalid))) - } - _ => unreachable!(), - }; - Some(group) - } - _ => match iter.peek() { - Some(NodeOrToken::Token(element)) if (element.kind() == syntax::T![=]) => { - iter.next(); - match iter.next() { - Some(NodeOrToken::Token(value_token)) - if (value_token.kind() == syntax::SyntaxKind::STRING) => - { - let value = value_token.text(); - Some(CfgExpr::Atom(CfgAtom::KeyValue { - key: name, - value: Symbol::intern(value.trim_matches('"')), - })) + // It's safe to ignore the current token because between attributes + // there is always at least one token we skip - either the closing bracket + // in `#[]` or the comma in case of 
multiple attrs in `cfg_attr` expansion. + return (false, insert_tokens); + } else { + // Still in the middle. + return (true, Vec::new()); + } } - _ => None, } } - _ => Some(CfgExpr::Atom(CfgAtom::Flag(name))), - }, - }; - if let Some(NodeOrToken::Token(element)) = iter.peek() - && element.kind() == syntax::T![,] - { - iter.next(); + WalkEvent::Leave(SyntaxElement::Token(_)) => {} + } + (true, Vec::new()) } - result } -#[cfg(test)] -mod tests { - use cfg::DnfExpr; - use expect_test::{Expect, expect}; - use syntax::{AstNode, SourceFile, ast::Attr}; - - use crate::cfg_process::parse_from_attr_token_tree; - fn check_dnf_from_syntax(input: &str, expect: Expect) { - let parse = SourceFile::parse(input, span::Edition::CURRENT); - let node = match parse.tree().syntax().descendants().find_map(Attr::cast) { - Some(it) => it, - None => { - let node = std::any::type_name::(); - panic!("Failed to make ast node `{node}` from text {input}") - } - }; - let node = node.clone_subtree(); - assert_eq!(node.syntax().text_range().start(), 0.into()); +pub(crate) fn attr_macro_input_to_token_tree( + db: &dyn ExpandDatabase, + node: &SyntaxNode, + span_map: SpanMapRef<'_>, + span: Span, + is_derive: bool, + censor_item_tree_attr_ids: &[AttrId], + krate: Crate, +) -> (tt::TopSubtree, SyntaxFixupUndoInfo) { + let fixups = fixup::fixup_syntax(span_map, node, span, DocCommentDesugarMode::ProcMacro); + ( + syntax_bridge::syntax_node_to_token_tree_modified( + node, + span_map, + fixups.append, + fixups.remove, + span, + DocCommentDesugarMode::ProcMacro, + macro_input_callback(db, is_derive, censor_item_tree_attr_ids, krate, span, span_map), + ), + fixups.undo_info, + ) +} - let cfg = parse_from_attr_token_tree(&node.meta().unwrap().token_tree().unwrap()).unwrap(); - let actual = format!("#![cfg({})]", DnfExpr::new(&cfg)); - expect.assert_eq(&actual); - } - #[test] - fn cfg_from_attr() { - check_dnf_from_syntax(r#"#[cfg(test)]"#, expect![[r#"#![cfg(test)]"#]]); - check_dnf_from_syntax(r#"#[cfg(not(never))]"#, expect![[r#"#![cfg(not(never))]"#]]); - } +pub fn check_cfg_attr_value( + db: &dyn ExpandDatabase, + attr: &ast::TokenTree, + krate: Crate, +) -> Option { + let cfg_expr = CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(attr).peekable()); + krate.cfg_options(db).check(&cfg_expr) } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs index 888c1405a6bb1..6b5aa39fa6bf1 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs @@ -1,11 +1,9 @@ //! Defines database & queries for macro expansion. 
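
`check_cfg_attr_value` above now just forwards the tri-state result of `CfgOptions::check`: `Some(true)` or `Some(false)` when the predicate can be decided, `None` when it cannot (for example an invalid expression, as assumed here), and callers throughout this patch treat anything that is not definitely false as active (`check(..) != Some(false)`). A minimal standalone model of that convention, with a flat list of enabled flags standing in for `CfgOptions`:

```rust
/// Tri-state cfg check: `Some(_)` when the predicate is decidable,
/// `None` when it is not (here: a deliberately "invalid" predicate).
fn check_flag(enabled: &[&str], pred: &str) -> Option<bool> {
    match pred {
        "invalid" => None,
        _ => Some(enabled.contains(&pred)),
    }
}

fn main() {
    let enabled = ["test", "unix"];
    assert_eq!(check_flag(&enabled, "test"), Some(true));
    assert_eq!(check_flag(&enabled, "windows"), Some(false));
    assert_eq!(check_flag(&enabled, "invalid"), None);

    // `cfg_attr` is considered active unless the check is definitely false.
    let active = |pred: &str| check_flag(&enabled, pred) != Some(false);
    assert!(active("test"));
    assert!(active("invalid"));
    assert!(!active("windows"));
}
```
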
use base_db::{Crate, RootQueryDb}; -use either::Either; use mbe::MatchedArmIndex; -use rustc_hash::FxHashSet; use span::{AstIdMap, Edition, Span, SyntaxContext}; -use syntax::{AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T, ast}; +use syntax::{AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T, ast}; use syntax_bridge::{DocCommentDesugarMode, syntax_node_to_token_tree}; use triomphe::Arc; @@ -13,9 +11,9 @@ use crate::{ AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, EagerExpander, EditionedFileId, ExpandError, ExpandResult, ExpandTo, HirFileId, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, - attrs::{AttrId, AttrInput, RawAttrs, collect_attrs}, + attrs::Meta, builtin::pseudo_derive_attr_expansion, - cfg_process, + cfg_process::attr_macro_input_to_token_tree, declarative::DeclarativeMacroExpander, fixup::{self, SyntaxFixupUndoInfo}, hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt}, @@ -177,7 +175,7 @@ pub fn expand_speculative( let span_map = SpanMapRef::RealSpanMap(&span_map); // Build the subtree and token mapping for the speculative args - let (mut tt, undo_info) = match loc.kind { + let (mut tt, undo_info) = match &loc.kind { MacroCallKind::FnLike { .. } => ( syntax_bridge::syntax_node_to_token_tree( speculative_args, @@ -200,48 +198,35 @@ pub fn expand_speculative( ), SyntaxFixupUndoInfo::NONE, ), - MacroCallKind::Derive { derive_attr_index: index, .. } - | MacroCallKind::Attr { invoc_attr_index: index, .. } => { - let censor = if let MacroCallKind::Derive { .. } = loc.kind { - censor_derive_input(index, &ast::Adt::cast(speculative_args.clone())?) - } else { - attr_source(index, &ast::Item::cast(speculative_args.clone())?) - .into_iter() - .map(|it| it.syntax().clone().into()) - .collect() + MacroCallKind::Derive { derive_macro_id, .. } => { + let MacroCallKind::Attr { censored_attr_ids: attr_ids, .. } = + &derive_macro_id.loc(db).kind + else { + unreachable!("`derive_macro_id` should be `MacroCallKind::Attr`"); }; - - let censor_cfg = - cfg_process::process_cfg_attrs(db, speculative_args, &loc).unwrap_or_default(); - let mut fixups = fixup::fixup_syntax( - span_map, + attr_macro_input_to_token_tree( + db, speculative_args, + span_map, span, - DocCommentDesugarMode::ProcMacro, - ); - fixups.append.retain(|it, _| match it { - syntax::NodeOrToken::Token(_) => true, - it => !censor.contains(it) && !censor_cfg.contains(it), - }); - fixups.remove.extend(censor); - fixups.remove.extend(censor_cfg); - - ( - syntax_bridge::syntax_node_to_token_tree_modified( - speculative_args, - span_map, - fixups.append, - fixups.remove, - span, - DocCommentDesugarMode::ProcMacro, - ), - fixups.undo_info, + true, + attr_ids, + loc.krate, ) } + MacroCallKind::Attr { censored_attr_ids: attr_ids, .. } => attr_macro_input_to_token_tree( + db, + speculative_args, + span_map, + span, + false, + attr_ids, + loc.krate, + ), }; - let attr_arg = match loc.kind { - MacroCallKind::Attr { invoc_attr_index, .. } => { + let attr_arg = match &loc.kind { + MacroCallKind::Attr { censored_attr_ids: attr_ids, .. 
} => { if loc.def.is_attribute_derive() { // for pseudo-derive expansion we actually pass the attribute itself only ast::Attr::cast(speculative_args.clone()).and_then(|attr| attr.token_tree()).map( @@ -260,18 +245,21 @@ pub fn expand_speculative( // Attributes may have an input token tree, build the subtree and map for this as well // then try finding a token id for our token if it is inside this input subtree. let item = ast::Item::cast(speculative_args.clone())?; - let attrs = RawAttrs::new_expanded(db, &item, span_map, loc.krate.cfg_options(db)); - attrs.iter().find(|attr| attr.id == invoc_attr_index).and_then(|attr| { - match attr.input.as_deref()? { - AttrInput::TokenTree(tt) => { - let mut attr_arg = tt.clone(); - attr_arg.top_subtree_delimiter_mut().kind = - tt::DelimiterKind::Invisible; - Some(attr_arg) - } - AttrInput::Literal(_) => None, + let (_, _, _, meta) = + attr_ids.invoc_attr().find_attr_range_with_source(db, loc.krate, &item); + match meta { + Meta::TokenTree { tt, .. } => { + let mut attr_arg = syntax_bridge::syntax_node_to_token_tree( + tt.syntax(), + span_map, + span, + DocCommentDesugarMode::ProcMacro, + ); + attr_arg.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Invisible; + Some(attr_arg) } - }) + _ => None, + } } } _ => None, @@ -433,7 +421,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult { let (parse, map) = parse_with_map(db, loc.kind.file_id()); let root = parse.syntax_node(); - let (censor, item_node, span) = match loc.kind { + let (is_derive, censor_item_tree_attr_ids, item_node, span) = match &loc.kind { MacroCallKind::FnLike { ast_id, .. } => { let node = &ast_id.to_ptr(db).to_node(&root); let path_range = node @@ -501,53 +489,29 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult { MacroCallKind::Derive { .. } => { unreachable!("`ExpandDatabase::macro_arg` called with `MacroCallKind::Derive`") } - MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => { + MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. 
} => { let node = ast_id.to_ptr(db).to_node(&root); - let attr_source = attr_source(invoc_attr_index, &node); - - let span = map.span_for_range( - attr_source - .as_ref() - .and_then(|it| it.path()) - .map_or_else(|| node.syntax().text_range(), |it| it.syntax().text_range()), - ); - // If derive attribute we need to censor the derive input - if matches!(loc.def.kind, MacroDefKind::BuiltInAttr(_, expander) if expander.is_derive()) - && ast::Adt::can_cast(node.syntax().kind()) - { - let adt = ast::Adt::cast(node.syntax().clone()).unwrap(); - let censor_derive_input = censor_derive_input(invoc_attr_index, &adt); - (censor_derive_input, node, span) - } else { - (attr_source.into_iter().map(|it| it.syntax().clone().into()).collect(), node, span) - } + let range = attr_ids + .invoc_attr() + .find_attr_range_with_source(db, loc.krate, &node) + .3 + .path_range(); + let span = map.span_for_range(range); + + let is_derive = matches!(loc.def.kind, MacroDefKind::BuiltInAttr(_, expander) if expander.is_derive()); + (is_derive, &**attr_ids, node, span) } }; - let (mut tt, undo_info) = { - let syntax = item_node.syntax(); - let censor_cfg = cfg_process::process_cfg_attrs(db, syntax, &loc).unwrap_or_default(); - let mut fixups = - fixup::fixup_syntax(map.as_ref(), syntax, span, DocCommentDesugarMode::ProcMacro); - fixups.append.retain(|it, _| match it { - syntax::NodeOrToken::Token(_) => true, - it => !censor.contains(it) && !censor_cfg.contains(it), - }); - fixups.remove.extend(censor); - fixups.remove.extend(censor_cfg); - - ( - syntax_bridge::syntax_node_to_token_tree_modified( - syntax, - map, - fixups.append, - fixups.remove, - span, - DocCommentDesugarMode::ProcMacro, - ), - fixups.undo_info, - ) - }; + let (mut tt, undo_info) = attr_macro_input_to_token_tree( + db, + item_node.syntax(), + map.as_ref(), + span, + is_derive, + censor_item_tree_attr_ids, + loc.krate, + ); if loc.def.is_proc_macro() { // proc macros expect their inputs without parentheses, MBEs expect it with them included @@ -557,31 +521,6 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult { (Arc::new(tt), undo_info, span) } -// FIXME: Censoring info should be calculated by the caller! Namely by name resolution -/// Derives expect all `#[derive(..)]` invocations up to (and including) the currently invoked one to be stripped -fn censor_derive_input(derive_attr_index: AttrId, node: &ast::Adt) -> FxHashSet { - // FIXME: handle `cfg_attr` - cov_mark::hit!(derive_censoring); - collect_attrs(node) - .take(derive_attr_index.ast_index() + 1) - .filter_map(|(_, attr)| Either::left(attr)) - // FIXME, this resolution should not be done syntactically - // derive is a proper macro now, no longer builtin - // But we do not have resolution at this stage, this means - // we need to know about all macro calls for the given ast item here - // so we require some kind of mapping... 
- .filter(|attr| attr.simple_name().as_deref() == Some("derive")) - .map(|it| it.syntax().clone().into()) - .collect() -} - -/// Attributes expect the invoking attribute to be stripped -fn attr_source(invoc_attr_index: AttrId, node: &ast::Item) -> Option { - // FIXME: handle `cfg_attr` - cov_mark::hit!(attribute_macro_attr_censoring); - collect_attrs(node).nth(invoc_attr_index.ast_index()).and_then(|(_, attr)| Either::left(attr)) -} - impl TokenExpander { fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander { match id.kind { diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs b/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs index 0d100c1364ab1..3fb9aca9649ef 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs @@ -1,16 +1,20 @@ //! Compiled declarative macro expanders (`macro_rules!` and `macro`) +use std::{cell::OnceCell, ops::ControlFlow}; + use base_db::Crate; -use intern::sym; use span::{Edition, Span, SyntaxContext}; use stdx::TupleExt; -use syntax::{AstNode, ast}; +use syntax::{ + AstNode, AstToken, + ast::{self, HasAttrs}, +}; use syntax_bridge::DocCommentDesugarMode; use triomphe::Arc; use crate::{ AstId, ExpandError, ExpandErrorKind, ExpandResult, HirFileId, Lookup, MacroCallId, - attrs::RawAttrs, + attrs::{Meta, expand_cfg_attr}, db::ExpandDatabase, hygiene::{Transparency, apply_mark}, tt, @@ -80,29 +84,28 @@ impl DeclarativeMacroExpander { let (root, map) = crate::db::parse_with_map(db, id.file_id); let root = root.syntax_node(); - let transparency = |node| { - // ... would be nice to have the item tree here - let attrs = RawAttrs::new_expanded(db, node, map.as_ref(), def_crate.cfg_options(db)); - match attrs - .iter() - .find(|it| { - it.path - .as_ident() - .map(|it| *it == sym::rustc_macro_transparency) - .unwrap_or(false) - })? - .token_tree_value()? - .token_trees() - .flat_tokens() - { - [tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &i.sym { - s if *s == sym::transparent => Some(Transparency::Transparent), - s if *s == sym::semitransparent => Some(Transparency::SemiTransparent), - s if *s == sym::opaque => Some(Transparency::Opaque), - _ => None, + let transparency = |node: ast::AnyHasAttrs| { + let cfg_options = OnceCell::new(); + expand_cfg_attr( + node.attrs(), + || cfg_options.get_or_init(|| def_crate.cfg_options(db)), + |attr, _, _, _| { + if let Meta::NamedKeyValue { name: Some(name), value, .. 
} = attr + && name.text() == "rustc_macro_transparency" + && let Some(value) = value.and_then(ast::String::cast) + && let Ok(value) = value.value() + { + match &*value { + "transparent" => ControlFlow::Break(Transparency::Transparent), + "semitransparent" => ControlFlow::Break(Transparency::SemiTransparent), + "opaque" => ControlFlow::Break(Transparency::Opaque), + _ => ControlFlow::Continue(()), + } + } else { + ControlFlow::Continue(()) + } }, - _ => None, - } + ) }; let ctx_edition = |ctx: SyntaxContext| { if ctx.is_root() { @@ -133,7 +136,8 @@ impl DeclarativeMacroExpander { "expected a token tree".into(), )), }, - transparency(¯o_rules).unwrap_or(Transparency::SemiTransparent), + transparency(ast::AnyHasAttrs::from(macro_rules)) + .unwrap_or(Transparency::SemiTransparent), ), ast::Macro::MacroDef(macro_def) => ( match macro_def.body() { @@ -161,7 +165,7 @@ impl DeclarativeMacroExpander { "expected a token tree".into(), )), }, - transparency(¯o_def).unwrap_or(Transparency::Opaque), + transparency(macro_def.into()).unwrap_or(Transparency::Opaque), ), }; let edition = ctx_edition(match id.file_id { diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs index a7f3e27a45539..fe557d68023d8 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs @@ -55,30 +55,6 @@ impl From for HirFilePosition { } } -impl FilePositionWrapper { - pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> FilePosition { - FilePositionWrapper { - file_id: EditionedFileId::new(db, self.file_id, edition), - offset: self.offset, - } - } -} - -impl FileRangeWrapper { - pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> FileRange { - FileRangeWrapper { - file_id: EditionedFileId::new(db, self.file_id, edition), - range: self.range, - } - } -} - -impl InFileWrapper { - pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> InRealFile { - InRealFile { file_id: EditionedFileId::new(db, self.file_id, edition), value: self.value } - } -} - impl HirFileRange { pub fn file_range(self) -> Option { Some(FileRange { file_id: self.file_id.file_id()?, range: self.range }) @@ -407,7 +383,7 @@ impl InFile { // Fall back to whole macro call. let loc = db.lookup_intern_macro_call(mac_file); - loc.kind.original_call_range(db) + loc.kind.original_call_range(db, loc.krate) } } } @@ -453,7 +429,10 @@ impl InFile { Some(it) => it, None => { let loc = db.lookup_intern_macro_call(mac_file); - (loc.kind.original_call_range(db), SyntaxContext::root(loc.def.edition)) + ( + loc.kind.original_call_range(db, loc.krate), + SyntaxContext::root(loc.def.edition), + ) } } } @@ -468,7 +447,7 @@ impl InFile { Some(it) => it, _ => { let loc = db.lookup_intern_macro_call(mac_file); - loc.kind.original_call_range(db) + loc.kind.original_call_range(db, loc.krate) } } } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs index fe77e1565987f..cba1c7c1d4b05 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs @@ -523,6 +523,7 @@ mod tests { fixups.remove, span_map.span_for_range(TextRange::empty(0.into())), DocCommentDesugarMode::Mbe, + |_, _| (true, Vec::new()), ); let actual = format!("{tt}\n"); @@ -698,7 +699,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {a . 
__ra_fixup ;} +fn foo () {a .__ra_fixup ;} "#]], ) } @@ -713,7 +714,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {a . __ra_fixup ; bar () ;} +fn foo () {a .__ra_fixup ; bar () ;} "#]], ) } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs index 472ec83ffef5b..e1103ef43e0f3 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs @@ -25,18 +25,17 @@ mod cfg_process; mod fixup; mod prettify_macro_expansion_; -use attrs::collect_attrs; -use rustc_hash::FxHashMap; use salsa::plumbing::{AsId, FromId}; use stdx::TupleExt; +use thin_vec::ThinVec; use triomphe::Arc; use core::fmt; -use std::hash::Hash; +use std::{hash::Hash, ops}; use base_db::Crate; use either::Either; -use span::{Edition, ErasedFileAstId, FileAstId, Span, SpanAnchor, SyntaxContext}; +use span::{Edition, ErasedFileAstId, FileAstId, Span, SyntaxContext}; use syntax::{ SyntaxNode, SyntaxToken, TextRange, TextSize, ast::{self, AstNode}, @@ -317,9 +316,6 @@ pub enum MacroCallKind { Derive { ast_id: AstId, /// Syntactical index of the invoking `#[derive]` attribute. - /// - /// Outer attributes are counted first, then inner attributes. This does not support - /// out-of-line modules, which may have attributes spread across 2 files! derive_attr_index: AttrId, /// Index of the derive macro in the derive attribute derive_index: u32, @@ -329,17 +325,68 @@ pub enum MacroCallKind { }, Attr { ast_id: AstId, - // FIXME: This shouldn't be here, we can derive this from `invoc_attr_index` - // but we need to fix the `cfg_attr` handling first. + // FIXME: This shouldn't be here, we can derive this from `invoc_attr_index`. attr_args: Option>, - /// Syntactical index of the invoking `#[attribute]`. + /// This contains the list of all *active* attributes (derives and attr macros) preceding this + /// attribute, including this attribute. You can retrieve the [`AttrId`] of the current attribute + /// by calling [`invoc_attr()`] on this. + /// + /// The macro should not see the attributes here. /// - /// Outer attributes are counted first, then inner attributes. This does not support - /// out-of-line modules, which may have attributes spread across 2 files! 
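
`AttrMacroAttrIds`, introduced just below, stores the censored attribute ids inline when there is exactly one (the common case for an attribute macro) and only heap-allocates for the many-derives case; `invoc_attr()` is defined as the last id in the list. A standalone sketch of that one-or-many layout, with `u32` ids and a plain `Vec` standing in for `ThinVec`:

```rust
/// One-or-many storage: avoid an allocation for the single-id common case.
#[derive(Debug, Clone, PartialEq, Eq)]
enum OneOrMany {
    One(u32),
    Many(Vec<u32>),
}

impl OneOrMany {
    fn from_slice(ids: &[u32]) -> Self {
        match ids {
            &[id] => OneOrMany::One(id),
            _ => OneOrMany::Many(ids.to_vec()),
        }
    }

    fn as_slice(&self) -> &[u32] {
        match self {
            OneOrMany::One(id) => std::slice::from_ref(id),
            OneOrMany::Many(ids) => ids,
        }
    }

    /// The invoking attribute is the last censored one.
    fn invoc(&self) -> u32 {
        *self.as_slice().last().expect("always at least one id")
    }
}

fn main() {
    assert_eq!(OneOrMany::from_slice(&[7]), OneOrMany::One(7));
    let many = OneOrMany::from_slice(&[1, 2, 5]);
    assert_eq!(many.as_slice(), &[1, 2, 5]);
    assert_eq!(many.invoc(), 5);
}
```
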
- invoc_attr_index: AttrId, + /// [`invoc_attr()`]: AttrMacroAttrIds::invoc_attr + censored_attr_ids: AttrMacroAttrIds, }, } +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct AttrMacroAttrIds(AttrMacroAttrIdsRepr); + +impl AttrMacroAttrIds { + #[inline] + pub fn from_one(id: AttrId) -> Self { + Self(AttrMacroAttrIdsRepr::One(id)) + } + + #[inline] + pub fn from_many(ids: &[AttrId]) -> Self { + if let &[id] = ids { + Self(AttrMacroAttrIdsRepr::One(id)) + } else { + Self(AttrMacroAttrIdsRepr::ManyDerives(ids.iter().copied().collect())) + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +enum AttrMacroAttrIdsRepr { + One(AttrId), + ManyDerives(ThinVec), +} + +impl ops::Deref for AttrMacroAttrIds { + type Target = [AttrId]; + + #[inline] + fn deref(&self) -> &Self::Target { + match &self.0 { + AttrMacroAttrIdsRepr::One(one) => std::slice::from_ref(one), + AttrMacroAttrIdsRepr::ManyDerives(many) => many, + } + } +} + +impl AttrMacroAttrIds { + #[inline] + pub fn invoc_attr(&self) -> AttrId { + match &self.0 { + AttrMacroAttrIdsRepr::One(it) => *it, + AttrMacroAttrIdsRepr::ManyDerives(it) => { + *it.last().expect("should always have at least one `AttrId`") + } + } + } +} + impl HirFileId { pub fn edition(self, db: &dyn ExpandDatabase) -> Edition { match self { @@ -583,34 +630,20 @@ impl MacroDefId { impl MacroCallLoc { pub fn to_node(&self, db: &dyn ExpandDatabase) -> InFile { - match self.kind { + match &self.kind { MacroCallKind::FnLike { ast_id, .. } => { ast_id.with_value(ast_id.to_node(db).syntax().clone()) } MacroCallKind::Derive { ast_id, derive_attr_index, .. } => { // FIXME: handle `cfg_attr` - ast_id.with_value(ast_id.to_node(db)).map(|it| { - collect_attrs(&it) - .nth(derive_attr_index.ast_index()) - .and_then(|it| match it.1 { - Either::Left(attr) => Some(attr.syntax().clone()), - Either::Right(_) => None, - }) - .unwrap_or_else(|| it.syntax().clone()) - }) + let (attr, _, _, _) = derive_attr_index.find_attr_range(db, self.krate, *ast_id); + ast_id.with_value(attr.syntax().clone()) } - MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => { + MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => { if self.def.is_attribute_derive() { - // FIXME: handle `cfg_attr` - ast_id.with_value(ast_id.to_node(db)).map(|it| { - collect_attrs(&it) - .nth(invoc_attr_index.ast_index()) - .and_then(|it| match it.1 { - Either::Left(attr) => Some(attr.syntax().clone()), - Either::Right(_) => None, - }) - .unwrap_or_else(|| it.syntax().clone()) - }) + let (attr, _, _, _) = + attr_ids.invoc_attr().find_attr_range(db, self.krate, *ast_id); + ast_id.with_value(attr.syntax().clone()) } else { ast_id.with_value(ast_id.to_node(db).syntax().clone()) } @@ -715,7 +748,7 @@ impl MacroCallKind { /// Here we try to roughly match what rustc does to improve diagnostics: fn-like macros /// get the macro path (rustc shows the whole `ast::MacroCall`), attribute macros get the /// attribute's range, and derives get only the specific derive that is being referred to. - pub fn original_call_range(self, db: &dyn ExpandDatabase) -> FileRange { + pub fn original_call_range(self, db: &dyn ExpandDatabase, krate: Crate) -> FileRange { let mut kind = self; let file_id = loop { match kind.file_id() { @@ -737,24 +770,11 @@ impl MacroCallKind { } MacroCallKind::Derive { ast_id, derive_attr_index, .. 
} => { // FIXME: should be the range of the macro name, not the whole derive - // FIXME: handle `cfg_attr` - collect_attrs(&ast_id.to_node(db)) - .nth(derive_attr_index.ast_index()) - .expect("missing derive") - .1 - .expect_left("derive is a doc comment?") - .syntax() - .text_range() + derive_attr_index.find_attr_range(db, krate, ast_id).2 } // FIXME: handle `cfg_attr` - MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => { - collect_attrs(&ast_id.to_node(db)) - .nth(invoc_attr_index.ast_index()) - .expect("missing attribute") - .1 - .expect_left("attribute macro is a doc comment?") - .syntax() - .text_range() + MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => { + attr_ids.invoc_attr().find_attr_range(db, krate, ast_id).2 } }; @@ -873,7 +893,8 @@ impl ExpansionInfo { let span = self.exp_map.span_at(token.start()); match &self.arg_map { SpanMap::RealSpanMap(_) => { - let file_id = EditionedFileId::from_span(db, span.anchor.file_id).into(); + let file_id = + EditionedFileId::from_span_guess_origin(db, span.anchor.file_id).into(); let anchor_offset = db.ast_id_map(file_id).get_erased(span.anchor.ast_id).text_range().start(); InFile { file_id, value: smallvec::smallvec![span.range + anchor_offset] } @@ -929,7 +950,7 @@ pub fn map_node_range_up_rooted( start = start.min(span.range.start()); end = end.max(span.range.end()); } - let file_id = EditionedFileId::from_span(db, anchor.file_id); + let file_id = EditionedFileId::from_span_guess_origin(db, anchor.file_id); let anchor_offset = db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start(); Some(FileRange { file_id, range: TextRange::new(start, end) + anchor_offset }) @@ -955,36 +976,12 @@ pub fn map_node_range_up( start = start.min(span.range.start()); end = end.max(span.range.end()); } - let file_id = EditionedFileId::from_span(db, anchor.file_id); + let file_id = EditionedFileId::from_span_guess_origin(db, anchor.file_id); let anchor_offset = db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start(); Some((FileRange { file_id, range: TextRange::new(start, end) + anchor_offset }, ctx)) } -/// Maps up the text range out of the expansion hierarchy back into the original file its from. -/// This version will aggregate the ranges of all spans with the same anchor and syntax context. -pub fn map_node_range_up_aggregated( - db: &dyn ExpandDatabase, - exp_map: &ExpansionSpanMap, - range: TextRange, -) -> FxHashMap<(SpanAnchor, SyntaxContext), TextRange> { - let mut map = FxHashMap::default(); - for span in exp_map.spans_for_range(range) { - let range = map.entry((span.anchor, span.ctx)).or_insert_with(|| span.range); - *range = TextRange::new( - range.start().min(span.range.start()), - range.end().max(span.range.end()), - ); - } - for ((anchor, _), range) in &mut map { - let file_id = EditionedFileId::from_span(db, anchor.file_id); - let anchor_offset = - db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start(); - *range += anchor_offset; - } - map -} - /// Looks up the span at the given offset. 
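
The `map_node_range_up*` helpers above collapse all spans of an expanded range into one file range by folding with the minimum start and maximum end, and then add the anchor's offset within the original file. The folding step on its own, with plain `u32` offsets standing in for `TextRange` and the anchor offset left out:

```rust
/// Cover of a set of (start, end) ranges: smallest start, largest end.
fn cover(ranges: &[(u32, u32)]) -> Option<(u32, u32)> {
    let (&first, rest) = ranges.split_first()?;
    Some(rest.iter().fold(first, |(start, end), &(s, e)| (start.min(s), end.max(e))))
}

fn main() {
    // Three spans inside an expansion that all originate from the same file.
    assert_eq!(cover(&[(10, 14), (4, 8), (20, 26)]), Some((4, 26)));
    assert_eq!(cover(&[]), None);
}
```
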
pub fn span_for_offset( db: &dyn ExpandDatabase, @@ -992,7 +989,7 @@ pub fn span_for_offset( offset: TextSize, ) -> (FileRange, SyntaxContext) { let span = exp_map.span_at(offset); - let file_id = EditionedFileId::from_span(db, span.anchor.file_id); + let file_id = EditionedFileId::from_span_guess_origin(db, span.anchor.file_id); let anchor_offset = db.ast_id_map(file_id.into()).get_erased(span.anchor.ast_id).text_range().start(); (FileRange { file_id, range: span.range + anchor_offset }, span.ctx) @@ -1062,7 +1059,7 @@ impl ExpandTo { } } -intern::impl_internable!(ModPath, attrs::AttrInput); +intern::impl_internable!(ModPath); #[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)] #[doc(alias = "MacroFileId")] @@ -1125,6 +1122,14 @@ impl HirFileId { HirFileId::MacroFile(_) => None, } } + + #[inline] + pub fn krate(self, db: &dyn ExpandDatabase) -> Crate { + match self { + HirFileId::FileId(it) => it.krate(db), + HirFileId::MacroFile(it) => it.loc(db).krate, + } + } } impl PartialEq for HirFileId { diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs index d84d978cdb7ed..e9805e3f86b8c 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs @@ -2,7 +2,7 @@ use std::{ fmt::{self, Display as _}, - iter, + iter::{self, Peekable}, }; use crate::{ @@ -12,10 +12,11 @@ use crate::{ tt, }; use base_db::Crate; -use intern::sym; +use intern::{Symbol, sym}; +use parser::T; use smallvec::SmallVec; use span::{Edition, SyntaxContext}; -use syntax::{AstNode, ast}; +use syntax::{AstNode, SyntaxToken, ast}; #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct ModPath { @@ -64,6 +65,58 @@ impl ModPath { ModPath { kind, segments: SmallVec::new_const() } } + pub fn from_tokens( + db: &dyn ExpandDatabase, + span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContext, + is_abs: bool, + segments: impl Iterator, + ) -> Option { + let mut segments = segments.peekable(); + let mut result = SmallVec::new_const(); + let path_kind = if is_abs { + PathKind::Abs + } else { + let first = segments.next()?; + match first.kind() { + T![crate] => PathKind::Crate, + T![self] => PathKind::Super(handle_super(&mut segments)), + T![super] => PathKind::Super(1 + handle_super(&mut segments)), + T![ident] => { + let first_text = first.text(); + if first_text == "$crate" { + let ctxt = span_for_range(first.text_range()); + resolve_crate_root(db, ctxt) + .map(PathKind::DollarCrate) + .unwrap_or(PathKind::Crate) + } else { + result.push(Name::new_symbol_root(Symbol::intern(first_text))); + PathKind::Plain + } + } + _ => return None, + } + }; + for segment in segments { + if segment.kind() != T![ident] { + return None; + } + result.push(Name::new_symbol_root(Symbol::intern(segment.text()))); + } + if result.is_empty() { + return None; + } + result.shrink_to_fit(); + return Some(ModPath { kind: path_kind, segments: result }); + + fn handle_super(segments: &mut Peekable>) -> u8 { + let mut result = 0; + while segments.next_if(|it| it.kind() == T![super]).is_some() { + result += 1; + } + result + } + } + pub fn segments(&self) -> &[Name] { &self.segments } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs b/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs index e5a778a95c7c9..8b0c0d72cd49d 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs +++ 
b/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs @@ -1,13 +1,12 @@ //! Span maps for real files and macro expansions. use span::{Span, SyntaxContext}; -use stdx::TupleExt; use syntax::{AstNode, TextRange, ast}; use triomphe::Arc; pub use span::RealSpanMap; -use crate::{HirFileId, MacroCallId, attrs::collect_attrs, db::ExpandDatabase}; +use crate::{HirFileId, MacroCallId, db::ExpandDatabase}; pub type ExpansionSpanMap = span::SpanMap; @@ -110,26 +109,24 @@ pub(crate) fn real_span_map( // them anchors too, but only if they have no attributes attached, as those might be proc-macros // and using different anchors inside of them will prevent spans from being joinable. tree.items().for_each(|item| match &item { - ast::Item::ExternBlock(it) - if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => - { + ast::Item::ExternBlock(it) if ast::attrs_including_inner(it).next().is_none() => { if let Some(extern_item_list) = it.extern_item_list() { pairs.extend( extern_item_list.extern_items().map(ast::Item::from).map(item_to_entry), ); } } - ast::Item::Impl(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => { + ast::Item::Impl(it) if ast::attrs_including_inner(it).next().is_none() => { if let Some(assoc_item_list) = it.assoc_item_list() { pairs.extend(assoc_item_list.assoc_items().map(ast::Item::from).map(item_to_entry)); } } - ast::Item::Module(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => { + ast::Item::Module(it) if ast::attrs_including_inner(it).next().is_none() => { if let Some(item_list) = it.item_list() { pairs.extend(item_list.items().map(item_to_entry)); } } - ast::Item::Trait(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => { + ast::Item::Trait(it) if ast::attrs_including_inner(it).next().is_none() => { if let Some(assoc_item_list) = it.assoc_item_list() { pairs.extend(assoc_item_list.assoc_items().map(ast::Item::from).map(item_to_entry)); } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs index 18ebe7d7a5395..0a6458562e15e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs @@ -6,6 +6,7 @@ mod tests; use base_db::Crate; use hir_def::{ EnumVariantId, GeneralConstId, HasModule, StaticId, + attrs::AttrFlags, expr_store::Body, hir::{Expr, ExprId}, type_ref::LiteralConstRef, @@ -198,7 +199,7 @@ pub(crate) fn const_eval_discriminant_variant<'db>( return Ok(value); } - let repr = db.enum_signature(loc.parent).repr; + let repr = AttrFlags::repr(db, loc.parent.into()); let is_signed = repr.and_then(|repr| repr.int).is_none_or(|int| int.is_signed()); let mir_body = db.monomorphized_mir_body( diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs index 0815e62f87eef..c0e223380bca8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs @@ -17,8 +17,8 @@ use std::fmt; use hir_def::{ AdtId, ConstId, EnumId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, - ModuleDefId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, db::DefDatabase, hir::Pat, - item_tree::FieldsShape, signatures::StaticFlags, src::HasSource, + ModuleDefId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, attrs::AttrFlags, + db::DefDatabase, hir::Pat, item_tree::FieldsShape, 
signatures::StaticFlags, src::HasSource, }; use hir_expand::{ HirFileId, @@ -201,7 +201,7 @@ impl<'a> DeclValidator<'a> { // Don't run the lint on extern "[not Rust]" fn items with the // #[no_mangle] attribute. - let no_mangle = self.db.attrs(func.into()).by_key(sym::no_mangle).exists(); + let no_mangle = AttrFlags::query(self.db, func.into()).contains(AttrFlags::NO_MANGLE); if no_mangle && data.abi.as_ref().is_some_and(|abi| *abi != sym::Rust) { cov_mark::hit!(extern_func_no_mangle_ignored); } else { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs index fb942e336e659..c70c6b6119446 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs @@ -2,7 +2,9 @@ use std::{cell::LazyCell, fmt}; -use hir_def::{EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId}; +use hir_def::{ + EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId, attrs::AttrFlags, +}; use intern::sym; use rustc_pattern_analysis::{ IndexVec, PatCx, PrivateUninhabitedField, @@ -118,7 +120,7 @@ impl<'a, 'db> MatchCheckCtx<'a, 'db> { /// Returns whether the given ADT is from another crate declared `#[non_exhaustive]`. fn is_foreign_non_exhaustive(&self, adt: hir_def::AdtId) -> bool { let is_local = adt.krate(self.db) == self.module.krate(); - !is_local && self.db.attrs(adt.into()).by_key(sym::non_exhaustive).exists() + !is_local && AttrFlags::query(self.db, adt.into()).contains(AttrFlags::NON_EXHAUSTIVE) } fn variant_id_for_adt( diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs index 53524d66a33c2..8ac7ab19cd3bf 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs @@ -144,7 +144,7 @@ struct UnsafeVisitor<'db> { inside_assignment: bool, inside_union_destructure: bool, callback: &'db mut dyn FnMut(UnsafeDiagnostic), - def_target_features: TargetFeatures, + def_target_features: TargetFeatures<'db>, // FIXME: This needs to be the edition of the span of each call. 
edition: Edition, /// On some targets (WASM), calling safe functions with `#[target_feature]` is always safe, even when @@ -162,7 +162,7 @@ impl<'db> UnsafeVisitor<'db> { ) -> Self { let resolver = def.resolver(db); let def_target_features = match def { - DefWithBodyId::FunctionId(func) => TargetFeatures::from_attrs(&db.attrs(func.into())), + DefWithBodyId::FunctionId(func) => TargetFeatures::from_fn(db, func), _ => TargetFeatures::default(), }; let krate = resolver.module().krate(); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index 9891f3f248bd8..03ae970acaa78 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -38,7 +38,7 @@ use hir_def::{ lang_item::{LangItem, LangItemTarget, lang_item}, layout::Integer, resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs}, - signatures::{ConstSignature, StaticSignature}, + signatures::{ConstSignature, EnumSignature, StaticSignature}, type_ref::{ConstRef, LifetimeRefId, TypeRefId}, }; use hir_expand::{mod_path::ModPath, name::Name}; @@ -104,7 +104,7 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc ctx.collect_const(c, &db.const_signature(c)), DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_signature(s)), DefWithBodyId::VariantId(v) => { - ctx.return_ty = match db.enum_signature(v.lookup(db).parent).variant_body_type() { + ctx.return_ty = match EnumSignature::variant_body_type(db, v.lookup(db).parent) { hir_def::layout::IntegerType::Pointer(signed) => match signed { true => ctx.types.isize, false => ctx.types.usize, @@ -759,7 +759,7 @@ pub(crate) struct InferenceContext<'body, 'db> { /// Generally you should not resolve things via this resolver. Instead create a TyLoweringContext /// and resolve the path via its methods. This will ensure proper error reporting. pub(crate) resolver: Resolver<'db>, - target_features: OnceCell<(TargetFeatures, TargetFeatureIsSafeInTarget)>, + target_features: OnceCell<(TargetFeatures<'db>, TargetFeatureIsSafeInTarget)>, pub(crate) generic_def: GenericDefId, table: unify::InferenceTable<'db>, /// The traits in scope, disregarding block modules. This is used for caching purposes. 
@@ -903,14 +903,14 @@ impl<'body, 'db> InferenceContext<'body, 'db> { } fn target_features<'a>( - db: &dyn HirDatabase, - target_features: &'a OnceCell<(TargetFeatures, TargetFeatureIsSafeInTarget)>, + db: &'db dyn HirDatabase, + target_features: &'a OnceCell<(TargetFeatures<'db>, TargetFeatureIsSafeInTarget)>, owner: DefWithBodyId, krate: Crate, - ) -> (&'a TargetFeatures, TargetFeatureIsSafeInTarget) { + ) -> (&'a TargetFeatures<'db>, TargetFeatureIsSafeInTarget) { let (target_features, target_feature_is_safe) = target_features.get_or_init(|| { let target_features = match owner { - DefWithBodyId::FunctionId(id) => TargetFeatures::from_attrs(&db.attrs(id.into())), + DefWithBodyId::FunctionId(id) => TargetFeatures::from_fn(db, id), _ => TargetFeatures::default(), }; let target_feature_is_safe = match &krate.workspace_data(db).target { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs index 78889ccb89a28..9b95eef0e0d6a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs @@ -37,11 +37,11 @@ use hir_def::{ CallableDefId, + attrs::AttrFlags, hir::{ExprId, ExprOrPatId}, lang_item::LangItem, signatures::FunctionSignature, }; -use intern::sym; use rustc_ast_ir::Mutability; use rustc_type_ir::{ BoundVar, TypeAndMut, @@ -76,7 +76,7 @@ use crate::{ struct Coerce<'a, 'b, 'db> { table: &'a mut InferenceTable<'db>, has_errors: &'a mut bool, - target_features: &'a mut dyn FnMut() -> (&'b TargetFeatures, TargetFeatureIsSafeInTarget), + target_features: &'a mut dyn FnMut() -> (&'b TargetFeatures<'db>, TargetFeatureIsSafeInTarget), use_lub: bool, /// Determines whether or not allow_two_phase_borrow is set on any /// autoref adjustments we create while coercing. We don't want to @@ -864,14 +864,14 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> { return Err(TypeError::IntrinsicCast); } - let attrs = self.table.db.attrs(def_id.into()); - if attrs.by_key(sym::rustc_force_inline).exists() { + let attrs = AttrFlags::query(self.table.db, def_id.into()); + if attrs.contains(AttrFlags::RUSTC_FORCE_INLINE) { return Err(TypeError::ForceInlineCast); } - if b_hdr.safety.is_safe() && attrs.by_key(sym::target_feature).exists() { + if b_hdr.safety.is_safe() && attrs.contains(AttrFlags::HAS_TARGET_FEATURE) { let fn_target_features = - TargetFeatures::from_attrs_no_implications(&attrs); + TargetFeatures::from_fn_no_implications(self.table.db, def_id); // Allow the coercion if the current function has all the features that would be // needed to call the coercee safely. 
let (target_features, target_feature_is_safe) = @@ -1056,7 +1056,7 @@ impl<'db> InferenceContext<'_, 'db> { let is_force_inline = |ty: Ty<'db>| { if let TyKind::FnDef(CallableIdWrapper(CallableDefId::FunctionId(did)), _) = ty.kind() { - self.db.attrs(did.into()).by_key(sym::rustc_force_inline).exists() + AttrFlags::query(self.db, did.into()).contains(AttrFlags::RUSTC_FORCE_INLINE) } else { false } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs index efb7244ff6375..a1d99a45287d8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs @@ -2365,9 +2365,11 @@ impl<'db> InferenceContext<'_, 'db> { }; let data = self.db.function_signature(func); - let Some(legacy_const_generics_indices) = &data.legacy_const_generics_indices else { + let Some(legacy_const_generics_indices) = data.legacy_const_generics_indices(self.db, func) + else { return Default::default(); }; + let mut legacy_const_generics_indices = Box::<[u32]>::from(legacy_const_generics_indices); // only use legacy const generics if the param count matches with them if data.params.len() + legacy_const_generics_indices.len() != args.len() { @@ -2376,9 +2378,8 @@ impl<'db> InferenceContext<'_, 'db> { } else { // there are more parameters than there should be without legacy // const params; use them - let mut indices = legacy_const_generics_indices.as_ref().clone(); - indices.sort(); - return indices; + legacy_const_generics_indices.sort_unstable(); + return legacy_const_generics_indices; } } @@ -2391,9 +2392,8 @@ impl<'db> InferenceContext<'_, 'db> { self.infer_expr(args[arg_idx as usize], &expected, ExprIsRead::Yes); // FIXME: evaluate and unify with the const } - let mut indices = legacy_const_generics_indices.as_ref().clone(); - indices.sort(); - indices + legacy_const_generics_indices.sort_unstable(); + legacy_const_generics_indices } /// Dereferences a single level of immutable referencing. 
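
The hunks in this patch repeatedly replace syntactic attribute lookups such as `db.attrs(def.into()).by_key(sym::no_mangle).exists()` with a single precomputed bitset queried via `AttrFlags::query(db, def.into()).contains(...)` or `.intersects(...)`. Below is a minimal, self-contained sketch of that pattern for orientation only: the struct definition, the numeric flag values, and the `compute_flags` helper are invented for the illustration and are not rust-analyzer's actual definitions (only the flag names mirror ones visible in the diff).

```rust
// Sketch of the "precompute attribute facts into a bitset" pattern (illustrative only).

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct AttrFlags(u32);

impl AttrFlags {
    const NO_MANGLE: AttrFlags = AttrFlags(1 << 0);
    const NON_EXHAUSTIVE: AttrFlags = AttrFlags(1 << 1);
    const RUSTC_ALLOCATOR: AttrFlags = AttrFlags(1 << 2);
    const RUSTC_DEALLOCATOR: AttrFlags = AttrFlags(1 << 3);

    const fn empty() -> Self {
        AttrFlags(0)
    }
    fn insert(&mut self, other: AttrFlags) {
        self.0 |= other.0;
    }
    /// True if *all* bits of `other` are set.
    fn contains(self, other: AttrFlags) -> bool {
        self.0 & other.0 == other.0
    }
    /// True if *any* bit of `other` is set.
    fn intersects(self, other: AttrFlags) -> bool {
        self.0 & other.0 != 0
    }
}

impl std::ops::BitOr for AttrFlags {
    type Output = AttrFlags;
    fn bitor(self, rhs: AttrFlags) -> AttrFlags {
        AttrFlags(self.0 | rhs.0)
    }
}

/// Hypothetical stand-in for the query: fold attribute names into flags once,
/// so call sites never re-walk `#[...]` token trees.
fn compute_flags<'a>(attr_names: impl IntoIterator<Item = &'a str>) -> AttrFlags {
    let mut flags = AttrFlags::empty();
    for name in attr_names {
        match name {
            "no_mangle" => flags.insert(AttrFlags::NO_MANGLE),
            "non_exhaustive" => flags.insert(AttrFlags::NON_EXHAUSTIVE),
            "rustc_allocator" => flags.insert(AttrFlags::RUSTC_ALLOCATOR),
            "rustc_deallocator" => flags.insert(AttrFlags::RUSTC_DEALLOCATOR),
            _ => {}
        }
    }
    flags
}

fn main() {
    let flags = compute_flags(["no_mangle", "non_exhaustive"]);
    assert!(flags.contains(AttrFlags::NO_MANGLE));
    assert!(flags.intersects(AttrFlags::RUSTC_ALLOCATOR | AttrFlags::NON_EXHAUSTIVE));
    assert!(!flags.contains(AttrFlags::RUSTC_DEALLOCATOR));
}
```

The payoff, as the call-site changes in the surrounding hunks suggest, is that consumers test cheap, copyable flags instead of re-parsing attribute token trees at every query.
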
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs index fc0b9d30b3333..b650f5c1a16a6 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs @@ -4,6 +4,7 @@ use std::fmt; use hir_def::{ AdtId, LocalFieldId, StructId, + attrs::AttrFlags, layout::{LayoutCalculatorError, LayoutData}, }; use la_arena::{Idx, RawIdx}; @@ -174,8 +175,7 @@ pub fn layout_of_ty_query<'db>( TyKind::Adt(def, args) => { match def.inner().id { hir_def::AdtId::StructId(s) => { - let data = db.struct_signature(s); - let repr = data.repr.unwrap_or_default(); + let repr = AttrFlags::repr(db, s.into()).unwrap_or_default(); if repr.simd() { return layout_of_simd_ty(db, s, repr.packed(), &args, trait_env, &target); } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs index a8f04bf8c132e..ecebf7935d06e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs @@ -4,9 +4,9 @@ use std::{cmp, ops::Bound}; use hir_def::{ AdtId, VariantId, + attrs::AttrFlags, signatures::{StructFlags, VariantFields}, }; -use intern::sym; use rustc_abi::{Integer, ReprOptions, TargetDataLayout}; use rustc_index::IndexVec; use smallvec::SmallVec; @@ -44,15 +44,15 @@ pub fn layout_of_adt_query<'db>( r.push(handle_variant(s.into(), s.fields(db))?); ( r, - sig.repr.unwrap_or_default(), + AttrFlags::repr(db, s.into()).unwrap_or_default(), sig.flags.intersects(StructFlags::IS_UNSAFE_CELL | StructFlags::IS_UNSAFE_PINNED), ) } AdtId::UnionId(id) => { - let data = db.union_signature(id); + let repr = AttrFlags::repr(db, id.into()); let mut r = SmallVec::new(); r.push(handle_variant(id.into(), id.fields(db))?); - (r, data.repr.unwrap_or_default(), false) + (r, repr.unwrap_or_default(), false) } AdtId::EnumId(e) => { let variants = e.enum_variants(db); @@ -61,7 +61,7 @@ pub fn layout_of_adt_query<'db>( .iter() .map(|&(v, _, _)| handle_variant(v.into(), v.fields(db))) .collect::, _>>()?; - (r, db.enum_signature(e).repr.unwrap_or_default(), false) + (r, AttrFlags::repr(db, e.into()).unwrap_or_default(), false) } }; let variants = variants @@ -105,27 +105,12 @@ pub(crate) fn layout_of_adt_cycle_result<'db>( } fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound, Bound) { - let attrs = db.attrs(def.into()); - let get = |name| { - let attr = attrs.by_key(name).tt_values(); - for tree in attr { - if let Some(it) = tree.iter().next_as_view() { - let text = it.to_string().replace('_', ""); - let (text, base) = match text.as_bytes() { - [b'0', b'x', ..] => (&text[2..], 16), - [b'0', b'o', ..] => (&text[2..], 8), - [b'0', b'b', ..] 
=> (&text[2..], 2), - _ => (&*text, 10), - }; - - if let Ok(it) = u128::from_str_radix(text, base) { - return Bound::Included(it); - } - } - } - Bound::Unbounded + let range = AttrFlags::rustc_layout_scalar_valid_range(db, def); + let get = |value| match value { + Some(it) => Bound::Included(it), + None => Bound::Unbounded, }; - (get(sym::rustc_layout_scalar_valid_range_start), get(sym::rustc_layout_scalar_valid_range_end)) + (get(range.start), get(range.end)) } /// Finds the appropriate Integer type and signedness for the given diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs index cec63566338f1..1b5f4595ca3cb 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs @@ -8,11 +8,11 @@ use base_db::Crate; use hir_def::{ AdtId, AssocItemId, BlockId, ConstId, FunctionId, HasModule, ImplId, ItemContainerId, Lookup, ModuleId, TraitId, TypeAliasId, + attrs::AttrFlags, nameres::{DefMap, block_def_map, crate_def_map}, signatures::{ConstFlags, EnumFlags, FnFlags, StructFlags, TraitFlags, TypeAliasFlags}, }; use hir_expand::name::Name; -use intern::sym; use rustc_ast_ir::Mutability; use rustc_hash::{FxHashMap, FxHashSet}; use rustc_type_ir::{ @@ -230,7 +230,8 @@ impl TraitImpls { // FIXME: Reservation impls should be considered during coherence checks. If we are // (ever) to implement coherence checks, this filtering should be done by the trait // solver. - if db.attrs(impl_id.into()).by_key(sym::rustc_reservation_impl).exists() { + if AttrFlags::query(db, impl_id.into()).contains(AttrFlags::RUSTC_RESERVATION_IMPL) + { continue; } let target_trait = match db.impl_trait(impl_id) { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs index 4b1adecf8c87d..d4aab2d094960 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs @@ -3,9 +3,11 @@ //! 
use std::cmp::{self, Ordering}; -use hir_def::{CrateRootModuleId, resolver::HasResolver, signatures::FunctionSignature}; +use hir_def::{ + CrateRootModuleId, attrs::AttrFlags, resolver::HasResolver, signatures::FunctionSignature, +}; use hir_expand::name::Name; -use intern::{Symbol, sym}; +use intern::sym; use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike, Ty as _}; use stdx::never; @@ -53,7 +55,7 @@ impl<'db> Evaluator<'db> { } let function_data = self.db.function_signature(def); - let attrs = self.db.attrs(def.into()); + let attrs = AttrFlags::query(self.db, def.into()); let is_intrinsic = FunctionSignature::is_intrinsic(self.db, def); if is_intrinsic { @@ -65,7 +67,7 @@ impl<'db> Evaluator<'db> { locals, span, !function_data.has_body() - || attrs.by_key(sym::rustc_intrinsic_must_be_overridden).exists(), + || attrs.contains(AttrFlags::RUSTC_INTRINSIC_MUST_BE_OVERRIDDEN), ); } let is_extern_c = match def.lookup(self.db).container { @@ -85,18 +87,13 @@ impl<'db> Evaluator<'db> { .map(|()| true); } - let alloc_fn = - attrs.iter().filter_map(|it| it.path().as_ident()).map(|it| it.symbol()).find(|it| { - [ - &sym::rustc_allocator, - &sym::rustc_deallocator, - &sym::rustc_reallocator, - &sym::rustc_allocator_zeroed, - ] - .contains(it) - }); - if let Some(alloc_fn) = alloc_fn { - self.exec_alloc_fn(alloc_fn, args, destination)?; + if attrs.intersects( + AttrFlags::RUSTC_ALLOCATOR + | AttrFlags::RUSTC_DEALLOCATOR + | AttrFlags::RUSTC_REALLOCATOR + | AttrFlags::RUSTC_ALLOCATOR_ZEROED, + ) { + self.exec_alloc_fn(attrs, args, destination)?; return Ok(true); } if let Some(it) = self.detect_lang_function(def) { @@ -245,12 +242,14 @@ impl<'db> Evaluator<'db> { fn exec_alloc_fn( &mut self, - alloc_fn: &Symbol, + alloc_fn: AttrFlags, args: &[IntervalAndTy<'db>], destination: Interval, ) -> Result<'db, ()> { match alloc_fn { - _ if *alloc_fn == sym::rustc_allocator_zeroed || *alloc_fn == sym::rustc_allocator => { + _ if alloc_fn + .intersects(AttrFlags::RUSTC_ALLOCATOR_ZEROED | AttrFlags::RUSTC_ALLOCATOR) => + { let [size, align] = args else { return Err(MirEvalError::InternalError( "rustc_allocator args are not provided".into(), @@ -261,8 +260,8 @@ impl<'db> Evaluator<'db> { let result = self.heap_allocate(size, align)?; destination.write_from_bytes(self, &result.to_bytes())?; } - _ if *alloc_fn == sym::rustc_deallocator => { /* no-op for now */ } - _ if *alloc_fn == sym::rustc_reallocator => { + _ if alloc_fn.contains(AttrFlags::RUSTC_DEALLOCATOR) => { /* no-op for now */ } + _ if alloc_fn.contains(AttrFlags::RUSTC_REALLOCATOR) => { let [ptr, old_size, align, new_size] = args else { return Err(MirEvalError::InternalError( "rustc_allocator args are not provided".into(), @@ -288,14 +287,14 @@ impl<'db> Evaluator<'db> { fn detect_lang_function(&self, def: FunctionId) -> Option { use LangItem::*; - let attrs = self.db.attrs(def.into()); + let attrs = AttrFlags::query(self.db, def.into()); - if attrs.by_key(sym::rustc_const_panic_str).exists() { + if attrs.contains(AttrFlags::RUSTC_CONST_PANIC_STR) { // `#[rustc_const_panic_str]` is treated like `lang = "begin_panic"` by rustc CTFE. return Some(LangItem::BeginPanic); } - let candidate = attrs.lang_item()?; + let candidate = attrs.lang_item_with_attrs(self.db, def.into())?; // We want to execute these functions with special logic // `PanicFmt` is not detected here as it's redirected later. 
if [BeginPanic, SliceLen, DropInPlace].contains(&candidate) { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs index 42f1d926d7db3..f372411830c1e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs @@ -8,6 +8,7 @@ use base_db::Crate; use hir_def::{ AdtId, AttrDefId, BlockId, CallableDefId, EnumVariantId, ItemContainerId, StructId, UnionId, VariantId, + attrs::AttrFlags, lang_item::LangItem, signatures::{FieldData, FnFlags, ImplFlags, StructFlags, TraitFlags}, }; @@ -466,28 +467,28 @@ impl AdtDef { let variants = vec![(VariantIdx(0), VariantDef::Struct(struct_id))]; - let mut repr = ReprOptions::default(); - repr.align = data.repr.and_then(|r| r.align); - repr.pack = data.repr.and_then(|r| r.pack); - repr.int = data.repr.and_then(|r| r.int); - + let data_repr = data.repr(db, struct_id); let mut repr_flags = ReprFlags::empty(); if flags.is_box { repr_flags.insert(ReprFlags::IS_LINEAR); } - if data.repr.is_some_and(|r| r.c()) { + if data_repr.is_some_and(|r| r.c()) { repr_flags.insert(ReprFlags::IS_C); } - if data.repr.is_some_and(|r| r.simd()) { + if data_repr.is_some_and(|r| r.simd()) { repr_flags.insert(ReprFlags::IS_SIMD); } - repr.flags = repr_flags; + let repr = ReprOptions { + align: data_repr.and_then(|r| r.align), + pack: data_repr.and_then(|r| r.pack), + int: data_repr.and_then(|r| r.int), + flags: repr_flags, + ..ReprOptions::default() + }; (flags, variants, repr) } AdtId::UnionId(union_id) => { - let data = db.union_signature(union_id); - let flags = AdtFlags { is_enum: false, is_union: true, @@ -500,22 +501,24 @@ impl AdtDef { let variants = vec![(VariantIdx(0), VariantDef::Union(union_id))]; - let mut repr = ReprOptions::default(); - repr.align = data.repr.and_then(|r| r.align); - repr.pack = data.repr.and_then(|r| r.pack); - repr.int = data.repr.and_then(|r| r.int); - + let data_repr = AttrFlags::repr(db, union_id.into()); let mut repr_flags = ReprFlags::empty(); if flags.is_box { repr_flags.insert(ReprFlags::IS_LINEAR); } - if data.repr.is_some_and(|r| r.c()) { + if data_repr.is_some_and(|r| r.c()) { repr_flags.insert(ReprFlags::IS_C); } - if data.repr.is_some_and(|r| r.simd()) { + if data_repr.is_some_and(|r| r.simd()) { repr_flags.insert(ReprFlags::IS_SIMD); } - repr.flags = repr_flags; + let repr = ReprOptions { + align: data_repr.and_then(|r| r.align), + pack: data_repr.and_then(|r| r.pack), + int: data_repr.and_then(|r| r.int), + flags: repr_flags, + ..ReprOptions::default() + }; (flags, variants, repr) } @@ -539,24 +542,26 @@ impl AdtDef { .map(|(idx, v)| (idx, VariantDef::Enum(v.0))) .collect(); - let data = db.enum_signature(enum_id); - - let mut repr = ReprOptions::default(); - repr.align = data.repr.and_then(|r| r.align); - repr.pack = data.repr.and_then(|r| r.pack); - repr.int = data.repr.and_then(|r| r.int); + let data_repr = AttrFlags::repr(db, enum_id.into()); let mut repr_flags = ReprFlags::empty(); if flags.is_box { repr_flags.insert(ReprFlags::IS_LINEAR); } - if data.repr.is_some_and(|r| r.c()) { + if data_repr.is_some_and(|r| r.c()) { repr_flags.insert(ReprFlags::IS_C); } - if data.repr.is_some_and(|r| r.simd()) { + if data_repr.is_some_and(|r| r.simd()) { repr_flags.insert(ReprFlags::IS_SIMD); } - repr.flags = repr_flags; + + let repr = ReprOptions { + align: data_repr.and_then(|r| r.align), + pack: data_repr.and_then(|r| r.pack), + int: data_repr.and_then(|r| 
r.int), + flags: repr_flags, + ..ReprOptions::default() + }; (flags, variants, repr) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/target_feature.rs b/src/tools/rust-analyzer/crates/hir-ty/src/target_feature.rs index 0a8ed2cf0cabd..2bd675ba124e4 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/target_feature.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/target_feature.rs @@ -1,31 +1,35 @@ //! Stuff for handling `#[target_feature]` (needed for unsafe check). +use std::borrow::Cow; use std::sync::LazyLock; -use hir_def::attr::Attrs; -use hir_def::tt; -use intern::{Symbol, sym}; +use hir_def::FunctionId; +use hir_def::attrs::AttrFlags; +use intern::Symbol; use rustc_hash::{FxHashMap, FxHashSet}; +use crate::db::HirDatabase; + #[derive(Debug, Default, Clone)] -pub struct TargetFeatures { - pub(crate) enabled: FxHashSet, +pub struct TargetFeatures<'db> { + pub(crate) enabled: Cow<'db, FxHashSet>, } -impl TargetFeatures { - pub fn from_attrs(attrs: &Attrs) -> Self { - let mut result = TargetFeatures::from_attrs_no_implications(attrs); +impl<'db> TargetFeatures<'db> { + pub fn from_fn(db: &'db dyn HirDatabase, owner: FunctionId) -> Self { + let mut result = TargetFeatures::from_fn_no_implications(db, owner); result.expand_implications(); result } fn expand_implications(&mut self) { let all_implications = LazyLock::force(&TARGET_FEATURE_IMPLICATIONS); - let mut queue = self.enabled.iter().cloned().collect::>(); + let enabled = self.enabled.to_mut(); + let mut queue = enabled.iter().cloned().collect::>(); while let Some(feature) = queue.pop() { if let Some(implications) = all_implications.get(&feature) { for implication in implications { - if self.enabled.insert(implication.clone()) { + if enabled.insert(implication.clone()) { queue.push(implication.clone()); } } @@ -34,25 +38,9 @@ impl TargetFeatures { } /// Retrieves the target features from the attributes, and does not expand the target features implied by them. - pub(crate) fn from_attrs_no_implications(attrs: &Attrs) -> Self { - let enabled = attrs - .by_key(sym::target_feature) - .tt_values() - .filter_map(|tt| match tt.token_trees().flat_tokens() { - [ - tt::TokenTree::Leaf(tt::Leaf::Ident(enable_ident)), - tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. })), - tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { - kind: tt::LitKind::Str, - symbol: features, - .. 
- })), - ] if enable_ident.sym == sym::enable => Some(features), - _ => None, - }) - .flat_map(|features| features.as_str().split(',').map(Symbol::intern)) - .collect(); - Self { enabled } + pub(crate) fn from_fn_no_implications(db: &'db dyn HirDatabase, owner: FunctionId) -> Self { + let enabled = AttrFlags::target_features(db, owner); + Self { enabled: Cow::Borrowed(enabled) } } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs index bc4701970c76c..50625c1c26d55 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs @@ -31,7 +31,6 @@ fn foo() -> i32 { &[("infer_shim", 1)], expect_test::expect![[r#" [ - "source_root_crates_shim", "crate_local_def_map", "file_item_tree_query", "ast_id_map_shim", @@ -40,7 +39,7 @@ fn foo() -> i32 { "infer_shim", "function_signature_shim", "function_signature_with_source_map_shim", - "attrs_shim", + "AttrFlags::query_", "body_shim", "body_with_source_map_shim", "trait_environment_shim", @@ -79,7 +78,7 @@ fn foo() -> i32 { "ast_id_map_shim", "file_item_tree_query", "real_span_map_shim", - "attrs_shim", + "AttrFlags::query_", "function_signature_with_source_map_shim", "function_signature_shim", "body_with_source_map_shim", @@ -118,7 +117,6 @@ fn baz() -> i32 { &[("infer_shim", 3)], expect_test::expect![[r#" [ - "source_root_crates_shim", "crate_local_def_map", "file_item_tree_query", "ast_id_map_shim", @@ -127,7 +125,7 @@ fn baz() -> i32 { "infer_shim", "function_signature_shim", "function_signature_with_source_map_shim", - "attrs_shim", + "AttrFlags::query_", "body_shim", "body_with_source_map_shim", "trait_environment_shim", @@ -135,8 +133,8 @@ fn baz() -> i32 { "expr_scopes_shim", "lang_item", "crate_lang_items", - "attrs_shim", - "attrs_shim", + "AttrFlags::query_", + "AttrFlags::query_", "infer_shim", "function_signature_shim", "function_signature_with_source_map_shim", @@ -189,13 +187,13 @@ fn baz() -> i32 { "ast_id_map_shim", "file_item_tree_query", "real_span_map_shim", - "attrs_shim", + "AttrFlags::query_", "function_signature_with_source_map_shim", "function_signature_shim", "body_with_source_map_shim", "body_shim", - "attrs_shim", - "attrs_shim", + "AttrFlags::query_", + "AttrFlags::query_", "function_signature_with_source_map_shim", "function_signature_shim", "body_with_source_map_shim", @@ -235,7 +233,6 @@ $0", &[("trait_impls_in_crate_shim", 1)], expect_test::expect![[r#" [ - "source_root_crates_shim", "crate_local_def_map", "file_item_tree_query", "ast_id_map_shim", @@ -307,7 +304,6 @@ $0", &[("trait_impls_in_crate_shim", 1)], expect_test::expect![[r#" [ - "source_root_crates_shim", "crate_local_def_map", "file_item_tree_query", "ast_id_map_shim", @@ -380,7 +376,6 @@ $0", &[("trait_impls_in_crate_shim", 1)], expect_test::expect![[r#" [ - "source_root_crates_shim", "crate_local_def_map", "file_item_tree_query", "ast_id_map_shim", @@ -454,7 +449,6 @@ $0", &[("trait_impls_in_crate_shim", 1)], expect_test::expect![[r#" [ - "source_root_crates_shim", "crate_local_def_map", "file_item_tree_query", "ast_id_map_shim", @@ -503,14 +497,14 @@ impl SomeStruct { "real_span_map_shim", "crate_local_def_map", "trait_impls_in_crate_shim", - "attrs_shim", + "AttrFlags::query_", "impl_trait_with_diagnostics_shim", "impl_signature_shim", "impl_signature_with_source_map_shim", "impl_self_ty_with_diagnostics_shim", "struct_signature_shim", "struct_signature_with_source_map_shim", - 
"attrs_shim", + "AttrFlags::query_", ] "#]], ); @@ -560,7 +554,6 @@ fn main() { &[("trait_solve_shim", 0)], expect_test::expect![[r#" [ - "source_root_crates_shim", "crate_local_def_map", "file_item_tree_query", "ast_id_map_shim", @@ -569,22 +562,22 @@ fn main() { "TraitItems::query_with_diagnostics_", "body_shim", "body_with_source_map_shim", - "attrs_shim", + "AttrFlags::query_", "ImplItems::of_", "infer_shim", "trait_signature_shim", "trait_signature_with_source_map_shim", - "attrs_shim", + "AttrFlags::query_", "function_signature_shim", "function_signature_with_source_map_shim", - "attrs_shim", + "AttrFlags::query_", "body_shim", "body_with_source_map_shim", "trait_environment_shim", "lang_item", "crate_lang_items", - "attrs_shim", - "attrs_shim", + "AttrFlags::query_", + "AttrFlags::query_", "generic_predicates_shim", "return_type_impl_traits_shim", "infer_shim", @@ -666,22 +659,22 @@ fn main() { "crate_local_def_map", "TraitItems::query_with_diagnostics_", "body_with_source_map_shim", - "attrs_shim", + "AttrFlags::query_", "body_shim", "ImplItems::of_", "infer_shim", - "attrs_shim", + "AttrFlags::query_", "trait_signature_with_source_map_shim", - "attrs_shim", + "AttrFlags::query_", "function_signature_with_source_map_shim", "function_signature_shim", "body_with_source_map_shim", "body_shim", "trait_environment_shim", "crate_lang_items", - "attrs_shim", - "attrs_shim", - "attrs_shim", + "AttrFlags::query_", + "AttrFlags::query_", + "AttrFlags::query_", "generic_predicates_shim", "return_type_impl_traits_shim", "infer_shim", diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs index ca5e33fe6ad00..41dc4dc533753 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs @@ -9,6 +9,7 @@ use base_db::{ }; use hir_def::{ EnumId, EnumVariantId, FunctionId, Lookup, TraitId, + attrs::AttrFlags, db::DefDatabase, hir::generics::WherePredicate, lang_item::LangItem, @@ -119,7 +120,7 @@ pub fn target_feature_is_safe_in_target(target: &TargetData) -> TargetFeatureIsS pub fn is_fn_unsafe_to_call( db: &dyn HirDatabase, func: FunctionId, - caller_target_features: &TargetFeatures, + caller_target_features: &TargetFeatures<'_>, call_edition: Edition, target_feature_is_safe: TargetFeatureIsSafeInTarget, ) -> Unsafety { @@ -130,8 +131,7 @@ pub fn is_fn_unsafe_to_call( if data.has_target_feature() && target_feature_is_safe == TargetFeatureIsSafeInTarget::No { // RFC 2396 . 
- let callee_target_features = - TargetFeatures::from_attrs_no_implications(&db.attrs(func.into())); + let callee_target_features = TargetFeatures::from_fn_no_implications(db, func); if !caller_target_features.enabled.is_superset(&callee_target_features.enabled) { return Unsafety::Unsafe; } @@ -152,7 +152,7 @@ pub fn is_fn_unsafe_to_call( if is_intrinsic_block { // legacy intrinsics // extern "rust-intrinsic" intrinsics are unsafe unless they have the rustc_safe_intrinsic attribute - if db.attrs(func.into()).by_key(sym::rustc_safe_intrinsic).exists() { + if AttrFlags::query(db, func.into()).contains(AttrFlags::RUSTC_SAFE_INTRINSIC) { Unsafety::Safe } else { Unsafety::Unsafe diff --git a/src/tools/rust-analyzer/crates/hir/src/attrs.rs b/src/tools/rust-analyzer/crates/hir/src/attrs.rs index 147f1b8653be8..3376c51fe5c92 100644 --- a/src/tools/rust-analyzer/crates/hir/src/attrs.rs +++ b/src/tools/rust-analyzer/crates/hir/src/attrs.rs @@ -2,9 +2,12 @@ use std::ops::ControlFlow; +use cfg::CfgExpr; +use either::Either; use hir_def::{ - AssocItemId, AttrDefId, ModuleDefId, - attr::AttrsWithOwner, + AssocItemId, AttrDefId, FieldId, InternedModuleId, LifetimeParamId, ModuleDefId, + TypeOrConstParamId, + attrs::{AttrFlags, Docs, IsInnerDoc}, expr_store::path::Path, item_scope::ItemInNs, per_ns::Namespace, @@ -15,6 +18,7 @@ use hir_expand::{ name::Name, }; use hir_ty::{db::HirDatabase, method_resolution}; +use intern::Symbol; use crate::{ Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, DocLinkDef, Enum, ExternCrateDecl, @@ -22,28 +26,161 @@ use crate::{ Struct, Trait, Type, TypeAlias, TypeParam, Union, Variant, VariantDef, }; -pub trait HasAttrs { - fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner; +#[derive(Debug, Clone, Copy)] +pub enum AttrsOwner { + AttrDef(AttrDefId), + Field(FieldId), + LifetimeParam(LifetimeParamId), + TypeOrConstParam(TypeOrConstParamId), +} + +impl AttrsOwner { + #[inline] + fn attr_def(&self) -> Option { + match self { + AttrsOwner::AttrDef(it) => Some(*it), + _ => None, + } + } +} + +#[derive(Debug, Clone)] +pub struct AttrsWithOwner { + pub(crate) attrs: AttrFlags, + owner: AttrsOwner, +} + +impl AttrsWithOwner { + fn new(db: &dyn HirDatabase, owner: AttrDefId) -> Self { + Self { attrs: AttrFlags::query(db, owner), owner: AttrsOwner::AttrDef(owner) } + } + + fn new_field(db: &dyn HirDatabase, owner: FieldId) -> Self { + Self { attrs: AttrFlags::query_field(db, owner), owner: AttrsOwner::Field(owner) } + } + + fn new_lifetime_param(db: &dyn HirDatabase, owner: LifetimeParamId) -> Self { + Self { + attrs: AttrFlags::query_lifetime_param(db, owner), + owner: AttrsOwner::LifetimeParam(owner), + } + } + fn new_type_or_const_param(db: &dyn HirDatabase, owner: TypeOrConstParamId) -> Self { + Self { + attrs: AttrFlags::query_type_or_const_param(db, owner), + owner: AttrsOwner::TypeOrConstParam(owner), + } + } + + #[inline] + pub fn is_unstable(&self) -> bool { + self.attrs.contains(AttrFlags::IS_UNSTABLE) + } + + #[inline] + pub fn is_macro_export(&self) -> bool { + self.attrs.contains(AttrFlags::IS_MACRO_EXPORT) + } + + #[inline] + pub fn is_doc_notable_trait(&self) -> bool { + self.attrs.contains(AttrFlags::IS_DOC_NOTABLE_TRAIT) + } + + #[inline] + pub fn is_doc_hidden(&self) -> bool { + self.attrs.contains(AttrFlags::IS_DOC_HIDDEN) + } + + #[inline] + pub fn is_deprecated(&self) -> bool { + self.attrs.contains(AttrFlags::IS_DEPRECATED) + } + + #[inline] + pub fn is_non_exhaustive(&self) -> bool { + self.attrs.contains(AttrFlags::NON_EXHAUSTIVE) + } + + 
#[inline] + pub fn is_test(&self) -> bool { + self.attrs.contains(AttrFlags::IS_TEST) + } + + #[inline] + pub fn lang(&self, db: &dyn HirDatabase) -> Option<&'static str> { + self.owner + .attr_def() + .and_then(|owner| self.attrs.lang_item_with_attrs(db, owner)) + .map(|lang| lang.name()) + } + + #[inline] + pub fn doc_aliases<'db>(&self, db: &'db dyn HirDatabase) -> &'db [Symbol] { + let owner = match self.owner { + AttrsOwner::AttrDef(it) => Either::Left(it), + AttrsOwner::Field(it) => Either::Right(it), + AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => return &[], + }; + self.attrs.doc_aliases(db, owner) + } + + #[inline] + pub fn cfgs<'db>(&self, db: &'db dyn HirDatabase) -> Option<&'db CfgExpr> { + let owner = match self.owner { + AttrsOwner::AttrDef(it) => Either::Left(it), + AttrsOwner::Field(it) => Either::Right(it), + AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => return None, + }; + self.attrs.cfgs(db, owner) + } + + #[inline] + pub fn hir_docs<'db>(&self, db: &'db dyn HirDatabase) -> Option<&'db Docs> { + match self.owner { + AttrsOwner::AttrDef(it) => AttrFlags::docs(db, it).as_deref(), + AttrsOwner::Field(it) => AttrFlags::field_docs(db, it), + AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => None, + } + } +} + +pub trait HasAttrs: Sized { + #[inline] + fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner { + match self.attr_id(db) { + AttrsOwner::AttrDef(it) => AttrsWithOwner::new(db, it), + AttrsOwner::Field(it) => AttrsWithOwner::new_field(db, it), + AttrsOwner::LifetimeParam(it) => AttrsWithOwner::new_lifetime_param(db, it), + AttrsOwner::TypeOrConstParam(it) => AttrsWithOwner::new_type_or_const_param(db, it), + } + } + #[doc(hidden)] - fn attr_id(self) -> AttrDefId; + fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner; + + #[inline] + fn hir_docs(self, db: &dyn HirDatabase) -> Option<&Docs> { + match self.attr_id(db) { + AttrsOwner::AttrDef(it) => AttrFlags::docs(db, it).as_deref(), + AttrsOwner::Field(it) => AttrFlags::field_docs(db, it), + AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => None, + } + } } macro_rules! impl_has_attrs { ($(($def:ident, $def_id:ident),)*) => {$( impl HasAttrs for $def { - fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner { - let def = AttrDefId::$def_id(self.into()); - AttrsWithOwner::new(db, def) - } - fn attr_id(self) -> AttrDefId { - AttrDefId::$def_id(self.into()) + #[inline] + fn attr_id(self, _db: &dyn HirDatabase) -> AttrsOwner { + AttrsOwner::AttrDef(AttrDefId::$def_id(self.into())) } } )*}; } impl_has_attrs![ - (Field, FieldId), (Variant, EnumVariantId), (Static, StaticId), (Const, ConstId), @@ -52,8 +189,6 @@ impl_has_attrs![ (Macro, MacroId), (Function, FunctionId), (Adt, AdtId), - (Module, ModuleId), - (GenericParam, GenericParamId), (Impl, ImplId), (ExternCrateDecl, ExternCrateId), ]; @@ -61,11 +196,9 @@ impl_has_attrs![ macro_rules! impl_has_attrs_enum { ($($variant:ident),* for $enum:ident) => {$( impl HasAttrs for $variant { - fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner { - $enum::$variant(self).attrs(db) - } - fn attr_id(self) -> AttrDefId { - $enum::$variant(self).attr_id() + #[inline] + fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner { + $enum::$variant(self).attr_id(db) } } )*}; @@ -74,30 +207,46 @@ macro_rules! 
impl_has_attrs_enum { impl_has_attrs_enum![Struct, Union, Enum for Adt]; impl_has_attrs_enum![TypeParam, ConstParam, LifetimeParam for GenericParam]; -impl HasAttrs for AssocItem { - fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner { +impl HasAttrs for Module { + #[inline] + fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner { + AttrsOwner::AttrDef(AttrDefId::ModuleId(InternedModuleId::new(db, self.id))) + } +} + +impl HasAttrs for GenericParam { + #[inline] + fn attr_id(self, _db: &dyn HirDatabase) -> AttrsOwner { match self { - AssocItem::Function(it) => it.attrs(db), - AssocItem::Const(it) => it.attrs(db), - AssocItem::TypeAlias(it) => it.attrs(db), + GenericParam::TypeParam(it) => AttrsOwner::TypeOrConstParam(it.merge().into()), + GenericParam::ConstParam(it) => AttrsOwner::TypeOrConstParam(it.merge().into()), + GenericParam::LifetimeParam(it) => AttrsOwner::LifetimeParam(it.into()), } } - fn attr_id(self) -> AttrDefId { +} + +impl HasAttrs for AssocItem { + #[inline] + fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner { match self { - AssocItem::Function(it) => it.attr_id(), - AssocItem::Const(it) => it.attr_id(), - AssocItem::TypeAlias(it) => it.attr_id(), + AssocItem::Function(it) => it.attr_id(db), + AssocItem::Const(it) => it.attr_id(db), + AssocItem::TypeAlias(it) => it.attr_id(db), } } } impl HasAttrs for crate::Crate { - fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner { - let def = AttrDefId::ModuleId(self.root_module().id); - AttrsWithOwner::new(db, def) + #[inline] + fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner { + self.root_module().attr_id(db) } - fn attr_id(self) -> AttrDefId { - AttrDefId::ModuleId(self.root_module().id) +} + +impl HasAttrs for Field { + #[inline] + fn attr_id(self, _db: &dyn HirDatabase) -> AttrsOwner { + AttrsOwner::Field(self.into()) } } @@ -107,21 +256,22 @@ pub fn resolve_doc_path_on( def: impl HasAttrs + Copy, link: &str, ns: Option, - is_inner_doc: bool, + is_inner_doc: IsInnerDoc, ) -> Option { - resolve_doc_path_on_(db, link, def.attr_id(), ns, is_inner_doc) + resolve_doc_path_on_(db, link, def.attr_id(db), ns, is_inner_doc) } fn resolve_doc_path_on_( db: &dyn HirDatabase, link: &str, - attr_id: AttrDefId, + attr_id: AttrsOwner, ns: Option, - is_inner_doc: bool, + is_inner_doc: IsInnerDoc, ) -> Option { let resolver = match attr_id { - AttrDefId::ModuleId(it) => { - if is_inner_doc { + AttrsOwner::AttrDef(AttrDefId::ModuleId(it)) => { + let it = it.loc(db); + if is_inner_doc.yes() { it.resolver(db) } else if let Some(parent) = Module::from(it).parent(db) { parent.id.resolver(db) @@ -129,20 +279,20 @@ fn resolve_doc_path_on_( it.resolver(db) } } - AttrDefId::FieldId(it) => it.parent.resolver(db), - AttrDefId::AdtId(it) => it.resolver(db), - AttrDefId::FunctionId(it) => it.resolver(db), - AttrDefId::EnumVariantId(it) => it.resolver(db), - AttrDefId::StaticId(it) => it.resolver(db), - AttrDefId::ConstId(it) => it.resolver(db), - AttrDefId::TraitId(it) => it.resolver(db), - AttrDefId::TypeAliasId(it) => it.resolver(db), - AttrDefId::ImplId(it) => it.resolver(db), - AttrDefId::ExternBlockId(it) => it.resolver(db), - AttrDefId::UseId(it) => it.resolver(db), - AttrDefId::MacroId(it) => it.resolver(db), - AttrDefId::ExternCrateId(it) => it.resolver(db), - AttrDefId::GenericParamId(_) => return None, + AttrsOwner::AttrDef(AttrDefId::AdtId(it)) => it.resolver(db), + AttrsOwner::AttrDef(AttrDefId::FunctionId(it)) => it.resolver(db), + AttrsOwner::AttrDef(AttrDefId::EnumVariantId(it)) => it.resolver(db), + 
AttrsOwner::AttrDef(AttrDefId::StaticId(it)) => it.resolver(db), + AttrsOwner::AttrDef(AttrDefId::ConstId(it)) => it.resolver(db), + AttrsOwner::AttrDef(AttrDefId::TraitId(it)) => it.resolver(db), + AttrsOwner::AttrDef(AttrDefId::TypeAliasId(it)) => it.resolver(db), + AttrsOwner::AttrDef(AttrDefId::ImplId(it)) => it.resolver(db), + AttrsOwner::AttrDef(AttrDefId::ExternBlockId(it)) => it.resolver(db), + AttrsOwner::AttrDef(AttrDefId::UseId(it)) => it.resolver(db), + AttrsOwner::AttrDef(AttrDefId::MacroId(it)) => it.resolver(db), + AttrsOwner::AttrDef(AttrDefId::ExternCrateId(it)) => it.resolver(db), + AttrsOwner::Field(it) => it.parent.resolver(db), + AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => return None, }; let mut modpath = doc_modpath_from_str(link)?; diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs index a6d67e8fb4fb5..6ef6ea272e58c 100644 --- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs @@ -153,8 +153,7 @@ pub struct UnresolvedImport { #[derive(Debug, Clone, Eq, PartialEq)] pub struct UnresolvedMacroCall { - pub macro_call: InFile, - pub precise_location: Option, + pub range: InFile, pub path: ModPath, pub is_bang: bool, } @@ -185,8 +184,7 @@ pub struct InactiveCode { #[derive(Debug, Clone, Eq, PartialEq)] pub struct MacroError { - pub node: InFile, - pub precise_location: Option, + pub range: InFile, pub message: String, pub error: bool, pub kind: &'static str, @@ -194,8 +192,7 @@ pub struct MacroError { #[derive(Debug, Clone, Eq, PartialEq)] pub struct MacroExpansionParseError { - pub node: InFile, - pub precise_location: Option, + pub range: InFile, pub errors: Arc<[SyntaxError]>, } @@ -213,12 +210,12 @@ pub struct UnimplementedBuiltinMacro { #[derive(Debug)] pub struct InvalidDeriveTarget { - pub node: InFile, + pub range: InFile, } #[derive(Debug)] pub struct MalformedDerive { - pub node: InFile, + pub range: InFile, } #[derive(Debug)] diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index 48eafb0bd4c60..e912de6fe7614 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -45,11 +45,12 @@ use arrayvec::ArrayVec; use base_db::{CrateDisplayName, CrateOrigin, LangCrateOrigin}; use either::Either; use hir_def::{ - AdtId, AssocItemId, AssocItemLoc, AttrDefId, CallableDefId, ConstId, ConstParamId, - CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, - FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LifetimeParamId, + AdtId, AssocItemId, AssocItemLoc, CallableDefId, ConstId, ConstParamId, CrateRootModuleId, + DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, FunctionId, GenericDefId, + GenericParamId, HasModule, ImplId, InternedModuleId, ItemContainerId, LifetimeParamId, LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, SyntheticSyntax, TupleId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, + attrs::AttrFlags, expr_store::{ExpressionStoreDiagnostics, ExpressionStoreSourceMap}, hir::{ BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, LabelId, Pat, @@ -63,13 +64,12 @@ use hir_def::{ }, per_ns::PerNs, resolver::{HasResolver, Resolver}, - signatures::{ImplFlags, StaticFlags, StructFlags, TraitFlags, VariantFields}, + signatures::{EnumSignature, ImplFlags, StaticFlags, StructFlags, 
TraitFlags, VariantFields}, src::HasSource as _, visibility::visibility_from_ast, }; use hir_expand::{ - AstId, MacroCallKind, RenderedExpandError, ValueResult, attrs::collect_attrs, - proc_macro::ProcMacroKind, + AstId, MacroCallKind, RenderedExpandError, ValueResult, proc_macro::ProcMacroKind, }; use hir_ty::{ TraitEnvironment, TyDefId, TyLoweringDiagnostic, ValueTyDefId, all_super_traits, autoderef, @@ -98,8 +98,8 @@ use smallvec::SmallVec; use span::{AstIdNode, Edition, FileId}; use stdx::{format_to, impl_from, never}; use syntax::{ - AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, T, TextRange, ToSmolStr, - ast::{self, HasAttrs as _, HasName, HasVisibility as _}, + AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange, ToSmolStr, + ast::{self, HasName, HasVisibility as _}, format_smolstr, }; use triomphe::{Arc, ThinArc}; @@ -107,7 +107,7 @@ use triomphe::{Arc, ThinArc}; use crate::db::{DefDatabase, HirDatabase}; pub use crate::{ - attrs::{HasAttrs, resolve_doc_path_on}, + attrs::{AttrsWithOwner, HasAttrs, resolve_doc_path_on}, diagnostics::*, has_source::HasSource, semantics::{ @@ -130,7 +130,7 @@ pub use { hir_def::{ Complete, FindPathConfig, - attr::{AttrSourceMap, Attrs, AttrsWithOwner}, + attrs::{Docs, IsInnerDoc}, find_path::PrefixKind, import_map, lang_item::LangItem, @@ -144,7 +144,6 @@ pub use { }, hir_expand::{ EditionedFileId, ExpandResult, HirFileId, MacroCallId, MacroKind, - attrs::{Attr, AttrId}, change::ChangeWithProcMacros, files::{ FilePosition, FilePositionWrapper, FileRange, FileRangeWrapper, HirFilePosition, @@ -290,11 +289,10 @@ impl Crate { } /// Try to get the root URL of the documentation of a crate. - pub fn get_html_root_url(self: &Crate, db: &dyn HirDatabase) -> Option { + pub fn get_html_root_url(self, db: &dyn HirDatabase) -> Option { // Look for #![doc(html_root_url = "...")] - let attrs = db.attrs(AttrDefId::ModuleId(self.root_module().into())); - let doc_url = attrs.by_key(sym::doc).find_string_value_in_tt(sym::html_root_url); - doc_url.map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/") + let doc_url = AttrFlags::doc_html_root_url(db, self.id); + doc_url.as_ref().map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/") } pub fn cfg<'db>(&self, db: &'db dyn HirDatabase) -> &'db CfgOptions { @@ -639,7 +637,7 @@ impl Module { // FIXME: This is accidentally quadratic. 
continue; } - emit_def_diagnostic(db, acc, diag, edition); + emit_def_diagnostic(db, acc, diag, edition, def_map.krate()); } if !self.id.is_block_module() { @@ -658,8 +656,9 @@ impl Module { acc.extend(def.diagnostics(db, style_lints)) } ModuleDef::Trait(t) => { + let krate = t.krate(db); for diag in TraitItems::query_with_diagnostics(db, t.id).1.iter() { - emit_def_diagnostic(db, acc, diag, edition); + emit_def_diagnostic(db, acc, diag, edition, krate.id); } for item in t.items(db) { @@ -777,7 +776,7 @@ impl Module { let ast_id_map = db.ast_id_map(file_id); for diag in impl_def.id.impl_items_with_diagnostics(db).1.iter() { - emit_def_diagnostic(db, acc, diag, edition); + emit_def_diagnostic(db, acc, diag, edition, loc.container.krate()); } if inherent_impls.invalid_impls().contains(&impl_def.id) { @@ -808,21 +807,10 @@ impl Module { return None; } let parent = impl_def.id.into(); - let generic_params = db.generic_params(parent); - let lifetime_params = generic_params.iter_lt().map(|(local_id, _)| { - GenericParamId::LifetimeParamId(LifetimeParamId { parent, local_id }) - }); - let type_params = generic_params - .iter_type_or_consts() - .filter(|(_, it)| it.type_param().is_some()) - .map(|(local_id, _)| { - GenericParamId::TypeParamId(TypeParamId::from_unchecked( - TypeOrConstParamId { parent, local_id }, - )) - }); - let res = type_params.chain(lifetime_params).any(|p| { - db.attrs(AttrDefId::GenericParamId(p)).by_key(sym::may_dangle).exists() - }); + let (lifetimes_attrs, type_and_consts_attrs) = + AttrFlags::query_generic_params(db, parent); + let res = lifetimes_attrs.values().any(|it| it.contains(AttrFlags::MAY_DANGLE)) + || type_and_consts_attrs.values().any(|it| it.contains(AttrFlags::MAY_DANGLE)); Some(res) })() .unwrap_or(false); @@ -983,6 +971,17 @@ impl Module { ) -> Option { hir_def::find_path::find_path(db, item.into().into(), self.into(), prefix_kind, true, cfg) } + + #[inline] + pub fn doc_keyword(self, db: &dyn HirDatabase) -> Option { + AttrFlags::doc_keyword(db, InternedModuleId::new(db, self.id)) + } + + /// Whether it has `#[path = "..."]` attribute. 
+ #[inline] + pub fn has_path(&self, db: &dyn HirDatabase) -> bool { + self.attrs(db).attrs.contains(AttrFlags::HAS_PATH) + } } fn macro_call_diagnostics<'db>( @@ -997,31 +996,19 @@ fn macro_call_diagnostics<'db>( if let Some(err) = err { let loc = db.lookup_intern_macro_call(macro_call_id); let file_id = loc.kind.file_id(); - let node = - InFile::new(file_id, db.ast_id_map(file_id).get_erased(loc.kind.erased_ast_id())); + let mut range = precise_macro_call_location(&loc.kind, db, loc.krate); let RenderedExpandError { message, error, kind } = err.render_to_string(db); - let editioned_file_id = EditionedFileId::from_span(db, err.span().anchor.file_id); - let precise_location = if editioned_file_id == file_id { - Some( - err.span().range - + db.ast_id_map(editioned_file_id.into()) - .get_erased(err.span().anchor.ast_id) - .text_range() - .start(), - ) - } else { - None - }; - acc.push(MacroError { node, precise_location, message, error, kind }.into()); + if Some(err.span().anchor.file_id) == file_id.file_id().map(|it| it.editioned_file_id(db)) { + range.value = err.span().range + + db.ast_id_map(file_id).get_erased(err.span().anchor.ast_id).text_range().start(); + } + acc.push(MacroError { range, message, error, kind }.into()); } if !parse_errors.is_empty() { let loc = db.lookup_intern_macro_call(macro_call_id); - let (node, precise_location) = precise_macro_call_location(&loc.kind, db); - acc.push( - MacroExpansionParseError { node, precise_location, errors: parse_errors.clone() } - .into(), - ) + let range = precise_macro_call_location(&loc.kind, db, loc.krate); + acc.push(MacroExpansionParseError { range, errors: parse_errors.clone() }.into()) } } @@ -1045,6 +1032,7 @@ fn emit_macro_def_diagnostics<'db>( acc, &DefDiagnosticKind::MacroDefError { ast, message: e.to_string() }, edition, + m.krate(db).id, ); } } @@ -1054,8 +1042,9 @@ fn emit_def_diagnostic<'db>( acc: &mut Vec>, diag: &DefDiagnostic, edition: Edition, + krate: base_db::Crate, ) { - emit_def_diagnostic_(db, acc, &diag.kind, edition) + emit_def_diagnostic_(db, acc, &diag.kind, edition, krate) } fn emit_def_diagnostic_<'db>( @@ -1063,6 +1052,7 @@ fn emit_def_diagnostic_<'db>( acc: &mut Vec>, diag: &DefDiagnosticKind, edition: Edition, + krate: base_db::Crate, ) { match diag { DefDiagnosticKind::UnresolvedModule { ast: declaration, candidates } => { @@ -1085,8 +1075,7 @@ fn emit_def_diagnostic_<'db>( let RenderedExpandError { message, error, kind } = err.render_to_string(db); acc.push( MacroError { - node: InFile::new(ast.file_id, item.syntax_node_ptr()), - precise_location: None, + range: InFile::new(ast.file_id, item.text_range()), message: format!("{}: {message}", path.display(db, edition)), error, kind, @@ -1116,11 +1105,10 @@ fn emit_def_diagnostic_<'db>( ); } DefDiagnosticKind::UnresolvedMacroCall { ast, path } => { - let (node, precise_location) = precise_macro_call_location(ast, db); + let location = precise_macro_call_location(ast, db, krate); acc.push( UnresolvedMacroCall { - macro_call: node, - precise_location, + range: location, path: path.clone(), is_bang: matches!(ast, MacroCallKind::FnLike { .. 
}), } @@ -1139,34 +1127,12 @@ fn emit_def_diagnostic_<'db>( ); } DefDiagnosticKind::InvalidDeriveTarget { ast, id } => { - let node = ast.to_node(db); - let derive = node.attrs().nth(*id); - match derive { - Some(derive) => { - acc.push( - InvalidDeriveTarget { - node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))), - } - .into(), - ); - } - None => stdx::never!("derive diagnostic on item without derive attribute"), - } + let derive = id.find_attr_range(db, krate, *ast).3.path_range(); + acc.push(InvalidDeriveTarget { range: ast.with_value(derive) }.into()); } DefDiagnosticKind::MalformedDerive { ast, id } => { - let node = ast.to_node(db); - let derive = node.attrs().nth(*id); - match derive { - Some(derive) => { - acc.push( - MalformedDerive { - node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))), - } - .into(), - ); - } - None => stdx::never!("derive diagnostic on item without derive attribute"), - } + let derive = id.find_attr_range(db, krate, *ast).2; + acc.push(MalformedDerive { range: ast.with_value(derive) }.into()); } DefDiagnosticKind::MacroDefError { ast, message } => { let node = ast.to_node(db); @@ -1185,61 +1151,28 @@ fn emit_def_diagnostic_<'db>( fn precise_macro_call_location( ast: &MacroCallKind, db: &dyn HirDatabase, -) -> (InFile, Option) { + krate: base_db::Crate, +) -> InFile { // FIXME: maybe we actually want slightly different ranges for the different macro diagnostics // - e.g. the full attribute for macro errors, but only the name for name resolution match ast { MacroCallKind::FnLike { ast_id, .. } => { let node = ast_id.to_node(db); - ( - ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))), - node.path() - .and_then(|it| it.segment()) - .and_then(|it| it.name_ref()) - .map(|it| it.syntax().text_range()), - ) + let range = node + .path() + .and_then(|it| it.segment()) + .and_then(|it| it.name_ref()) + .map(|it| it.syntax().text_range()); + let range = range.unwrap_or_else(|| node.syntax().text_range()); + ast_id.with_value(range) } MacroCallKind::Derive { ast_id, derive_attr_index, derive_index, .. } => { - let node = ast_id.to_node(db); - // Compute the precise location of the macro name's token in the derive - // list. - let token = (|| { - let derive_attr = collect_attrs(&node) - .nth(derive_attr_index.ast_index()) - .and_then(|x| Either::left(x.1))?; - let token_tree = derive_attr.meta()?.token_tree()?; - let chunk_by = token_tree - .syntax() - .children_with_tokens() - .filter_map(|elem| match elem { - syntax::NodeOrToken::Token(tok) => Some(tok), - _ => None, - }) - .chunk_by(|t| t.kind() == T![,]); - let (_, mut group) = chunk_by - .into_iter() - .filter(|&(comma, _)| !comma) - .nth(*derive_index as usize)?; - group.find(|t| t.kind() == T![ident]) - })(); - ( - ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))), - token.as_ref().map(|tok| tok.text_range()), - ) + let range = derive_attr_index.find_derive_range(db, krate, *ast_id, *derive_index); + ast_id.with_value(range) } - MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => { - let node = ast_id.to_node(db); - let attr = collect_attrs(&node) - .nth(invoc_attr_index.ast_index()) - .and_then(|x| Either::left(x.1)) - .unwrap_or_else(|| { - panic!("cannot find attribute #{}", invoc_attr_index.ast_index()) - }); - - ( - ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))), - Some(attr.syntax().text_range()), - ) + MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. 
} => { + let attr_range = attr_ids.invoc_attr().find_attr_range(db, krate, *ast_id).2; + ast_id.with_value(attr_range) } } } @@ -1437,7 +1370,7 @@ impl Struct { } pub fn repr(self, db: &dyn HirDatabase) -> Option { - db.struct_signature(self.id).repr + AttrFlags::repr(db, self.id.into()) } pub fn kind(self, db: &dyn HirDatabase) -> StructKind { @@ -1453,7 +1386,7 @@ impl Struct { } pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { - db.attrs(self.id.into()).is_unstable() + AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE) } pub fn instantiate_infer<'db>(self, infer_ctxt: &InferCtxt<'db>) -> InstantiatedStruct<'db> { @@ -1542,7 +1475,7 @@ impl Union { .collect() } pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { - db.attrs(self.id.into()).is_unstable() + AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE) } } @@ -1577,7 +1510,7 @@ impl Enum { } pub fn repr(self, db: &dyn HirDatabase) -> Option { - db.enum_signature(self.id).repr + AttrFlags::repr(db, self.id.into()) } pub fn ty<'db>(self, db: &'db dyn HirDatabase) -> Type<'db> { @@ -1593,7 +1526,7 @@ impl Enum { let interner = DbInterner::new_with(db, None, None); Type::new_for_crate( self.id.lookup(db).container.krate(), - match db.enum_signature(self.id).variant_body_type() { + match EnumSignature::variant_body_type(db, self.id) { layout::IntegerType::Pointer(sign) => match sign { true => Ty::new_int(interner, rustc_type_ir::IntTy::Isize), false => Ty::new_uint(interner, rustc_type_ir::UintTy::Usize), @@ -1634,7 +1567,7 @@ impl Enum { } pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { - db.attrs(self.id.into()).is_unstable() + AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE) } } @@ -1735,7 +1668,7 @@ impl Variant { } pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { - db.attrs(self.id.into()).is_unstable() + AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE) } pub fn instantiate_infer<'db>(self, infer_ctxt: &InferCtxt<'db>) -> InstantiatedVariant<'db> { @@ -2220,8 +2153,7 @@ fn expr_store_diagnostics<'db>( InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into() } ExpressionStoreDiagnostics::UnresolvedMacroCall { node, path } => UnresolvedMacroCall { - macro_call: (*node).map(|ast_ptr| ast_ptr.into()), - precise_location: None, + range: node.map(|ptr| ptr.text_range()), path: path.clone(), is_bang: true, } @@ -2446,33 +2378,33 @@ impl Function { /// Does this function have `#[test]` attribute? pub fn is_test(self, db: &dyn HirDatabase) -> bool { - db.attrs(self.id.into()).is_test() + self.attrs(db).is_test() } /// is this a `fn main` or a function with an `export_name` of `main`? pub fn is_main(self, db: &dyn HirDatabase) -> bool { - db.attrs(self.id.into()).export_name() == Some(&sym::main) + self.exported_main(db) || self.module(db).is_crate_root() && db.function_signature(self.id).name == sym::main } /// Is this a function with an `export_name` of `main`? pub fn exported_main(self, db: &dyn HirDatabase) -> bool { - db.attrs(self.id.into()).export_name() == Some(&sym::main) + AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_EXPORT_NAME_MAIN) } /// Does this function have the ignore attribute? pub fn is_ignore(self, db: &dyn HirDatabase) -> bool { - db.attrs(self.id.into()).is_ignore() + AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_IGNORE) } /// Does this function have `#[bench]` attribute? 
pub fn is_bench(self, db: &dyn HirDatabase) -> bool { - db.attrs(self.id.into()).is_bench() + AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_BENCH) } /// Is this function marked as unstable with `#[feature]` attribute? pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { - db.attrs(self.id.into()).is_unstable() + AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE) } pub fn is_unsafe_to_call( @@ -2483,8 +2415,7 @@ impl Function { ) -> bool { let (target_features, target_feature_is_safe_in_target) = caller .map(|caller| { - let target_features = - hir_ty::TargetFeatures::from_attrs(&db.attrs(caller.id.into())); + let target_features = hir_ty::TargetFeatures::from_fn(db, caller.id); let target_feature_is_safe_in_target = match &caller.krate(db).id.workspace_data(db).target { Ok(target) => hir_ty::target_feature_is_safe_in_target(target), @@ -2515,14 +2446,6 @@ impl Function { } pub fn as_proc_macro(self, db: &dyn HirDatabase) -> Option { - let attrs = db.attrs(self.id.into()); - // FIXME: Store this in FunctionData flags? - if !(attrs.is_proc_macro() - || attrs.is_proc_macro_attribute() - || attrs.is_proc_macro_derive()) - { - return None; - } let def_map = crate_def_map(db, HasModule::krate(&self.id, db)); def_map.fn_as_proc_macro(self.id).map(|id| Macro { id: id.into() }) } @@ -2975,7 +2898,7 @@ impl Trait { /// `#[rust_analyzer::completions(...)]` mode. pub fn complete(self, db: &dyn HirDatabase) -> Complete { - Complete::extract(true, &self.attrs(db)) + Complete::extract(true, self.attrs(db).attrs) } } @@ -3146,10 +3069,10 @@ impl Macro { let loc = id.lookup(db); let source = loc.source(db); match loc.kind { - ProcMacroKind::CustomDerive => db - .attrs(id.into()) - .parse_proc_macro_derive() - .map_or_else(|| as_name_opt(source.value.name()), |(it, _)| it), + ProcMacroKind::CustomDerive => AttrFlags::derive_info(db, self.id).map_or_else( + || as_name_opt(source.value.name()), + |info| Name::new_symbol_root(info.trait_name.clone()), + ), ProcMacroKind::Bang | ProcMacroKind::Attr => as_name_opt(source.value.name()), } } @@ -3157,7 +3080,7 @@ impl Macro { } pub fn is_macro_export(self, db: &dyn HirDatabase) -> bool { - matches!(self.id, MacroId::MacroRulesId(_) if db.attrs(self.id.into()).by_key(sym::macro_export).exists()) + matches!(self.id, MacroId::MacroRulesId(_) if AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_MACRO_EXPORT)) } pub fn is_proc_macro(self) -> bool { @@ -3981,18 +3904,10 @@ impl DeriveHelper { } pub fn name(&self, db: &dyn HirDatabase) -> Name { - match self.derive { - makro @ MacroId::Macro2Id(_) => db - .attrs(makro.into()) - .parse_rustc_builtin_macro() - .and_then(|(_, helpers)| helpers.get(self.idx as usize).cloned()), - MacroId::MacroRulesId(_) => None, - makro @ MacroId::ProcMacroId(_) => db - .attrs(makro.into()) - .parse_proc_macro_derive() - .and_then(|(_, helpers)| helpers.get(self.idx as usize).cloned()), - } - .unwrap_or_else(Name::missing) + AttrFlags::derive_info(db, self.derive) + .and_then(|it| it.helpers.get(self.idx as usize)) + .map(|helper| Name::new_symbol_root(helper.clone())) + .unwrap_or_else(Name::missing) } } @@ -4213,7 +4128,7 @@ impl TypeParam { } pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { - db.attrs(GenericParamId::from(self.id).into()).is_unstable() + self.attrs(db).is_unstable() } } diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index 62ce3daab75df..8eb1c9725cd2a 100644 --- 
a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -21,7 +21,6 @@ use hir_def::{ }; use hir_expand::{ EditionedFileId, ExpandResult, FileRange, HirFileId, InMacroFile, MacroCallId, - attrs::collect_attrs, builtin::{BuiltinFnLikeExpander, EagerExpander}, db::ExpandDatabase, files::{FileRangeWrapper, HirFileRange, InRealFile}, @@ -36,7 +35,7 @@ use intern::{Interned, Symbol, sym}; use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::{SmallVec, smallvec}; -use span::{Edition, FileId, SyntaxContext}; +use span::{FileId, SyntaxContext}; use stdx::{TupleExt, always}; use syntax::{ AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, @@ -386,17 +385,14 @@ impl<'db> SemanticsImpl<'db> { } pub fn attach_first_edition(&self, file: FileId) -> Option { - Some(EditionedFileId::new( - self.db, - file, - self.file_to_module_defs(file).next()?.krate().edition(self.db), - )) + let krate = self.file_to_module_defs(file).next()?.krate(); + Some(EditionedFileId::new(self.db, file, krate.edition(self.db), krate.id)) } pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile { let file_id = self .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::new(self.db, file_id, Edition::CURRENT)); + .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(self.db, file_id)); let tree = self.db.parse(file_id).tree(); self.cache(tree.syntax().clone(), file_id.into()); @@ -1197,33 +1193,34 @@ impl<'db> SemanticsImpl<'db> { .zip(Some(item)) }) .map(|(call_id, item)| { - let attr_id = match db.lookup_intern_macro_call(call_id).kind { + let item_range = item.syntax().text_range(); + let loc = db.lookup_intern_macro_call(call_id); + let text_range = match loc.kind { hir_expand::MacroCallKind::Attr { - invoc_attr_index, .. - } => invoc_attr_index.ast_index(), - _ => 0, + censored_attr_ids: attr_ids, + .. + } => { + // FIXME: here, the attribute's text range is used to strip away all + // entries from the start of the attribute "list" up the invoking + // attribute. But in + // ``` + // mod foo { + // #![inner] + // } + // ``` + // we don't wanna strip away stuff in the `mod foo {` range, that is + // here if the id corresponds to an inner attribute we got strip all + // text ranges of the outer ones, and then all of the inner ones up + // to the invoking attribute so that the inbetween is ignored. + // FIXME: Should cfg_attr be handled differently? + let (attr, _, _, _) = attr_ids + .invoc_attr() + .find_attr_range_with_source(db, loc.krate, &item); + let start = attr.syntax().text_range().start(); + TextRange::new(start, item_range.end()) + } + _ => item_range, }; - // FIXME: here, the attribute's text range is used to strip away all - // entries from the start of the attribute "list" up the invoking - // attribute. But in - // ``` - // mod foo { - // #![inner] - // } - // ``` - // we don't wanna strip away stuff in the `mod foo {` range, that is - // here if the id corresponds to an inner attribute we got strip all - // text ranges of the outer ones, and then all of the inner ones up - // to the invoking attribute so that the inbetween is ignored. 
- let text_range = item.syntax().text_range(); - let start = collect_attrs(&item) - .nth(attr_id) - .map(|attr| match attr.1 { - Either::Left(it) => it.syntax().text_range().start(), - Either::Right(it) => it.syntax().text_range().start(), - }) - .unwrap_or_else(|| text_range.start()); - let text_range = TextRange::new(start, text_range.end()); filter_duplicates(tokens, text_range); process_expansion_for_token(ctx, &mut stack, call_id) }) @@ -1473,6 +1470,14 @@ impl<'db> SemanticsImpl<'db> { FileRangeWrapper { file_id: file_id.file_id(self.db), range } } + pub fn diagnostics_display_range_for_range( + &self, + src: InFile, + ) -> FileRangeWrapper { + let FileRange { file_id, range } = src.original_node_file_range_rooted(self.db); + FileRangeWrapper { file_id: file_id.file_id(self.db), range } + } + fn token_ancestors_with_macros( &self, token: SyntaxToken, diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs index 5019a5987e513..165ac7e4a08d3 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs @@ -5,7 +5,7 @@ //! node for a *child*, and get its hir. use either::Either; -use hir_expand::{HirFileId, attrs::collect_attrs}; +use hir_expand::HirFileId; use span::AstIdNode; use syntax::{AstPtr, ast}; @@ -94,6 +94,7 @@ impl ChildBySource for ModuleId { impl ChildBySource for ItemScope { fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) { + let krate = file_id.krate(db); self.declarations().for_each(|item| add_module_def(db, res, file_id, item)); self.impls().for_each(|imp| insert_item_loc(db, res, file_id, imp, keys::IMPL)); self.extern_blocks().for_each(|extern_block| { @@ -123,12 +124,10 @@ impl ChildBySource for ItemScope { |(ast_id, calls)| { let adt = ast_id.to_node(db); calls.for_each(|(attr_id, call_id, calls)| { - if let Some((_, Either::Left(attr))) = - collect_attrs(&adt).nth(attr_id.ast_index()) - { - res[keys::DERIVE_MACRO_CALL] - .insert(AstPtr::new(&attr), (attr_id, call_id, calls.into())); - } + // FIXME: Fix cfg_attr handling. 
+ let (attr, _, _, _) = attr_id.find_attr_range_with_source(db, krate, &adt); + res[keys::DERIVE_MACRO_CALL] + .insert(AstPtr::new(&attr), (attr_id, call_id, calls.into())); }); }, ); diff --git a/src/tools/rust-analyzer/crates/hir/src/symbols.rs b/src/tools/rust-analyzer/crates/hir/src/symbols.rs index d8c624e5c6896..9059c88ad66a1 100644 --- a/src/tools/rust-analyzer/crates/hir/src/symbols.rs +++ b/src/tools/rust-analyzer/crates/hir/src/symbols.rs @@ -392,12 +392,12 @@ impl<'a> SymbolCollector<'a> { let mut do_not_complete = Complete::Yes; if let Some(attrs) = def.attrs(self.db) { - do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), &attrs); + do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), attrs.attrs); if let Some(trait_do_not_complete) = trait_do_not_complete { do_not_complete = Complete::for_trait_item(trait_do_not_complete, do_not_complete); } - for alias in attrs.doc_aliases() { + for alias in attrs.doc_aliases(self.db) { self.symbols.insert(FileSymbol { name: alias.clone(), def, @@ -441,9 +441,9 @@ impl<'a> SymbolCollector<'a> { let mut do_not_complete = Complete::Yes; if let Some(attrs) = def.attrs(self.db) { - do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), &attrs); + do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), attrs.attrs); - for alias in attrs.doc_aliases() { + for alias in attrs.doc_aliases(self.db) { self.symbols.insert(FileSymbol { name: alias.clone(), def, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs index 7843ab9e8f25b..e06c534e3c51f 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs @@ -1,7 +1,7 @@ use std::iter::{self, Peekable}; use either::Either; -use hir::{Adt, AsAssocItem, Crate, FindPathConfig, HasAttrs, ModuleDef, Semantics, sym}; +use hir::{Adt, AsAssocItem, Crate, FindPathConfig, HasAttrs, ModuleDef, Semantics}; use ide_db::RootDatabase; use ide_db::assists::ExprFillDefaultMode; use ide_db::syntax_helpers::suggest_name; @@ -401,7 +401,7 @@ impl ExtendedVariant { fn should_be_hidden(self, db: &RootDatabase, krate: Crate) -> bool { match self { ExtendedVariant::Variant { variant: var, .. } => { - var.attrs(db).has_doc_hidden() && var.module(db).krate() != krate + var.attrs(db).is_doc_hidden() && var.module(db).krate() != krate } _ => false, } @@ -424,7 +424,7 @@ impl ExtendedEnum { fn is_non_exhaustive(&self, db: &RootDatabase, krate: Crate) -> bool { match self { ExtendedEnum::Enum { enum_: e, .. 
} => { - e.attrs(db).by_key(sym::non_exhaustive).exists() && e.module(db).krate() != krate + e.attrs(db).is_non_exhaustive() && e.module(db).krate() != krate } _ => false, } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs index 8b24d33bf9965..46f210804da32 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs @@ -1,4 +1,4 @@ -use hir::{HasVisibility, sym}; +use hir::HasVisibility; use ide_db::{ FxHashMap, FxHashSet, assists::AssistId, @@ -93,7 +93,7 @@ fn collect_data(ident_pat: ast::IdentPat, ctx: &AssistContext<'_>) -> Option) -> let mut buf = String::from("./"); let db = ctx.db(); match parent_module.name(db) { - Some(name) - if !parent_module.is_mod_rs(db) - && parent_module - .attrs(db) - .by_key(sym::path) - .string_value_unescape() - .is_none() => - { + Some(name) if !parent_module.is_mod_rs(db) && !parent_module.has_path(db) => { format_to!(buf, "{}/", name.as_str()) } _ => (), diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs index 2977f8b8c2e75..eb7553222a688 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs @@ -68,7 +68,7 @@ pub mod utils; use hir::Semantics; use ide_db::{EditionedFileId, RootDatabase}; -use syntax::{Edition, TextRange}; +use syntax::TextRange; pub(crate) use crate::assist_context::{AssistContext, Assists}; @@ -90,7 +90,7 @@ pub fn assists( let sema = Semantics::new(db); let file_id = sema .attach_first_edition(range.file_id) - .unwrap_or_else(|| EditionedFileId::new(db, range.file_id, Edition::CURRENT)); + .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, range.file_id)); let ctx = AssistContext::new(sema, config, hir::FileRange { file_id, range: range.range }); let mut acc = Assists::new(&ctx, resolve); handlers::all().iter().for_each(|handler| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs index ade60691b57bc..2e220b129fe13 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs @@ -321,11 +321,13 @@ fn check_with_config( let _tracing = setup_tracing(); let (mut db, file_with_caret_id, range_or_offset) = RootDatabase::with_range_or_offset(before); db.enable_proc_attr_macros(); + let sema = Semantics::new(&db); + let file_with_caret_id = + sema.attach_first_edition(file_with_caret_id.file_id(&db)).unwrap_or(file_with_caret_id); let text_without_caret = db.file_text(file_with_caret_id.file_id(&db)).text(&db).to_string(); let frange = hir::FileRange { file_id: file_with_caret_id, range: range_or_offset.into() }; - let sema = Semantics::new(&db); let ctx = AssistContext::new(sema, &config, frange); let resolve = match expected { ExpectedResult::Unresolved => AssistResolveStrategy::None, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs index 5a3c5a39dac79..7a86339c1c9c3 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs @@ -93,16 +93,7 @@ pub fn test_related_attribute_syn(fn_def: &ast::Fn) -> Option { } pub fn 
has_test_related_attribute(attrs: &hir::AttrsWithOwner) -> bool { - attrs.iter().any(|attr| { - let path = attr.path(); - (|| { - Some( - path.segments().first()?.as_str().starts_with("test") - || path.segments().last()?.as_str().ends_with("test"), - ) - })() - .unwrap_or_default() - }) + attrs.is_test() } #[derive(Clone, Copy, PartialEq)] @@ -128,7 +119,7 @@ pub fn filter_assoc_items( .copied() .filter(|assoc_item| { if ignore_items == IgnoreAssocItems::DocHiddenAttrPresent - && assoc_item.attrs(sema.db).has_doc_hidden() + && assoc_item.attrs(sema.db).is_doc_hidden() { if let hir::AssocItem::Function(f) = assoc_item && !f.has_body(sema.db) diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs index c87c46d98127b..df577b8ed02eb 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs @@ -56,7 +56,7 @@ pub(super) fn complete_lint( }; let mut item = CompletionItem::new(SymbolKind::Attribute, ctx.source_range(), label, ctx.edition); - item.documentation(Documentation::new(description.to_owned())); + item.documentation(Documentation::new_owned(description.to_owned())); item.add_to(acc, ctx.db) } } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs index d1e05a4359f19..20d01485a45a2 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs @@ -266,7 +266,7 @@ fn import_on_the_fly( let original_item = &import.original_item; !ctx.is_item_hidden(&import.item_to_import) && !ctx.is_item_hidden(original_item) - && ctx.check_stability(original_item.attrs(ctx.db).as_deref()) + && ctx.check_stability(original_item.attrs(ctx.db).as_ref()) }) .filter(|import| filter_excluded_flyimport(ctx, import)) .sorted_by(|a, b| { @@ -313,7 +313,7 @@ fn import_on_the_fly_pat_( let original_item = &import.original_item; !ctx.is_item_hidden(&import.item_to_import) && !ctx.is_item_hidden(original_item) - && ctx.check_stability(original_item.attrs(ctx.db).as_deref()) + && ctx.check_stability(original_item.attrs(ctx.db).as_ref()) }) .sorted_by(|a, b| { let key = |import_path| { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs index 73cbe3f0aaab9..4474d6181c209 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs @@ -440,7 +440,7 @@ fn add_custom_postfix_completions( let body = snippet.postfix_snippet(receiver_text); let mut builder = postfix_snippet(trigger, snippet.description.as_deref().unwrap_or_default(), &body); - builder.documentation(Documentation::new(format!("```rust\n{body}\n```"))); + builder.documentation(Documentation::new_owned(format!("```rust\n{body}\n```"))); for import in imports.into_iter() { builder.add_import(import); } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs index ead9852eff53c..04450aea75bf7 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs +++ 
b/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs @@ -139,7 +139,7 @@ fn add_custom_completions( }; let body = snip.snippet(); let mut builder = snippet(ctx, cap, trigger, &body); - builder.documentation(Documentation::new(format!("```rust\n{body}\n```"))); + builder.documentation(Documentation::new_owned(format!("```rust\n{body}\n```"))); for import in imports.into_iter() { builder.add_import(import); } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs index fc2cc3b796ec9..c95b83ef8a027 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs @@ -558,7 +558,7 @@ impl CompletionContext<'_> { I: hir::HasAttrs + Copy, { let attrs = item.attrs(self.db); - attrs.doc_aliases().map(|it| it.as_str().into()).collect() + attrs.doc_aliases(self.db).iter().map(|it| it.as_str().into()).collect() } /// Check if an item is `#[doc(hidden)]`. @@ -572,7 +572,7 @@ impl CompletionContext<'_> { } /// Checks whether this item should be listed in regards to stability. Returns `true` if we should. - pub(crate) fn check_stability(&self, attrs: Option<&hir::Attrs>) -> bool { + pub(crate) fn check_stability(&self, attrs: Option<&hir::AttrsWithOwner>) -> bool { let Some(attrs) = attrs else { return true; }; @@ -590,15 +590,15 @@ impl CompletionContext<'_> { /// Whether the given trait is an operator trait or not. pub(crate) fn is_ops_trait(&self, trait_: hir::Trait) -> bool { - match trait_.attrs(self.db).lang() { - Some(lang) => OP_TRAIT_LANG_NAMES.contains(&lang.as_str()), + match trait_.attrs(self.db).lang(self.db) { + Some(lang) => OP_TRAIT_LANG_NAMES.contains(&lang), None => false, } } /// Whether the given trait has `#[doc(notable_trait)]` pub(crate) fn is_doc_notable_trait(&self, trait_: hir::Trait) -> bool { - trait_.attrs(self.db).has_doc_notable_trait() + trait_.attrs(self.db).is_doc_notable_trait() } /// Returns the traits in scope, with the [`Drop`] trait removed. @@ -662,7 +662,7 @@ impl CompletionContext<'_> { fn is_visible_impl( &self, vis: &hir::Visibility, - attrs: &hir::Attrs, + attrs: &hir::AttrsWithOwner, defining_crate: hir::Crate, ) -> Visible { if !self.check_stability(Some(attrs)) { @@ -684,14 +684,18 @@ impl CompletionContext<'_> { if self.is_doc_hidden(attrs, defining_crate) { Visible::No } else { Visible::Yes } } - pub(crate) fn is_doc_hidden(&self, attrs: &hir::Attrs, defining_crate: hir::Crate) -> bool { + pub(crate) fn is_doc_hidden( + &self, + attrs: &hir::AttrsWithOwner, + defining_crate: hir::Crate, + ) -> bool { // `doc(hidden)` items are only completed within the defining crate. - self.krate != defining_crate && attrs.has_doc_hidden() + self.krate != defining_crate && attrs.is_doc_hidden() } pub(crate) fn doc_aliases_in_scope(&self, scope_def: ScopeDef) -> Vec { if let Some(attrs) = scope_def.attrs(self.db) { - attrs.doc_aliases().map(|it| it.as_str().into()).collect() + attrs.doc_aliases(self.db).iter().map(|it| it.as_str().into()).collect() } else { vec![] } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs index 303c71230d606..c526c7f070bff 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs @@ -57,7 +57,8 @@ pub struct CompletionItem { /// Additional info to show in the UI pop up. 
pub detail: Option, - pub documentation: Option, + // FIXME: Make this with `'db` lifetime. + pub documentation: Option>, /// Whether this item is marked as deprecated pub deprecated: bool, @@ -488,7 +489,8 @@ pub(crate) struct Builder { insert_text: Option, is_snippet: bool, detail: Option, - documentation: Option, + // FIXME: Make this with `'db` lifetime. + documentation: Option>, lookup: Option, kind: CompletionItemKind, text_edit: Option, @@ -644,11 +646,11 @@ impl Builder { self } #[allow(unused)] - pub(crate) fn documentation(&mut self, docs: Documentation) -> &mut Builder { + pub(crate) fn documentation(&mut self, docs: Documentation<'_>) -> &mut Builder { self.set_documentation(Some(docs)) } - pub(crate) fn set_documentation(&mut self, docs: Option) -> &mut Builder { - self.documentation = docs; + pub(crate) fn set_documentation(&mut self, docs: Option>) -> &mut Builder { + self.documentation = docs.map(Documentation::into_owned); self } pub(crate) fn set_deprecated(&mut self, deprecated: bool) -> &mut Builder { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs index 094e679501fc2..77a2a3a3a9a02 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs @@ -10,7 +10,7 @@ pub(crate) mod type_alias; pub(crate) mod union_literal; pub(crate) mod variant; -use hir::{AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type, sym}; +use hir::{AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type}; use ide_db::text_edit::TextEdit; use ide_db::{ RootDatabase, SnippetCap, SymbolKind, @@ -91,8 +91,7 @@ impl<'a> RenderContext<'a> { } fn is_deprecated(&self, def: impl HasAttrs) -> bool { - let attrs = def.attrs(self.db()); - attrs.by_key(sym::deprecated).exists() + def.attrs(self.db()).is_deprecated() } fn is_deprecated_assoc_item(&self, as_assoc_item: impl AsAssocItem) -> bool { @@ -115,7 +114,7 @@ impl<'a> RenderContext<'a> { } // FIXME: remove this - fn docs(&self, def: impl HasDocs) -> Option { + fn docs(&self, def: impl HasDocs) -> Option> { def.docs(self.db()) } } @@ -321,7 +320,9 @@ pub(crate) fn render_expr( ); let edit = TextEdit::replace(source_range, snippet); item.snippet_edit(ctx.config.snippet_cap?, edit); - item.documentation(Documentation::new(String::from("Autogenerated expression by term search"))); + item.documentation(Documentation::new_owned(String::from( + "Autogenerated expression by term search", + ))); item.set_relevance(crate::CompletionRelevance { type_match: compute_type_match(ctx, &expr.ty(ctx.db)), ..Default::default() @@ -554,7 +555,7 @@ fn res_to_kind(resolution: ScopeDef) -> CompletionItemKind { } } -fn scope_def_docs(db: &RootDatabase, resolution: ScopeDef) -> Option { +fn scope_def_docs(db: &RootDatabase, resolution: ScopeDef) -> Option> { use hir::ModuleDef::*; match resolution { ScopeDef::ModuleDef(Module(it)) => it.docs(db), diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs index 6c89e49f94e8b..8b14f05b72b2e 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs @@ -180,7 +180,7 @@ impl Variant { } } - fn docs(self, db: &dyn HirDatabase) -> Option { + fn docs(self, db: &dyn HirDatabase) -> Option> { match self { Variant::Struct(it) => it.docs(db), Variant::EnumVariant(it) => it.docs(db), 
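
The completion hunks above and the `documentation.rs` rewrite below move documentation from an always-owned `String` to a `Cow`-backed wrapper with `new_borrowed`/`new_owned`/`into_owned`, so docs that already live in the database are not cloned per completion item. The following is a minimal standalone sketch of that pattern only; the type and method names here (`Docs`) are illustrative and are not the exact rust-analyzer `Documentation<'db>` API.

```rust
use std::borrow::Cow;

/// A documentation string that borrows from its source when possible and
/// only allocates when the text has to be assembled or modified.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Docs<'a>(Cow<'a, str>);

impl<'a> Docs<'a> {
    /// Wrap text that outlives this value; no allocation happens here.
    pub fn new_borrowed(s: &'a str) -> Self {
        Docs(Cow::Borrowed(s))
    }

    /// Wrap text that was built on the fly and must be owned.
    pub fn new_owned(s: String) -> Self {
        Docs(Cow::Owned(s))
    }

    /// Detach from the source lifetime, allocating only if still borrowed.
    pub fn into_owned(self) -> Docs<'static> {
        Docs(Cow::Owned(self.0.into_owned()))
    }

    pub fn as_str(&self) -> &str {
        &self.0
    }
}

fn main() {
    let cached = "Autogenerated expression by term search";
    // Borrowed: the doc text already exists elsewhere, so nothing is copied.
    let borrowed = Docs::new_borrowed(cached);
    // Owned: needed when the text is rendered ad hoc, e.g. a snippet preview.
    let rendered = Docs::new_owned(format!("```rust\n{}\n```", "let x = 1;"));
    assert_eq!(borrowed.as_str(), cached);
    // A struct without a lifetime parameter (like a stored completion item)
    // forces `into_owned`, which is the only point that may allocate.
    let stored: Docs<'static> = rendered.into_owned();
    assert!(stored.as_str().starts_with("```rust"));
}
```

This mirrors the design choice visible in the diff: builders accept `Documentation<'_>` and call `into_owned` only when the item is stored, so the common borrowed path stays allocation-free.
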
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs index 312d3bd426f90..60474a31b4d3e 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs @@ -108,7 +108,7 @@ fn build_completion( label: SmolStr, lookup: SmolStr, pat: String, - def: impl HasDocs + Copy, + def: impl HasDocs, adt_ty: hir::Type<'_>, // Missing in context of match statement completions is_variant_missing: bool, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs index 42324b4290a77..967b9091b5cac 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs @@ -1,7 +1,7 @@ //! Code common to structs, unions, and enum variants. use crate::context::CompletionContext; -use hir::{HasAttrs, HasCrate, HasVisibility, HirDisplay, StructKind, sym}; +use hir::{HasAttrs, HasCrate, HasVisibility, HirDisplay, StructKind}; use ide_db::SnippetCap; use itertools::Itertools; use syntax::SmolStr; @@ -96,8 +96,8 @@ pub(crate) fn visible_fields( .copied() .collect::>(); let has_invisible_field = n_fields - fields.len() > 0; - let is_foreign_non_exhaustive = item.attrs(ctx.db).by_key(sym::non_exhaustive).exists() - && item.krate(ctx.db) != module.krate(); + let is_foreign_non_exhaustive = + item.attrs(ctx.db).is_non_exhaustive() && item.krate(ctx.db) != module.krate(); let fields_omitted = has_invisible_field || is_foreign_non_exhaustive; Some((fields, fields_omitted)) } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs index b32a895457268..36d739455030d 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs @@ -160,12 +160,12 @@ pub(crate) fn position( #[rust_analyzer::rust_fixture] ra_fixture: &str, ) -> (RootDatabase, FilePosition) { let mut database = RootDatabase::default(); - let change_fixture = ChangeFixture::parse(&database, ra_fixture); + let change_fixture = ChangeFixture::parse(ra_fixture); database.enable_proc_attr_macros(); database.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); let offset = range_or_offset.expect_offset(); - let position = FilePosition { file_id: file_id.file_id(&database), offset }; + let position = FilePosition { file_id: file_id.file_id(), offset }; (database, position) } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs index c051fd863de6f..9ce85b2bf3304 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs @@ -5,8 +5,10 @@ // FIXME: this badly needs rename/rewrite (matklad, 2020-02-06). 
+use std::borrow::Cow; + use crate::RootDatabase; -use crate::documentation::{DocsRangeMap, Documentation, HasDocs}; +use crate::documentation::{Documentation, HasDocs}; use crate::famous_defs::FamousDefs; use arrayvec::ArrayVec; use either::Either; @@ -21,7 +23,7 @@ use hir::{ use span::Edition; use stdx::{format_to, impl_from}; use syntax::{ - SyntaxKind, SyntaxNode, SyntaxToken, TextSize, + SyntaxKind, SyntaxNode, SyntaxToken, ast::{self, AstNode}, match_ast, }; @@ -199,21 +201,25 @@ impl Definition { Some(name) } - pub fn docs( + pub fn docs<'db>( &self, - db: &RootDatabase, + db: &'db RootDatabase, famous_defs: Option<&FamousDefs<'_, '_>>, display_target: DisplayTarget, - ) -> Option { - self.docs_with_rangemap(db, famous_defs, display_target).map(|(docs, _)| docs) + ) -> Option> { + self.docs_with_rangemap(db, famous_defs, display_target).map(|docs| match docs { + Either::Left(Cow::Borrowed(docs)) => Documentation::new_borrowed(docs.docs()), + Either::Left(Cow::Owned(docs)) => Documentation::new_owned(docs.into_docs()), + Either::Right(docs) => docs, + }) } - pub fn docs_with_rangemap( + pub fn docs_with_rangemap<'db>( &self, - db: &RootDatabase, + db: &'db RootDatabase, famous_defs: Option<&FamousDefs<'_, '_>>, display_target: DisplayTarget, - ) -> Option<(Documentation, Option)> { + ) -> Option, Documentation<'db>>> { let docs = match self { Definition::Macro(it) => it.docs_with_rangemap(db), Definition::Field(it) => it.docs_with_rangemap(db), @@ -229,15 +235,13 @@ impl Definition { it.docs_with_rangemap(db).or_else(|| { // docs are missing, try to fall back to the docs of the aliased item. let adt = it.ty(db).as_adt()?; - let (docs, range_map) = adt.docs_with_rangemap(db)?; + let mut docs = adt.docs_with_rangemap(db)?.into_owned(); let header_docs = format!( "*This is the documentation for* `{}`\n\n", adt.display(db, display_target) ); - let offset = TextSize::new(header_docs.len() as u32); - let range_map = range_map.shift_docstring_line_range(offset); - let docs = header_docs + docs.as_str(); - Some((Documentation::new(docs), range_map)) + docs.prepend_str(&header_docs); + Some(Cow::Owned(docs)) }) } Definition::BuiltinType(it) => { @@ -246,7 +250,7 @@ impl Definition { let primitive_mod = format!("prim_{}", it.name().display(fd.0.db, display_target.edition)); let doc_owner = find_std_module(fd, &primitive_mod, display_target.edition)?; - doc_owner.docs_with_rangemap(fd.0.db) + doc_owner.docs_with_rangemap(db) }) } Definition::BuiltinLifetime(StaticLifetime) => None, @@ -282,7 +286,7 @@ impl Definition { ); } - return Some((Documentation::new(docs.replace('*', "\\*")), None)); + return Some(Either::Right(Documentation::new_owned(docs.replace('*', "\\*")))); } Definition::ToolModule(_) => None, Definition::DeriveHelper(_) => None, @@ -299,7 +303,7 @@ impl Definition { let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?; item.docs_with_rangemap(db) }) - .map(|(docs, range_map)| (docs, Some(range_map))) + .map(Either::Left) } pub fn label(&self, db: &RootDatabase, display_target: DisplayTarget) -> String { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs b/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs index cab19aadfd010..4c4691cca2ca1 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs @@ -1,337 +1,100 @@ //! Documentation attribute related utilities. 
-use either::Either; -use hir::{ - AttrId, AttrSourceMap, AttrsWithOwner, HasAttrs, InFile, - db::{DefDatabase, HirDatabase}, - resolve_doc_path_on, sym, -}; -use itertools::Itertools; -use span::{TextRange, TextSize}; -use syntax::{ - AstToken, - ast::{self, IsString}, -}; +use std::borrow::Cow; + +use hir::{HasAttrs, db::HirDatabase, resolve_doc_path_on}; /// Holds documentation #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct Documentation(String); +pub struct Documentation<'db>(Cow<'db, str>); + +impl<'db> Documentation<'db> { + #[inline] + pub fn new_owned(s: String) -> Self { + Documentation(Cow::Owned(s)) + } -impl Documentation { - pub fn new(s: String) -> Self { - Documentation(s) + #[inline] + pub fn new_borrowed(s: &'db str) -> Self { + Documentation(Cow::Borrowed(s)) } + #[inline] + pub fn into_owned(self) -> Documentation<'static> { + Documentation::new_owned(self.0.into_owned()) + } + + #[inline] pub fn as_str(&self) -> &str { &self.0 } } -impl From for String { - fn from(Documentation(string): Documentation) -> Self { - string +pub trait HasDocs: HasAttrs + Copy { + fn docs(self, db: &dyn HirDatabase) -> Option> { + let docs = match self.docs_with_rangemap(db)? { + Cow::Borrowed(docs) => Documentation::new_borrowed(docs.docs()), + Cow::Owned(docs) => Documentation::new_owned(docs.into_docs()), + }; + Some(docs) + } + fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option> { + self.hir_docs(db).map(Cow::Borrowed) } -} - -pub trait HasDocs: HasAttrs { - fn docs(self, db: &dyn HirDatabase) -> Option; - fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<(Documentation, DocsRangeMap)>; fn resolve_doc_path( self, db: &dyn HirDatabase, link: &str, ns: Option, - is_inner_doc: bool, - ) -> Option; -} -/// A struct to map text ranges from [`Documentation`] back to TextRanges in the syntax tree. -#[derive(Debug)] -pub struct DocsRangeMap { - source_map: AttrSourceMap, - // (docstring-line-range, attr_index, attr-string-range) - // a mapping from the text range of a line of the [`Documentation`] to the attribute index and - // the original (untrimmed) syntax doc line - mapping: Vec<(TextRange, AttrId, TextRange)>, -} - -impl DocsRangeMap { - /// Maps a [`TextRange`] relative to the documentation string back to its AST range - pub fn map(&self, range: TextRange) -> Option<(InFile, AttrId)> { - let found = self.mapping.binary_search_by(|(probe, ..)| probe.ordering(range)).ok()?; - let (line_docs_range, idx, original_line_src_range) = self.mapping[found]; - if !line_docs_range.contains_range(range) { - return None; - } - - let relative_range = range - line_docs_range.start(); - - let InFile { file_id, value: source } = self.source_map.source_of_id(idx); - match source { - Either::Left(attr) => { - let string = get_doc_string_in_attr(attr)?; - let text_range = string.open_quote_text_range()?; - let range = TextRange::at( - text_range.end() + original_line_src_range.start() + relative_range.start(), - string.syntax().text_range().len().min(range.len()), - ); - Some((InFile { file_id, value: range }, idx)) - } - Either::Right(comment) => { - let text_range = comment.syntax().text_range(); - let range = TextRange::at( - text_range.start() - + TextSize::try_from(comment.prefix().len()).ok()? 
- + original_line_src_range.start() - + relative_range.start(), - text_range.len().min(range.len()), - ); - Some((InFile { file_id, value: range }, idx)) - } - } - } - - pub fn shift_docstring_line_range(self, offset: TextSize) -> DocsRangeMap { - let mapping = self - .mapping - .into_iter() - .map(|(buf_offset, id, base_offset)| { - let buf_offset = buf_offset.checked_add(offset).unwrap(); - (buf_offset, id, base_offset) - }) - .collect_vec(); - DocsRangeMap { source_map: self.source_map, mapping } - } -} - -pub fn docs_with_rangemap( - db: &dyn DefDatabase, - attrs: &AttrsWithOwner, -) -> Option<(Documentation, DocsRangeMap)> { - let docs = attrs - .by_key(sym::doc) - .attrs() - .filter_map(|attr| attr.string_value_unescape().map(|s| (s, attr.id))); - let indent = doc_indent(attrs); - let mut buf = String::new(); - let mut mapping = Vec::new(); - for (doc, idx) in docs { - if !doc.is_empty() { - let mut base_offset = 0; - for raw_line in doc.split('\n') { - let line = raw_line.trim_end(); - let line_len = line.len(); - let (offset, line) = match line.char_indices().nth(indent) { - Some((offset, _)) => (offset, &line[offset..]), - None => (0, line), - }; - let buf_offset = buf.len(); - buf.push_str(line); - mapping.push(( - TextRange::new(buf_offset.try_into().ok()?, buf.len().try_into().ok()?), - idx, - TextRange::at( - (base_offset + offset).try_into().ok()?, - line_len.try_into().ok()?, - ), - )); - buf.push('\n'); - base_offset += raw_line.len() + 1; - } - } else { - buf.push('\n'); - } - } - buf.pop(); - if buf.is_empty() { - None - } else { - Some((Documentation(buf), DocsRangeMap { mapping, source_map: attrs.source_map(db) })) - } -} - -pub fn docs_from_attrs(attrs: &hir::Attrs) -> Option { - let docs = attrs.by_key(sym::doc).attrs().filter_map(|attr| attr.string_value_unescape()); - let indent = doc_indent(attrs); - let mut buf = String::new(); - for doc in docs { - // str::lines doesn't yield anything for the empty string - if !doc.is_empty() { - // We don't trim trailing whitespace from doc comments as multiple trailing spaces - // indicates a hard line break in Markdown. - let lines = doc.lines().map(|line| { - line.char_indices().nth(indent).map_or(line, |(offset, _)| &line[offset..]) - }); - - buf.extend(Itertools::intersperse(lines, "\n")); - } - buf.push('\n'); + is_inner_doc: hir::IsInnerDoc, + ) -> Option { + resolve_doc_path_on(db, self, link, ns, is_inner_doc) } - buf.pop(); - if buf.is_empty() { None } else { Some(buf) } } macro_rules! impl_has_docs { ($($def:ident,)*) => {$( - impl HasDocs for hir::$def { - fn docs(self, db: &dyn HirDatabase) -> Option { - docs_from_attrs(&self.attrs(db)).map(Documentation) - } - fn docs_with_rangemap( - self, - db: &dyn HirDatabase, - ) -> Option<(Documentation, DocsRangeMap)> { - docs_with_rangemap(db, &self.attrs(db)) - } - fn resolve_doc_path( - self, - db: &dyn HirDatabase, - link: &str, - ns: Option, - is_inner_doc: bool, - ) -> Option { - resolve_doc_path_on(db, self, link, ns, is_inner_doc) - } - } + impl HasDocs for hir::$def {} )*}; } impl_has_docs![ Variant, Field, Static, Const, Trait, TypeAlias, Macro, Function, Adt, Module, Impl, Crate, + AssocItem, Struct, Union, Enum, ]; -macro_rules! 
impl_has_docs_enum { - ($($variant:ident),* for $enum:ident) => {$( - impl HasDocs for hir::$variant { - fn docs(self, db: &dyn HirDatabase) -> Option { - hir::$enum::$variant(self).docs(db) - } - - fn docs_with_rangemap( - self, - db: &dyn HirDatabase, - ) -> Option<(Documentation, DocsRangeMap)> { - hir::$enum::$variant(self).docs_with_rangemap(db) - } - fn resolve_doc_path( - self, - db: &dyn HirDatabase, - link: &str, - ns: Option, - is_inner_doc: bool, - ) -> Option { - hir::$enum::$variant(self).resolve_doc_path(db, link, ns, is_inner_doc) - } - } - )*}; -} - -impl_has_docs_enum![Struct, Union, Enum for Adt]; - -impl HasDocs for hir::AssocItem { - fn docs(self, db: &dyn HirDatabase) -> Option { - match self { - hir::AssocItem::Function(it) => it.docs(db), - hir::AssocItem::Const(it) => it.docs(db), - hir::AssocItem::TypeAlias(it) => it.docs(db), - } - } - - fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<(Documentation, DocsRangeMap)> { - match self { - hir::AssocItem::Function(it) => it.docs_with_rangemap(db), - hir::AssocItem::Const(it) => it.docs_with_rangemap(db), - hir::AssocItem::TypeAlias(it) => it.docs_with_rangemap(db), - } - } - - fn resolve_doc_path( - self, - db: &dyn HirDatabase, - link: &str, - ns: Option, - is_inner_doc: bool, - ) -> Option { - match self { - hir::AssocItem::Function(it) => it.resolve_doc_path(db, link, ns, is_inner_doc), - hir::AssocItem::Const(it) => it.resolve_doc_path(db, link, ns, is_inner_doc), - hir::AssocItem::TypeAlias(it) => it.resolve_doc_path(db, link, ns, is_inner_doc), - } - } -} - impl HasDocs for hir::ExternCrateDecl { - fn docs(self, db: &dyn HirDatabase) -> Option { - let crate_docs = docs_from_attrs(&self.resolved_crate(db)?.root_module().attrs(db)); - let decl_docs = docs_from_attrs(&self.attrs(db)); + fn docs(self, db: &dyn HirDatabase) -> Option> { + let crate_docs = self.resolved_crate(db)?.hir_docs(db); + let decl_docs = self.hir_docs(db); match (decl_docs, crate_docs) { (None, None) => None, - (Some(decl_docs), None) => Some(decl_docs), - (None, Some(crate_docs)) => Some(crate_docs), - (Some(mut decl_docs), Some(crate_docs)) => { - decl_docs.push('\n'); - decl_docs.push('\n'); - decl_docs += &crate_docs; - Some(decl_docs) + (Some(docs), None) | (None, Some(docs)) => { + Some(Documentation::new_borrowed(docs.docs())) + } + (Some(decl_docs), Some(crate_docs)) => { + let mut docs = String::with_capacity( + decl_docs.docs().len() + "\n\n".len() + crate_docs.docs().len(), + ); + docs.push_str(decl_docs.docs()); + docs.push_str("\n\n"); + docs.push_str(crate_docs.docs()); + Some(Documentation::new_owned(docs)) } } - .map(Documentation::new) } - fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<(Documentation, DocsRangeMap)> { - let crate_docs = docs_with_rangemap(db, &self.resolved_crate(db)?.root_module().attrs(db)); - let decl_docs = docs_with_rangemap(db, &self.attrs(db)); + fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option> { + let crate_docs = self.resolved_crate(db)?.hir_docs(db); + let decl_docs = self.hir_docs(db); match (decl_docs, crate_docs) { (None, None) => None, - (Some(decl_docs), None) => Some(decl_docs), - (None, Some(crate_docs)) => Some(crate_docs), - ( - Some((Documentation(mut decl_docs), mut decl_range_map)), - Some((Documentation(crate_docs), crate_range_map)), - ) => { - decl_docs.push('\n'); - decl_docs.push('\n'); - let offset = TextSize::new(decl_docs.len() as u32); - decl_docs += &crate_docs; - let crate_range_map = crate_range_map.shift_docstring_line_range(offset); - 
decl_range_map.mapping.extend(crate_range_map.mapping); - Some((Documentation(decl_docs), decl_range_map)) + (Some(docs), None) | (None, Some(docs)) => Some(Cow::Borrowed(docs)), + (Some(decl_docs), Some(crate_docs)) => { + let mut docs = decl_docs.clone(); + docs.append_str("\n\n"); + docs.append(crate_docs); + Some(Cow::Owned(docs)) } } } - fn resolve_doc_path( - self, - db: &dyn HirDatabase, - link: &str, - ns: Option, - is_inner_doc: bool, - ) -> Option { - resolve_doc_path_on(db, self, link, ns, is_inner_doc) - } -} - -fn get_doc_string_in_attr(it: &ast::Attr) -> Option { - match it.expr() { - // #[doc = lit] - Some(ast::Expr::Literal(lit)) => match lit.kind() { - ast::LiteralKind::String(it) => Some(it), - _ => None, - }, - // #[cfg_attr(..., doc = "", ...)] - None => { - // FIXME: See highlight injection for what to do here - None - } - _ => None, - } -} - -fn doc_indent(attrs: &hir::Attrs) -> usize { - let mut min = !0; - for val in attrs.by_key(sym::doc).attrs().filter_map(|attr| attr.string_value_unescape()) { - if let Some(m) = - val.lines().filter_map(|line| line.chars().position(|c| !c.is_whitespace())).min() - { - min = min.min(m); - } - } - min } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/ra_fixture.rs b/src/tools/rust-analyzer/crates/ide-db/src/ra_fixture.rs index 1f056a835bc62..cd86e7765196c 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/ra_fixture.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/ra_fixture.rs @@ -25,18 +25,14 @@ impl RootDatabase { // We don't want a mistake in the fixture to crash r-a, so we wrap this in `catch_unwind()`. std::panic::catch_unwind(|| { let mut db = RootDatabase::default(); - let fixture = test_fixture::ChangeFixture::parse_with_proc_macros( - &db, - text, - minicore.0, - Vec::new(), - ); + let fixture = + test_fixture::ChangeFixture::parse_with_proc_macros(text, minicore.0, Vec::new()); db.apply_change(fixture.change); let files = fixture .files .into_iter() .zip(fixture.file_lines) - .map(|(file_id, range)| (file_id.file_id(&db), range)) + .map(|(file_id, range)| (file_id.file_id(), range)) .collect(); (db, files, fixture.sysroot_files) }) @@ -525,7 +521,7 @@ impl_empty_upmap_from_ra_fixture!( &str, String, SmolStr, - Documentation, + Documentation<'_>, SymbolKind, CfgExpr, ReferenceCategory, diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rust_doc.rs b/src/tools/rust-analyzer/crates/ide-db/src/rust_doc.rs index eacd9b9b4d2f6..36a6938af6b82 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/rust_doc.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/rust_doc.rs @@ -33,7 +33,7 @@ pub fn is_rust_fence(s: &str) -> bool { const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"]; -pub fn format_docs(src: &Documentation) -> String { +pub fn format_docs(src: &Documentation<'_>) -> String { format_docs_(src.as_str()) } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs index f1d076e874d5c..8b53cea7e6d38 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs @@ -12,7 +12,7 @@ use either::Either; use hir::{ Adt, AsAssocItem, DefWithBody, EditionedFileId, FileRange, FileRangeWrapper, HasAttrs, HasContainer, HasSource, InFile, InFileWrapper, InRealFile, InlineAsmOperand, ItemContainer, - ModuleSource, PathResolution, Semantics, Visibility, sym, + ModuleSource, PathResolution, Semantics, Visibility, }; use memchr::memmem::Finder; use parser::SyntaxKind; @@ -169,7 +169,7 @@ impl SearchScope { 
entries.extend( source_root .iter() - .map(|id| (EditionedFileId::new(db, id, crate_data.edition), None)), + .map(|id| (EditionedFileId::new(db, id, crate_data.edition, krate), None)), ); } SearchScope { entries } @@ -183,11 +183,9 @@ impl SearchScope { let source_root = db.file_source_root(root_file).source_root_id(db); let source_root = db.source_root(source_root).source_root(db); - entries.extend( - source_root - .iter() - .map(|id| (EditionedFileId::new(db, id, rev_dep.edition(db)), None)), - ); + entries.extend(source_root.iter().map(|id| { + (EditionedFileId::new(db, id, rev_dep.edition(db), rev_dep.into()), None) + })); } SearchScope { entries } } @@ -201,7 +199,7 @@ impl SearchScope { SearchScope { entries: source_root .iter() - .map(|id| (EditionedFileId::new(db, id, of.edition(db)), None)) + .map(|id| (EditionedFileId::new(db, id, of.edition(db), of.into()), None)) .collect(), } } @@ -368,7 +366,7 @@ impl Definition { if let Definition::Macro(macro_def) = self { return match macro_def.kind(db) { hir::MacroKind::Declarative => { - if macro_def.attrs(db).by_key(sym::macro_export).exists() { + if macro_def.attrs(db).is_macro_export() { SearchScope::reverse_dependencies(db, module.krate()) } else { SearchScope::krate(db, module.krate()) diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt index 30d1df4f8e554..427a510559486 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt +++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt @@ -3,7 +3,7 @@ Module { id: ModuleId { krate: Crate( - Id(3000), + Id(2c00), ), block: None, local_id: Idx::(0), @@ -16,7 +16,7 @@ Struct( Struct { id: StructId( - 3401, + 3801, ), }, ), @@ -24,7 +24,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -50,7 +50,7 @@ Struct( Struct { id: StructId( - 3400, + 3800, ), }, ), @@ -58,7 +58,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -84,7 +84,7 @@ Struct( Struct { id: StructId( - 3400, + 3800, ), }, ), @@ -92,7 +92,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -118,7 +118,7 @@ Struct( Struct { id: StructId( - 3400, + 3800, ), }, ), @@ -126,7 +126,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -152,7 +152,7 @@ Struct( Struct { id: StructId( - 3400, + 3800, ), }, ), @@ -160,7 +160,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -186,7 +186,7 @@ Struct( Struct { id: StructId( - 3401, + 3801, ), }, ), @@ -194,7 +194,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -220,7 +220,7 @@ Struct( Struct { id: StructId( - 3400, + 3800, ), }, ), @@ -228,7 +228,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt index 973256c470f34..ce93fa59e258e 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt +++ 
b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt @@ -3,7 +3,7 @@ Module { id: ModuleId { krate: Crate( - Id(3000), + Id(2c00), ), block: None, local_id: Idx::(0), @@ -22,7 +22,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -49,14 +49,14 @@ def: TypeAlias( TypeAlias { id: TypeAliasId( - 6800, + 6c00, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -88,7 +88,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -115,14 +115,14 @@ def: Const( Const { id: ConstId( - 6000, + 6400, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -147,14 +147,14 @@ def: Const( Const { id: ConstId( - 6002, + 6402, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -180,7 +180,7 @@ Enum( Enum { id: EnumId( - 4c00, + 5000, ), }, ), @@ -188,7 +188,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -214,7 +214,7 @@ Macro { id: Macro2Id( Macro2Id( - 4800, + 4c00, ), ), }, @@ -222,7 +222,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -248,7 +248,7 @@ Macro { id: Macro2Id( Macro2Id( - 4800, + 4c00, ), ), }, @@ -256,7 +256,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -281,14 +281,14 @@ def: Static( Static { id: StaticId( - 6400, + 6800, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -314,7 +314,7 @@ Struct( Struct { id: StructId( - 4401, + 4801, ), }, ), @@ -322,7 +322,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -348,7 +348,7 @@ Struct( Struct { id: StructId( - 4400, + 4800, ), }, ), @@ -356,7 +356,7 @@ loc: DeclarationLocation { hir_file_id: MacroFile( MacroCallId( - Id(3800), + Id(3c00), ), ), ptr: SyntaxNodePtr { @@ -382,7 +382,7 @@ Struct( Struct { id: StructId( - 4405, + 4805, ), }, ), @@ -390,7 +390,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -418,7 +418,7 @@ Struct( Struct { id: StructId( - 4406, + 4806, ), }, ), @@ -426,7 +426,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -454,7 +454,7 @@ Struct( Struct { id: StructId( - 4407, + 4807, ), }, ), @@ -462,7 +462,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -488,7 +488,7 @@ Struct( Struct { id: StructId( - 4402, + 4802, ), }, ), @@ -496,7 +496,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -521,14 +521,14 @@ def: Trait( Trait { id: TraitId( - 5800, + 5c00, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -554,7 +554,7 @@ Macro { id: Macro2Id( Macro2Id( - 4800, + 4c00, ), ), }, @@ -562,7 +562,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), 
), ptr: SyntaxNodePtr { @@ -588,7 +588,7 @@ Union( Union { id: UnionId( - 5000, + 5400, ), }, ), @@ -596,7 +596,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -622,7 +622,7 @@ Module { id: ModuleId { krate: Crate( - Id(3000), + Id(2c00), ), block: None, local_id: Idx::(1), @@ -632,7 +632,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -658,7 +658,7 @@ Module { id: ModuleId { krate: Crate( - Id(3000), + Id(2c00), ), block: None, local_id: Idx::(2), @@ -668,7 +668,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -694,7 +694,7 @@ Macro { id: MacroRulesId( MacroRulesId( - 3401, + 3801, ), ), }, @@ -702,7 +702,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -727,14 +727,14 @@ def: Function( Function { id: FunctionId( - 5c02, + 6002, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -761,14 +761,14 @@ def: Function( Function { id: FunctionId( - 5c01, + 6001, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -796,7 +796,7 @@ Macro { id: MacroRulesId( MacroRulesId( - 3400, + 3800, ), ), }, @@ -804,7 +804,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -829,14 +829,14 @@ def: Function( Function { id: FunctionId( - 5c00, + 6000, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -862,7 +862,7 @@ Macro { id: MacroRulesId( MacroRulesId( - 3401, + 3801, ), ), }, @@ -870,7 +870,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -895,14 +895,14 @@ def: Function( Function { id: FunctionId( - 5c03, + 6003, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -930,7 +930,7 @@ Module { id: ModuleId { krate: Crate( - Id(3000), + Id(2c00), ), block: None, local_id: Idx::(1), @@ -943,7 +943,7 @@ Struct( Struct { id: StructId( - 4403, + 4803, ), }, ), @@ -951,7 +951,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { @@ -977,7 +977,7 @@ Module { id: ModuleId { krate: Crate( - Id(3000), + Id(2c00), ), block: None, local_id: Idx::(2), @@ -989,14 +989,14 @@ def: Trait( Trait { id: TraitId( - 5800, + 5c00, ), }, ), loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2001), + Id(3001), ), ), ptr: SyntaxNodePtr { @@ -1022,7 +1022,7 @@ Macro { id: Macro2Id( Macro2Id( - 4800, + 4c00, ), ), }, @@ -1030,7 +1030,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2001), + Id(3001), ), ), ptr: SyntaxNodePtr { @@ -1056,7 +1056,7 @@ Struct( Struct { id: StructId( - 4404, + 4804, ), }, ), @@ -1064,7 +1064,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2001), + Id(3001), ), ), ptr: SyntaxNodePtr { @@ -1090,7 +1090,7 @@ Macro { id: Macro2Id( Macro2Id( - 4800, + 4c00, ), ), }, @@ -1098,7 +1098,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2001), + Id(3001), ), ), ptr: SyntaxNodePtr { @@ -1124,7 +1124,7 @@ Struct( Struct 
{ id: StructId( - 4404, + 4804, ), }, ), @@ -1132,7 +1132,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2001), + Id(3001), ), ), ptr: SyntaxNodePtr { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt index 22872b577f712..3ab837aa613f1 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt +++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt @@ -13,7 +13,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2001), + Id(3001), ), ), ptr: SyntaxNodePtr { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_with_imports.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_with_imports.txt index 9f98bf87e2e8d..a6a808d616a7a 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_with_imports.txt +++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_with_imports.txt @@ -13,7 +13,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2001), + Id(3001), ), ), ptr: SyntaxNodePtr { @@ -47,7 +47,7 @@ loc: DeclarationLocation { hir_file_id: FileId( EditionedFileId( - Id(2000), + Id(3000), ), ), ptr: SyntaxNodePtr { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/traits.rs b/src/tools/rust-analyzer/crates/ide-db/src/traits.rs index 61e28386d0721..7b9fdb1e1cf3b 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/traits.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/traits.rs @@ -114,8 +114,7 @@ fn assoc_item_of_trait( #[cfg(test)] mod tests { use expect_test::{Expect, expect}; - use hir::FilePosition; - use hir::Semantics; + use hir::{EditionedFileId, FilePosition, Semantics}; use span::Edition; use syntax::ast::{self, AstNode}; use test_fixture::ChangeFixture; @@ -127,10 +126,11 @@ mod tests { #[rust_analyzer::rust_fixture] ra_fixture: &str, ) -> (RootDatabase, FilePosition) { let mut database = RootDatabase::default(); - let change_fixture = ChangeFixture::parse(&database, ra_fixture); + let change_fixture = ChangeFixture::parse(ra_fixture); database.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); + let file_id = EditionedFileId::from_span_guess_origin(&database, file_id); let offset = range_or_offset.expect_offset(); (database, FilePosition { file_id, offset }) } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs index 8611ef653b02d..dfa9639f6eb90 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs @@ -95,7 +95,7 @@ fn f() { //^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no is disabled #[cfg(no)] #[cfg(no2)] mod m; - //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no and no2 are disabled + //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no is disabled #[cfg(all(not(a), b))] enum E {} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: b is disabled @@ -130,7 +130,6 @@ trait Bar { /// Tests that `cfg` attributes behind `cfg_attr` is handled properly. 
#[test] fn inactive_via_cfg_attr() { - cov_mark::check!(cfg_attr_active); check( r#" #[cfg_attr(not(never), cfg(no))] fn f() {} diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs index 8b708f229d009..9aa7aed16964d 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs @@ -8,7 +8,7 @@ pub(crate) fn invalid_derive_target( ctx: &DiagnosticsContext<'_>, d: &hir::InvalidDeriveTarget, ) -> Diagnostic { - let display_range = ctx.sema.diagnostics_display_range(d.node); + let display_range = ctx.sema.diagnostics_display_range_for_range(d.range); Diagnostic::new( DiagnosticCode::RustcHardError("E0774"), @@ -29,7 +29,7 @@ mod tests { //- minicore:derive mod __ { #[derive()] - //^^^^^^^^^^^ error: `derive` may only be applied to `struct`s, `enum`s and `union`s + // ^^^^^^ error: `derive` may only be applied to `struct`s, `enum`s and `union`s fn main() {} } "#, diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs index 6a1ecae651501..a44b043f433c6 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs @@ -13,7 +13,7 @@ use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, Severity}; // This diagnostic is shown for proc macros that have been specifically disabled via `rust-analyzer.procMacro.ignored`. pub(crate) fn macro_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroError) -> Diagnostic { // Use more accurate position if available. - let display_range = ctx.resolve_precise_location(&d.node, d.precise_location); + let display_range = ctx.sema.diagnostics_display_range_for_range(d.range); Diagnostic::new( DiagnosticCode::Ra(d.kind, if d.error { Severity::Error } else { Severity::WeakWarning }), d.message.clone(), @@ -27,8 +27,10 @@ pub(crate) fn macro_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroError) -> // This diagnostic is shown for macro expansion errors. pub(crate) fn macro_def_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroDefError) -> Diagnostic { // Use more accurate position if available. - let display_range = - ctx.resolve_precise_location(&d.node.map(|it| it.syntax_node_ptr()), d.name); + let display_range = match d.name { + Some(name) => ctx.sema.diagnostics_display_range_for_range(d.node.with_value(name)), + None => ctx.sema.diagnostics_display_range(d.node.map(|it| it.syntax_node_ptr())), + }; Diagnostic::new( DiagnosticCode::Ra("macro-def-error", Severity::Error), d.message.clone(), @@ -135,10 +137,12 @@ macro_rules! env { () => {} } #[rustc_builtin_macro] macro_rules! 
concat { () => {} } - include!(concat!(env!("OUT_DIR"), "/out.rs")); - //^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run - //^^^^^^^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run - //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run + include!(concat!( + // ^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run + env!( + //^^^ error: `OUT_DIR` not set, build scripts may have failed to run + "OUT_DIR"), "/out.rs")); + //^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run "#, ); } @@ -182,7 +186,7 @@ fn main() { //^^^^^^^^^^^^^^^^ error: failed to load file `does not exist` include!(concat!("does ", "not ", "exist")); - //^^^^^^^^^^^^^^^^^^^^^^^^^^ error: failed to load file `does not exist` + // ^^^^^^^^^^^^^^^^^^^^^^^^ error: failed to load file `does not exist` env!(invalid); //^^^^^^^ error: expected string literal @@ -289,7 +293,7 @@ include!("include-me.rs"); //- /include-me.rs /// long doc that pushes the diagnostic range beyond the first file's text length #[err] -//^^^^^^error: unresolved macro `err` + // ^^^ error: unresolved macro `err` mod prim_never {} "#, ); diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs index 701b30b9b593d..7d0c71f4fa7c1 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs @@ -7,7 +7,7 @@ pub(crate) fn malformed_derive( ctx: &DiagnosticsContext<'_>, d: &hir::MalformedDerive, ) -> Diagnostic { - let display_range = ctx.sema.diagnostics_display_range(d.node); + let display_range = ctx.sema.diagnostics_display_range_for_range(d.range); Diagnostic::new( DiagnosticCode::RustcHardError("E0777"), @@ -28,7 +28,7 @@ mod tests { //- minicore:derive mod __ { #[derive = "aaaa"] - //^^^^^^^^^^^^^^^^^^ error: malformed derive input, derive attributes are of the form `#[derive(Derive1, Derive2, ...)]` + // ^^^^^^^^^^^^^^^ error: malformed derive input, derive attributes are of the form `#[derive(Derive1, Derive2, ...)]` struct Foo; } "#, diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs index a87b8c42ac1d0..030c82ca0ba79 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs @@ -8,8 +8,7 @@ pub(crate) fn unresolved_macro_call( ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMacroCall, ) -> Diagnostic { - // Use more accurate position if available. - let display_range = ctx.resolve_precise_location(&d.macro_call, d.precise_location); + let display_range = ctx.sema.diagnostics_display_range_for_range(d.range); let bang = if d.is_bang { "!" 
} else { "" }; Diagnostic::new( DiagnosticCode::RustcHardError("unresolved-macro-call"), @@ -76,7 +75,7 @@ self::m!(); self::m2!(); r#" mod _test_inner { #![empty_attr] - //^^^^^^^^^^^^^^ error: unresolved macro `empty_attr` + // ^^^^^^^^^^ error: unresolved macro `empty_attr` } "#, ); diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs index 1530e64652464..5c8f030de4def 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs @@ -102,7 +102,7 @@ use ide_db::{ use itertools::Itertools; use syntax::{ AstPtr, Edition, NodeOrToken, SmolStr, SyntaxKind, SyntaxNode, SyntaxNodePtr, T, TextRange, - ast::{self, AstNode, HasAttrs}, + ast::{self, AstNode}, }; // FIXME: Make this an enum @@ -277,31 +277,6 @@ struct DiagnosticsContext<'a> { is_nightly: bool, } -impl DiagnosticsContext<'_> { - fn resolve_precise_location( - &self, - node: &InFile, - precise_location: Option, - ) -> FileRange { - let sema = &self.sema; - (|| { - let precise_location = precise_location?; - let root = sema.parse_or_expand(node.file_id); - match root.covering_element(precise_location) { - syntax::NodeOrToken::Node(it) => Some(sema.original_range(&it)), - syntax::NodeOrToken::Token(it) => { - node.with_value(it).original_file_range_opt(sema.db) - } - } - })() - .map(|frange| ide_db::FileRange { - file_id: frange.file_id.file_id(self.sema.db), - range: frange.range, - }) - .unwrap_or_else(|| sema.diagnostics_display_range(*node)) - } -} - /// Request parser level diagnostics for the given [`FileId`]. pub fn syntax_diagnostics( db: &RootDatabase, @@ -317,7 +292,7 @@ pub fn syntax_diagnostics( let sema = Semantics::new(db); let editioned_file_id = sema .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id)); + .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id)); let (file_id, _) = editioned_file_id.unpack(db); @@ -348,7 +323,7 @@ pub fn semantic_diagnostics( let sema = Semantics::new(db); let editioned_file_id = sema .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id)); + .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id)); let (file_id, edition) = editioned_file_id.unpack(db); let mut res = Vec::new(); @@ -426,7 +401,7 @@ pub fn semantic_diagnostics( Diagnostic::new( DiagnosticCode::SyntaxError, format!("Syntax Error in Expansion: {err}"), - ctx.resolve_precise_location(&d.node.clone(), d.precise_location), + ctx.sema.diagnostics_display_range_for_range(d.range), ) })); continue; @@ -677,7 +652,7 @@ fn find_outline_mod_lint_severity( let lint_groups = lint_groups(&diag.code, edition); lint_attrs( sema, - ast::AnyHasAttrs::cast(module_source_file.value).expect("SourceFile always has attrs"), + &ast::AnyHasAttrs::cast(module_source_file.value).expect("SourceFile always has attrs"), edition, ) .for_each(|(lint, severity)| { @@ -698,7 +673,7 @@ fn lint_severity_at( .ancestors() .filter_map(ast::AnyHasAttrs::cast) .find_map(|ancestor| { - lint_attrs(sema, ancestor, edition) + lint_attrs(sema, &ancestor, edition) .find_map(|(lint, severity)| lint_groups.contains(&lint).then_some(severity)) }) .or_else(|| { @@ -706,13 +681,13 @@ fn lint_severity_at( }) } +// FIXME: Switch this to analysis' `expand_cfg_attr`. 
fn lint_attrs<'a>( sema: &'a Semantics<'a, RootDatabase>, - ancestor: ast::AnyHasAttrs, + ancestor: &'a ast::AnyHasAttrs, edition: Edition, ) -> impl Iterator + 'a { - ancestor - .attrs_including_inner() + ast::attrs_including_inner(ancestor) .filter_map(|attr| { attr.as_simple_call().and_then(|(name, value)| match &*name { "allow" | "expect" => Some(Either::Left(iter::once((Severity::Allow, value)))), diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs index 181cc74a51d4f..de26879c2959d 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs @@ -17,7 +17,7 @@ pub fn ssr_from_comment( frange: FileRange, ) -> Option<(MatchFinder<'_>, TextRange)> { let comment = { - let file_id = EditionedFileId::current_edition(db, frange.file_id); + let file_id = EditionedFileId::current_edition_guess_origin(db, frange.file_id); let file = db.parse(file_id); file.tree().syntax().token_at_offset(frange.range.start()).find_map(ast::Comment::cast) diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs index 43ad12c1f699a..7b2142a9f3489 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs @@ -125,9 +125,9 @@ impl<'db> MatchFinder<'db> { ) -> Result, SsrError> { restrict_ranges.retain(|range| !range.range.is_empty()); let sema = Semantics::new(db); - let file_id = sema - .attach_first_edition(lookup_context.file_id) - .unwrap_or_else(|| EditionedFileId::current_edition(db, lookup_context.file_id)); + let file_id = sema.attach_first_edition(lookup_context.file_id).unwrap_or_else(|| { + EditionedFileId::current_edition_guess_origin(db, lookup_context.file_id) + }); let resolution_scope = resolving::ResolutionScope::new( &sema, hir::FilePosition { file_id, offset: lookup_context.offset }, diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs index 72f857ceda903..d23d22b4e8986 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs @@ -135,11 +135,9 @@ impl<'db> MatchFinder<'db> { // seems to get put into a single source root. 
let mut files = Vec::new(); self.search_files_do(|file_id| { - files.push( - self.sema - .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition(self.sema.db, file_id)), - ); + files.push(self.sema.attach_first_edition(file_id).unwrap_or_else(|| { + EditionedFileId::current_edition_guess_origin(self.sema.db, file_id) + })); }); SearchScope::files(&files) } diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs index c197d559aa89a..0ed91cf7f5885 100644 --- a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs +++ b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs @@ -13,13 +13,13 @@ use stdx::format_to; use url::Url; use hir::{ - Adt, AsAssocItem, AssocItem, AssocItemContainer, AttrsWithOwner, HasAttrs, db::HirDatabase, sym, + Adt, AsAssocItem, AssocItem, AssocItemContainer, AttrsWithOwner, HasAttrs, db::HirDatabase, }; use ide_db::{ RootDatabase, base_db::{CrateOrigin, LangCrateOrigin, ReleaseChannel, RootQueryDb}, defs::{Definition, NameClass, NameRefClass}, - documentation::{DocsRangeMap, Documentation, HasDocs, docs_with_rangemap}, + documentation::{Documentation, HasDocs}, helpers::pick_best_token, }; use syntax::{ @@ -54,7 +54,7 @@ pub(crate) fn rewrite_links( db: &RootDatabase, markdown: &str, definition: Definition, - range_map: Option, + range_map: Option<&hir::Docs>, ) -> String { let mut cb = broken_link_clone_cb; let doc = Parser::new_with_broken_link_callback(markdown, MARKDOWN_OPTIONS, Some(&mut cb)) @@ -74,9 +74,9 @@ pub(crate) fn rewrite_links( TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap()); let is_inner_doc = range_map .as_ref() - .and_then(|range_map| range_map.map(text_range)) - .map(|(_, attr_id)| attr_id.is_inner_attr()) - .unwrap_or(false); + .and_then(|range_map| range_map.find_ast_range(text_range)) + .map(|(_, is_inner)| is_inner) + .unwrap_or(hir::IsInnerDoc::No); if let Some((target, title)) = rewrite_intra_doc_link(db, definition, target, title, is_inner_doc, link_type) { @@ -187,7 +187,7 @@ pub(crate) fn external_docs( /// Extracts all links from a given markdown text returning the definition text range, link-text /// and the namespace if known. 
pub(crate) fn extract_definitions_from_docs( - docs: &Documentation, + docs: &Documentation<'_>, ) -> Vec<(TextRange, String, Option)> { Parser::new_with_broken_link_callback( docs.as_str(), @@ -214,7 +214,7 @@ pub(crate) fn resolve_doc_path_for_def( def: Definition, link: &str, ns: Option, - is_inner_doc: bool, + is_inner_doc: hir::IsInnerDoc, ) -> Option { match def { Definition::Module(it) => it.resolve_doc_path(db, link, ns, is_inner_doc), @@ -324,11 +324,11 @@ impl DocCommentToken { let token_start = t.text_range().start(); let abs_in_expansion_offset = token_start + relative_comment_offset + descended_prefix_len; let (attributes, def) = Self::doc_attributes(sema, &node, is_inner)?; - let (docs, doc_mapping) = docs_with_rangemap(sema.db, &attributes)?; + let doc_mapping = attributes.hir_docs(sema.db)?; let (in_expansion_range, link, ns, is_inner) = - extract_definitions_from_docs(&docs).into_iter().find_map(|(range, link, ns)| { - let (mapped, idx) = doc_mapping.map(range)?; - (mapped.value.contains(abs_in_expansion_offset)).then_some((mapped.value, link, ns, idx.is_inner_attr())) + extract_definitions_from_docs(&Documentation::new_borrowed(doc_mapping.docs())).into_iter().find_map(|(range, link, ns)| { + let (mapped, is_inner) = doc_mapping.find_ast_range(range)?; + (mapped.value.contains(abs_in_expansion_offset)).then_some((mapped.value, link, ns, is_inner)) })?; // get the relative range to the doc/attribute in the expansion let in_expansion_relative_range = in_expansion_range - descended_prefix_len - token_start; @@ -416,7 +416,7 @@ fn rewrite_intra_doc_link( def: Definition, target: &str, title: &str, - is_inner_doc: bool, + is_inner_doc: hir::IsInnerDoc, link_type: LinkType, ) -> Option<(String, String)> { let (link, ns) = parse_intra_doc_link(target); @@ -659,14 +659,12 @@ fn filename_and_frag_for_def( Definition::Crate(_) => String::from("index.html"), Definition::Module(m) => match m.name(db) { // `#[doc(keyword = "...")]` is internal used only by rust compiler - Some(name) => { - match m.attrs(db).by_key(sym::doc).find_string_value_in_tt(sym::keyword) { - Some(kw) => { - format!("keyword.{kw}.html") - } - None => format!("{}/index.html", name.as_str()), + Some(name) => match m.doc_keyword(db) { + Some(kw) => { + format!("keyword.{kw}.html") } - } + None => format!("{}/index.html", name.as_str()), + }, None => String::from("index.html"), }, Definition::Trait(t) => { diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs index 3fd885535a234..34ffc11c4b5f3 100644 --- a/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs @@ -1,11 +1,11 @@ -use std::iter; +use std::{borrow::Cow, iter}; use expect_test::{Expect, expect}; use hir::Semantics; use ide_db::{ FilePosition, FileRange, RootDatabase, defs::Definition, - documentation::{DocsRangeMap, Documentation, HasDocs}, + documentation::{Documentation, HasDocs}, }; use itertools::Itertools; use syntax::{AstNode, SyntaxNode, ast, match_ast}; @@ -45,9 +45,9 @@ fn check_external_docs( fn check_rewrite(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) { let (analysis, position) = fixture::position(ra_fixture); let sema = &Semantics::new(&analysis.db); - let (cursor_def, docs, range) = def_under_cursor(sema, &position); + let (cursor_def, docs) = def_under_cursor(sema, &position); let res = - hir::attach_db(sema.db, || rewrite_links(sema.db, docs.as_str(), cursor_def, 
Some(range))); + hir::attach_db(sema.db, || rewrite_links(sema.db, docs.docs(), cursor_def, Some(&docs))); expect.assert_eq(&res) } @@ -57,33 +57,36 @@ fn check_doc_links(#[rust_analyzer::rust_fixture] ra_fixture: &str) { let (analysis, position, mut expected) = fixture::annotations(ra_fixture); expected.sort_by_key(key_fn); let sema = &Semantics::new(&analysis.db); - let (cursor_def, docs, range) = def_under_cursor(sema, &position); - let defs = extract_definitions_from_docs(&docs); - let actual: Vec<_> = defs - .into_iter() - .flat_map(|(text_range, link, ns)| { - let attr = range.map(text_range); - let is_inner_attr = attr.map(|(_file, attr)| attr.is_inner_attr()).unwrap_or(false); - let def = hir::attach_db(sema.db, || { - resolve_doc_path_for_def(sema.db, cursor_def, &link, ns, is_inner_attr) - .unwrap_or_else(|| panic!("Failed to resolve {link}")) - }); - def.try_to_nav(sema).unwrap().into_iter().zip(iter::repeat(link)) - }) - .map(|(nav_target, link)| { - let range = - FileRange { file_id: nav_target.file_id, range: nav_target.focus_or_full_range() }; - (range, link) - }) - .sorted_by_key(key_fn) - .collect(); - assert_eq!(expected, actual); -} - -fn def_under_cursor( - sema: &Semantics<'_, RootDatabase>, + hir::attach_db(sema.db, || { + let (cursor_def, docs) = def_under_cursor(sema, &position); + let defs = extract_definitions_from_docs(&Documentation::new_borrowed(docs.docs())); + let actual: Vec<_> = defs + .into_iter() + .flat_map(|(text_range, link, ns)| { + let attr = docs.find_ast_range(text_range); + let is_inner_attr = + attr.map(|(_file, is_inner)| is_inner).unwrap_or(hir::IsInnerDoc::No); + let def = resolve_doc_path_for_def(sema.db, cursor_def, &link, ns, is_inner_attr) + .unwrap_or_else(|| panic!("Failed to resolve {link}")); + def.try_to_nav(sema).unwrap().into_iter().zip(iter::repeat(link)) + }) + .map(|(nav_target, link)| { + let range = FileRange { + file_id: nav_target.file_id, + range: nav_target.focus_or_full_range(), + }; + (range, link) + }) + .sorted_by_key(key_fn) + .collect(); + assert_eq!(expected, actual); + }); +} + +fn def_under_cursor<'db>( + sema: &Semantics<'db, RootDatabase>, position: &FilePosition, -) -> (Definition, Documentation, DocsRangeMap) { +) -> (Definition, Cow<'db, hir::Docs>) { let (docs, def) = sema .parse_guess_edition(position.file_id) .syntax() @@ -94,14 +97,14 @@ fn def_under_cursor( .find_map(|it| node_to_def(sema, &it)) .expect("no def found") .unwrap(); - let (docs, range) = docs.expect("no docs found for cursor def"); - (def, docs, range) + let docs = docs.expect("no docs found for cursor def"); + (def, docs) } -fn node_to_def( - sema: &Semantics<'_, RootDatabase>, +fn node_to_def<'db>( + sema: &Semantics<'db, RootDatabase>, node: &SyntaxNode, -) -> Option, Definition)>> { +) -> Option>, Definition)>> { Some(match_ast! { match node { ast::SourceFile(it) => sema.to_def(&it).map(|def| (def.docs_with_rangemap(sema.db), Definition::Module(def))), diff --git a/src/tools/rust-analyzer/crates/ide/src/fixture.rs b/src/tools/rust-analyzer/crates/ide/src/fixture.rs index fbf89042fae15..1a8591d25dcaf 100644 --- a/src/tools/rust-analyzer/crates/ide/src/fixture.rs +++ b/src/tools/rust-analyzer/crates/ide/src/fixture.rs @@ -7,10 +7,10 @@ use crate::{Analysis, AnalysisHost, FileId, FilePosition, FileRange}; /// Creates analysis for a single file. 
pub(crate) fn file(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (Analysis, FileId) { let mut host = AnalysisHost::default(); - let change_fixture = ChangeFixture::parse(&host.db, ra_fixture); + let change_fixture = ChangeFixture::parse(ra_fixture); host.db.enable_proc_attr_macros(); host.db.apply_change(change_fixture.change); - (host.analysis(), change_fixture.files[0].file_id(&host.db)) + (host.analysis(), change_fixture.files[0].file_id()) } /// Creates analysis from a multi-file fixture, returns positions marked with $0. @@ -18,23 +18,23 @@ pub(crate) fn position( #[rust_analyzer::rust_fixture] ra_fixture: &str, ) -> (Analysis, FilePosition) { let mut host = AnalysisHost::default(); - let change_fixture = ChangeFixture::parse(&host.db, ra_fixture); + let change_fixture = ChangeFixture::parse(ra_fixture); host.db.enable_proc_attr_macros(); host.db.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); let offset = range_or_offset.expect_offset(); - (host.analysis(), FilePosition { file_id: file_id.file_id(&host.db), offset }) + (host.analysis(), FilePosition { file_id: file_id.file_id(), offset }) } /// Creates analysis for a single file, returns range marked with a pair of $0. pub(crate) fn range(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (Analysis, FileRange) { let mut host = AnalysisHost::default(); - let change_fixture = ChangeFixture::parse(&host.db, ra_fixture); + let change_fixture = ChangeFixture::parse(ra_fixture); host.db.enable_proc_attr_macros(); host.db.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); let range = range_or_offset.expect_range(); - (host.analysis(), FileRange { file_id: file_id.file_id(&host.db), range }) + (host.analysis(), FileRange { file_id: file_id.file_id(), range }) } /// Creates analysis for a single file, returns range marked with a pair of $0 or a position marked with $0. @@ -42,11 +42,11 @@ pub(crate) fn range_or_position( #[rust_analyzer::rust_fixture] ra_fixture: &str, ) -> (Analysis, FileId, RangeOrOffset) { let mut host = AnalysisHost::default(); - let change_fixture = ChangeFixture::parse(&host.db, ra_fixture); + let change_fixture = ChangeFixture::parse(ra_fixture); host.db.enable_proc_attr_macros(); host.db.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); - (host.analysis(), file_id.file_id(&host.db), range_or_offset) + (host.analysis(), file_id.file_id(), range_or_offset) } /// Creates analysis from a multi-file fixture, returns positions marked with $0. 
@@ -54,25 +54,24 @@ pub(crate) fn annotations( #[rust_analyzer::rust_fixture] ra_fixture: &str, ) -> (Analysis, FilePosition, Vec<(FileRange, String)>) { let mut host = AnalysisHost::default(); - let change_fixture = ChangeFixture::parse(&host.db, ra_fixture); + let change_fixture = ChangeFixture::parse(ra_fixture); host.db.enable_proc_attr_macros(); host.db.apply_change(change_fixture.change); let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)"); let offset = range_or_offset.expect_offset(); - let db = &host.db; let annotations = change_fixture .files .iter() .flat_map(|&file_id| { - let file_text = host.analysis().file_text(file_id.file_id(&host.db)).unwrap(); + let file_text = host.analysis().file_text(file_id.file_id()).unwrap(); let annotations = extract_annotations(&file_text); annotations .into_iter() - .map(move |(range, data)| (FileRange { file_id: file_id.file_id(db), range }, data)) + .map(move |(range, data)| (FileRange { file_id: file_id.file_id(), range }, data)) }) .collect(); - (host.analysis(), FilePosition { file_id: file_id.file_id(&host.db), offset }, annotations) + (host.analysis(), FilePosition { file_id: file_id.file_id(), offset }, annotations) } /// Creates analysis from a multi-file fixture with annotations without $0 @@ -80,20 +79,19 @@ pub(crate) fn annotations_without_marker( #[rust_analyzer::rust_fixture] ra_fixture: &str, ) -> (Analysis, Vec<(FileRange, String)>) { let mut host = AnalysisHost::default(); - let change_fixture = ChangeFixture::parse(&host.db, ra_fixture); + let change_fixture = ChangeFixture::parse(ra_fixture); host.db.enable_proc_attr_macros(); host.db.apply_change(change_fixture.change); - let db = &host.db; let annotations = change_fixture .files .iter() .flat_map(|&file_id| { - let file_text = host.analysis().file_text(file_id.file_id(db)).unwrap(); + let file_text = host.analysis().file_text(file_id.file_id()).unwrap(); let annotations = extract_annotations(&file_text); annotations .into_iter() - .map(move |(range, data)| (FileRange { file_id: file_id.file_id(db), range }, data)) + .map(move |(range, data)| (FileRange { file_id: file_id.file_id(), range }, data)) }) .collect(); (host.analysis(), annotations) diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs index 875403c4e32a4..cc333d66caf32 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs @@ -355,7 +355,7 @@ trait Bar {} fn test() { #[derive(Copy)] - //^^^^^^^^^^^^^^^ + // ^^^^^^^^^^^^ struct Foo$0; impl Foo {} diff --git a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs index 04ce5a7567f3c..f7870032ea281 100644 --- a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs +++ b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs @@ -62,7 +62,7 @@ pub(crate) fn highlight_related( let _p = tracing::info_span!("highlight_related").entered(); let file_id = sema .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition(sema.db, file_id)); + .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(sema.db, file_id)); let syntax = sema.parse(file_id).syntax().clone(); let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind { diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs 
b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs index a1eff3aaee789..5bdfb57356583 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs @@ -1,5 +1,5 @@ //! Logic for rendering the different hover messages -use std::{env, mem, ops::Not}; +use std::{borrow::Cow, env, mem, ops::Not}; use either::Either; use hir::{ @@ -11,7 +11,7 @@ use hir::{ use ide_db::{ RootDatabase, defs::{Definition, find_std_module}, - documentation::{DocsRangeMap, HasDocs}, + documentation::{Documentation, HasDocs}, famous_defs::FamousDefs, generated::lints::{CLIPPY_LINTS, DEFAULT_LINTS, FEATURES}, syntax_helpers::prettify_macro_expansion, @@ -278,9 +278,9 @@ pub(super) fn keyword( keyword_hints(sema, token, parent, edition, display_target); let doc_owner = find_std_module(&famous_defs, &keyword_mod, edition)?; - let (docs, range_map) = doc_owner.docs_with_rangemap(sema.db)?; + let docs = doc_owner.docs_with_rangemap(sema.db)?; let (markup, range_map) = - markup(Some(docs.into()), Some(range_map), description, None, None, String::new()); + markup(Some(Either::Left(docs)), description, None, None, String::new()); let markup = process_markup(sema.db, Definition::Module(doc_owner), &markup, range_map, config); Some(HoverResult { markup, actions }) } @@ -370,12 +370,12 @@ pub(super) fn process_markup( db: &RootDatabase, def: Definition, markup: &Markup, - markup_range_map: Option, + markup_range_map: Option, config: &HoverConfig<'_>, ) -> Markup { let markup = markup.as_str(); let markup = if config.links_in_hover { - rewrite_links(db, markup, def, markup_range_map) + rewrite_links(db, markup, def, markup_range_map.as_ref()) } else { remove_links(markup) }; @@ -484,7 +484,7 @@ pub(super) fn definition( config: &HoverConfig<'_>, edition: Edition, display_target: DisplayTarget, -) -> (Markup, Option) { +) -> (Markup, Option) { let mod_path = definition_path(db, &def, edition); let label = match def { Definition::Trait(trait_) => trait_ @@ -520,12 +520,7 @@ pub(super) fn definition( } _ => def.label(db, display_target), }; - let (docs, range_map) = - if let Some((docs, doc_range)) = def.docs_with_rangemap(db, famous_defs, display_target) { - (Some(docs), doc_range) - } else { - (None, None) - }; + let docs = def.docs_with_rangemap(db, famous_defs, display_target); let value = || match def { Definition::Variant(it) => { if !it.parent_enum(db).is_data_carrying(db) { @@ -842,14 +837,7 @@ pub(super) fn definition( } }; - markup( - docs.map(Into::into), - range_map, - desc, - extra.is_empty().not().then_some(extra), - mod_path, - subst_types, - ) + markup(docs, desc, extra.is_empty().not().then_some(extra), mod_path, subst_types) } #[derive(Debug)] @@ -1124,13 +1112,12 @@ fn definition_path(db: &RootDatabase, &def: &Definition, edition: Edition) -> Op } fn markup( - docs: Option, - range_map: Option, + docs: Option, Documentation<'_>>>, rust: String, extra: Option, mod_path: Option, subst_types: String, -) -> (Markup, Option) { +) -> (Markup, Option) { let mut buf = String::new(); if let Some(mod_path) = mod_path @@ -1151,10 +1138,21 @@ fn markup( if let Some(doc) = docs { format_to!(buf, "\n___\n\n"); let offset = TextSize::new(buf.len() as u32); - let buf_range_map = range_map.map(|range_map| range_map.shift_docstring_line_range(offset)); - format_to!(buf, "{}", doc); + let docs_str = match &doc { + Either::Left(docs) => docs.docs(), + Either::Right(docs) => docs.as_str(), + }; + format_to!(buf, "{}", docs_str); + let range_map = match doc { + 
Either::Left(range_map) => { + let mut range_map = range_map.into_owned(); + range_map.shift_by(offset); + Some(range_map) + } + Either::Right(_) => None, + }; - (buf.into(), buf_range_map) + (buf.into(), range_map) } else { (buf.into(), None) } diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs index 21550d5e66658..d474e50d3c2c9 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs @@ -90,7 +90,7 @@ pub(crate) fn inlay_hints( let sema = Semantics::new(db); let file_id = sema .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id)); + .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id)); let file = sema.parse(file_id); let file = file.syntax(); @@ -143,7 +143,7 @@ pub(crate) fn inlay_hints_resolve( let sema = Semantics::new(db); let file_id = sema .attach_first_edition(file_id) - .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id)); + .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id)); let file = sema.parse(file_id); let file = file.syntax(); diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs index 857252832ffe1..a633877adb4e1 100644 --- a/src/tools/rust-analyzer/crates/ide/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs @@ -331,7 +331,8 @@ impl Analysis { pub fn parse(&self, file_id: FileId) -> Cancellable { // FIXME edition self.with_db(|db| { - let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id); + let editioned_file_id_wrapper = + EditionedFileId::current_edition_guess_origin(&self.db, file_id); db.parse(editioned_file_id_wrapper).tree() }) @@ -360,7 +361,7 @@ impl Analysis { /// supported). pub fn matching_brace(&self, position: FilePosition) -> Cancellable> { self.with_db(|db| { - let file_id = EditionedFileId::current_edition(&self.db, position.file_id); + let file_id = EditionedFileId::current_edition_guess_origin(&self.db, position.file_id); let parse = db.parse(file_id); let file = parse.tree(); matching_brace::matching_brace(&file, position.offset) @@ -421,7 +422,7 @@ impl Analysis { pub fn join_lines(&self, config: &JoinLinesConfig, frange: FileRange) -> Cancellable { self.with_db(|db| { let editioned_file_id_wrapper = - EditionedFileId::current_edition(&self.db, frange.file_id); + EditionedFileId::current_edition_guess_origin(&self.db, frange.file_id); let parse = db.parse(editioned_file_id_wrapper); join_lines::join_lines(config, &parse.tree(), frange.range) }) @@ -462,7 +463,8 @@ impl Analysis { ) -> Cancellable> { // FIXME: Edition self.with_db(|db| { - let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id); + let editioned_file_id_wrapper = + EditionedFileId::current_edition_guess_origin(&self.db, file_id); let source_file = db.parse(editioned_file_id_wrapper).tree(); file_structure::file_structure(&source_file, config) }) @@ -493,7 +495,8 @@ impl Analysis { /// Returns the set of folding ranges. 
pub fn folding_ranges(&self, file_id: FileId) -> Cancellable> { self.with_db(|db| { - let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id); + let editioned_file_id_wrapper = + EditionedFileId::current_edition_guess_origin(&self.db, file_id); folding_ranges::folding_ranges(&db.parse(editioned_file_id_wrapper).tree()) }) diff --git a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs index b222ff3eec0be..8e73ddf8bfc3f 100644 --- a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs +++ b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs @@ -54,7 +54,8 @@ pub struct NavigationTarget { // FIXME: Symbol pub container_name: Option, pub description: Option, - pub docs: Option, + // FIXME: Use the database lifetime here. + pub docs: Option>, /// In addition to a `name` field, a `NavigationTarget` may also be aliased /// In such cases we want a `NavigationTarget` to be accessible by its alias // FIXME: Symbol @@ -163,7 +164,7 @@ impl NavigationTarget { full_range, SymbolKind::Module, ); - res.docs = module.docs(db); + res.docs = module.docs(db).map(Documentation::into_owned); res.description = Some( module.display(db, module.krate().to_display_target(db)).to_string(), ); @@ -437,7 +438,7 @@ where D::KIND, ) .map(|mut res| { - res.docs = self.docs(db); + res.docs = self.docs(db).map(Documentation::into_owned); res.description = hir::attach_db(db, || { Some(self.display(db, self.krate(db).to_display_target(db)).to_string()) }); @@ -536,7 +537,7 @@ impl TryToNav for hir::ExternCrateDecl { SymbolKind::Module, ); - res.docs = self.docs(db); + res.docs = self.docs(db).map(Documentation::into_owned); res.description = Some(self.display(db, krate.to_display_target(db)).to_string()); res.container_name = container_name(db, *self, edition); res @@ -558,10 +559,9 @@ impl TryToNav for hir::Field { FieldSource::Named(it) => { NavigationTarget::from_named(db, src.with_value(it), SymbolKind::Field).map( |mut res| { - res.docs = self.docs(db); - res.description = hir::attach_db(db, || { - Some(self.display(db, krate.to_display_target(db)).to_string()) - }); + res.docs = self.docs(db).map(Documentation::into_owned); + res.description = + Some(self.display(db, krate.to_display_target(db)).to_string()); res }, ) @@ -600,7 +600,7 @@ impl TryToNav for hir::Macro { self.kind(db).into(), ) .map(|mut res| { - res.docs = self.docs(db); + res.docs = self.docs(db).map(Documentation::into_owned); res }), ) @@ -939,7 +939,7 @@ pub(crate) fn orig_range_with_focus_r( ) -> UpmappingResult<(FileRange, Option)> { let Some(name) = focus_range else { return orig_range_r(db, hir_file, value) }; - let call_kind = || db.lookup_intern_macro_call(hir_file.macro_file().unwrap()).kind; + let call = || db.lookup_intern_macro_call(hir_file.macro_file().unwrap()); let def_range = || db.lookup_intern_macro_call(hir_file.macro_file().unwrap()).def.definition_range(db); @@ -965,7 +965,8 @@ pub(crate) fn orig_range_with_focus_r( // name lies outside the node, so instead point to the macro call which // *should* contain the name _ => { - let kind = call_kind(); + let call = call(); + let kind = call.kind; let range = kind.clone().original_call_range_with_input(db); //If the focus range is in the attribute/derive body, we // need to point the call site to the entire body, if not, fall back @@ -977,7 +978,7 @@ pub(crate) fn orig_range_with_focus_r( { range } else { - kind.original_call_range(db) + kind.original_call_range(db, 
call.krate) } } }, @@ -1006,11 +1007,14 @@ pub(crate) fn orig_range_with_focus_r( }, ), // node is in macro def, just show the focus - _ => ( - // show the macro call - (call_kind().original_call_range(db), None), - Some((focus_range, Some(focus_range))), - ), + _ => { + let call = call(); + ( + // show the macro call + (call.kind.original_call_range(db, call.krate), None), + Some((focus_range, Some(focus_range))), + ) + } } } // lost name? can't happen for single tokens diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs index a53a192997274..c4dcd588d6934 100644 --- a/src/tools/rust-analyzer/crates/ide/src/references.rs +++ b/src/tools/rust-analyzer/crates/ide/src/references.rs @@ -1124,7 +1124,10 @@ pub(super) struct Foo$0 { check_with_scope( code, Some(&mut |db| { - SearchScope::single_file(EditionedFileId::current_edition(db, FileId::from_raw(2))) + SearchScope::single_file(EditionedFileId::current_edition_guess_origin( + db, + FileId::from_raw(2), + )) }), expect![[r#" quux Function FileId(0) 19..35 26..30 diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs index 494701d97def1..4b475dac87b59 100644 --- a/src/tools/rust-analyzer/crates/ide/src/runnables.rs +++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs @@ -3,17 +3,13 @@ use std::{fmt, sync::OnceLock}; use arrayvec::ArrayVec; use ast::HasName; use cfg::{CfgAtom, CfgExpr}; -use hir::{ - AsAssocItem, AttrsWithOwner, HasAttrs, HasCrate, HasSource, Semantics, Symbol, db::HirDatabase, - sym, -}; +use hir::{AsAssocItem, HasAttrs, HasCrate, HasSource, Semantics, Symbol, db::HirDatabase, sym}; use ide_assists::utils::{has_test_related_attribute, test_related_attribute_syn}; use ide_db::impl_empty_upmap_from_ra_fixture; use ide_db::{ FilePosition, FxHashMap, FxIndexMap, FxIndexSet, RootDatabase, SymbolKind, base_db::RootQueryDb, defs::Definition, - documentation::docs_from_attrs, helpers::visit_file_defs, search::{FileReferenceNode, SearchScope}, }; @@ -323,7 +319,7 @@ pub(crate) fn runnable_fn( def: hir::Function, ) -> Option { let edition = def.krate(sema.db).edition(sema.db); - let under_cfg_test = has_cfg_test(def.module(sema.db).attrs(sema.db)); + let under_cfg_test = has_cfg_test(def.module(sema.db).attrs(sema.db).cfgs(sema.db)); let kind = if !under_cfg_test && def.is_main(sema.db) { RunnableKind::Bin } else { @@ -358,7 +354,7 @@ pub(crate) fn runnable_fn( let file_range = fn_source.syntax().original_file_range_with_macro_call_input(sema.db); let update_test = UpdateTest::find_snapshot_macro(sema, file_range); - let cfg = def.attrs(sema.db).cfg(); + let cfg = def.attrs(sema.db).cfgs(sema.db).cloned(); Some(Runnable { use_name_in_title: false, nav, kind, cfg, update_test }) } @@ -366,8 +362,8 @@ pub(crate) fn runnable_mod( sema: &Semantics<'_, RootDatabase>, def: hir::Module, ) -> Option { - if !has_test_function_or_multiple_test_submodules(sema, &def, has_cfg_test(def.attrs(sema.db))) - { + let cfg = def.attrs(sema.db).cfgs(sema.db); + if !has_test_function_or_multiple_test_submodules(sema, &def, has_cfg_test(cfg)) { return None; } let path = def @@ -381,8 +377,7 @@ pub(crate) fn runnable_mod( }) .join("::"); - let attrs = def.attrs(sema.db); - let cfg = attrs.cfg(); + let cfg = cfg.cloned(); let nav = NavigationTarget::from_module_to_decl(sema.db, def).call_site(); let module_source = sema.module_definition_node(def); @@ -409,10 +404,10 @@ pub(crate) fn runnable_impl( let display_target = 
def.module(sema.db).krate().to_display_target(sema.db); let edition = display_target.edition; let attrs = def.attrs(sema.db); - if !has_runnable_doc_test(&attrs) { + if !has_runnable_doc_test(sema.db, &attrs) { return None; } - let cfg = attrs.cfg(); + let cfg = attrs.cfgs(sema.db).cloned(); let nav = def.try_to_nav(sema)?.call_site(); let ty = def.self_ty(sema.db); let adt_name = ty.as_adt()?.name(sema.db); @@ -442,8 +437,16 @@ pub(crate) fn runnable_impl( }) } -fn has_cfg_test(attrs: AttrsWithOwner) -> bool { - attrs.cfgs().any(|cfg| matches!(&cfg, CfgExpr::Atom(CfgAtom::Flag(s)) if *s == sym::test)) +fn has_cfg_test(cfg: Option<&CfgExpr>) -> bool { + return cfg.is_some_and(has_cfg_test_impl); + + fn has_cfg_test_impl(cfg: &CfgExpr) -> bool { + match cfg { + CfgExpr::Atom(CfgAtom::Flag(s)) => *s == sym::test, + CfgExpr::Any(cfgs) | CfgExpr::All(cfgs) => cfgs.iter().any(has_cfg_test_impl), + _ => false, + } + } } /// Creates a test mod runnable for outline modules at the top of their definition. @@ -453,8 +456,8 @@ fn runnable_mod_outline_definition( ) -> Option { def.as_source_file_id(sema.db)?; - if !has_test_function_or_multiple_test_submodules(sema, &def, has_cfg_test(def.attrs(sema.db))) - { + let cfg = def.attrs(sema.db).cfgs(sema.db); + if !has_test_function_or_multiple_test_submodules(sema, &def, has_cfg_test(cfg)) { return None; } let path = def @@ -468,8 +471,7 @@ fn runnable_mod_outline_definition( }) .join("::"); - let attrs = def.attrs(sema.db); - let cfg = attrs.cfg(); + let cfg = cfg.cloned(); let mod_source = sema.module_definition_node(def); let mod_syntax = mod_source.file_syntax(sema.db); @@ -508,7 +510,7 @@ fn module_def_doctest(sema: &Semantics<'_, RootDatabase>, def: Definition) -> Op let display_target = krate .unwrap_or_else(|| (*db.all_crates().last().expect("no crate graph present")).into()) .to_display_target(db); - if !has_runnable_doc_test(&attrs) { + if !has_runnable_doc_test(db, &attrs) { return None; } let def_name = def.name(db)?; @@ -554,7 +556,7 @@ fn module_def_doctest(sema: &Semantics<'_, RootDatabase>, def: Definition) -> Op use_name_in_title: false, nav, kind: RunnableKind::DocTest { test_id }, - cfg: attrs.cfg(), + cfg: attrs.cfgs(db).cloned(), update_test: UpdateTest::default(), }; Some(res) @@ -571,15 +573,15 @@ impl TestAttr { } } -fn has_runnable_doc_test(attrs: &hir::Attrs) -> bool { +fn has_runnable_doc_test(db: &RootDatabase, attrs: &hir::AttrsWithOwner) -> bool { const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"]; const RUSTDOC_CODE_BLOCK_ATTRIBUTES_RUNNABLE: &[&str] = &["", "rust", "should_panic", "edition2015", "edition2018", "edition2021"]; - docs_from_attrs(attrs).is_some_and(|doc| { + attrs.hir_docs(db).is_some_and(|doc| { let mut in_code_block = false; - for line in doc.lines() { + for line in doc.docs().lines() { if let Some(header) = RUSTDOC_FENCES.into_iter().find_map(|fence| line.strip_prefix(fence)) { diff --git a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs index 5f7e12cf53f84..a8fc57a431b4f 100644 --- a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs +++ b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs @@ -31,7 +31,7 @@ use crate::RootDatabase; /// edited. 
#[derive(Debug)] pub struct SignatureHelp { - pub doc: Option, + pub doc: Option>, pub signature: String, pub active_parameter: Option, parameters: Vec, @@ -174,7 +174,7 @@ fn signature_help_for_call( let mut fn_params = None; match callable.kind() { hir::CallableKind::Function(func) => { - res.doc = func.docs(db); + res.doc = func.docs(db).map(Documentation::into_owned); format_to!(res.signature, "fn {}", func.name(db).display(db, edition)); let generic_params = GenericDef::Function(func) @@ -196,7 +196,7 @@ fn signature_help_for_call( }); } hir::CallableKind::TupleStruct(strukt) => { - res.doc = strukt.docs(db); + res.doc = strukt.docs(db).map(Documentation::into_owned); format_to!(res.signature, "struct {}", strukt.name(db).display(db, edition)); let generic_params = GenericDef::Adt(strukt.into()) @@ -209,7 +209,7 @@ fn signature_help_for_call( } } hir::CallableKind::TupleEnumVariant(variant) => { - res.doc = variant.docs(db); + res.doc = variant.docs(db).map(Documentation::into_owned); format_to!( res.signature, "enum {}", @@ -314,33 +314,33 @@ fn signature_help_for_generics( let db = sema.db; match generics_def { hir::GenericDef::Function(it) => { - res.doc = it.docs(db); + res.doc = it.docs(db).map(Documentation::into_owned); format_to!(res.signature, "fn {}", it.name(db).display(db, edition)); } hir::GenericDef::Adt(hir::Adt::Enum(it)) => { - res.doc = it.docs(db); + res.doc = it.docs(db).map(Documentation::into_owned); format_to!(res.signature, "enum {}", it.name(db).display(db, edition)); if let Some(variant) = variant { // In paths, generics of an enum can be specified *after* one of its variants. // eg. `None::` // We'll use the signature of the enum, but include the docs of the variant. - res.doc = variant.docs(db); + res.doc = variant.docs(db).map(Documentation::into_owned); } } hir::GenericDef::Adt(hir::Adt::Struct(it)) => { - res.doc = it.docs(db); + res.doc = it.docs(db).map(Documentation::into_owned); format_to!(res.signature, "struct {}", it.name(db).display(db, edition)); } hir::GenericDef::Adt(hir::Adt::Union(it)) => { - res.doc = it.docs(db); + res.doc = it.docs(db).map(Documentation::into_owned); format_to!(res.signature, "union {}", it.name(db).display(db, edition)); } hir::GenericDef::Trait(it) => { - res.doc = it.docs(db); + res.doc = it.docs(db).map(Documentation::into_owned); format_to!(res.signature, "trait {}", it.name(db).display(db, edition)); } hir::GenericDef::TypeAlias(it) => { - res.doc = it.docs(db); + res.doc = it.docs(db).map(Documentation::into_owned); format_to!(res.signature, "type {}", it.name(db).display(db, edition)); } // These don't have generic args that can be specified @@ -495,7 +495,7 @@ fn signature_help_for_tuple_struct_pat( let fields: Vec<_> = if let PathResolution::Def(ModuleDef::Variant(variant)) = path_res { let en = variant.parent_enum(db); - res.doc = en.docs(db); + res.doc = en.docs(db).map(Documentation::into_owned); format_to!( res.signature, "enum {}::{} (", @@ -512,7 +512,7 @@ fn signature_help_for_tuple_struct_pat( match adt { hir::Adt::Struct(it) => { - res.doc = it.docs(db); + res.doc = it.docs(db).map(Documentation::into_owned); format_to!(res.signature, "struct {} (", it.name(db).display(db, edition)); it.fields(db) } @@ -622,7 +622,7 @@ fn signature_help_for_record_<'db>( fields = variant.fields(db); let en = variant.parent_enum(db); - res.doc = en.docs(db); + res.doc = en.docs(db).map(Documentation::into_owned); format_to!( res.signature, "enum {}::{} {{ ", @@ -639,12 +639,12 @@ fn signature_help_for_record_<'db>( match 
adt {
         hir::Adt::Struct(it) => {
             fields = it.fields(db);
-            res.doc = it.docs(db);
+            res.doc = it.docs(db).map(Documentation::into_owned);
             format_to!(res.signature, "struct {} {{ ", it.name(db).display(db, edition));
         }
         hir::Adt::Union(it) => {
             fields = it.fields(db);
-            res.doc = it.docs(db);
+            res.doc = it.docs(db).map(Documentation::into_owned);
             format_to!(res.signature, "union {} {{ ", it.name(db).display(db, edition));
         }
         _ => return None,
@@ -740,12 +740,12 @@ mod tests {
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
     ) -> (RootDatabase, FilePosition) {
         let mut database = RootDatabase::default();
-        let change_fixture = ChangeFixture::parse(&database, ra_fixture);
+        let change_fixture = ChangeFixture::parse(ra_fixture);
         database.apply_change(change_fixture.change);
         let (file_id, range_or_offset) =
             change_fixture.file_position.expect("expected a marker ($0)");
         let offset = range_or_offset.expect_offset();
-        let position = FilePosition { file_id: file_id.file_id(&database), offset };
+        let position = FilePosition { file_id: file_id.file_id(), offset };
         (database, position)
     }
 
diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
index e261928c413f4..ec8292968dbf7 100644
--- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
@@ -42,7 +42,8 @@ pub struct ReferenceData {
 
 #[derive(Debug)]
 pub struct TokenStaticData {
-    pub documentation: Option,
+    // FIXME: Make this have the lifetime of the database.
+    pub documentation: Option>,
     pub hover: Option,
     pub definition: Option,
     pub references: Vec,
@@ -109,7 +110,7 @@ fn documentation_for_definition(
     sema: &Semantics<'_, RootDatabase>,
     def: Definition,
     scope_node: &SyntaxNode,
-) -> Option {
+) -> Option> {
     let famous_defs = match &def {
         Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(scope_node)?.krate())),
         _ => None,
@@ -124,6 +125,7 @@ fn documentation_for_definition(
         })
         .to_display_target(sema.db),
     )
+    .map(Documentation::into_owned)
 }
 
 // FIXME: This is a weird function
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
index 66895cb0b053c..782a73d20ca3a 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
@@ -199,7 +199,7 @@ pub(crate) fn highlight(
     let sema = Semantics::new(db);
     let file_id = sema
         .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
+        .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id));
 
     // Determine the root based on the given range.
     let (root, range_to_highlight) = {
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
index 75e46b8ebfdef..597550b482cd2 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
@@ -20,7 +20,7 @@ pub(crate) fn highlight_as_html_with_config(
     let sema = Semantics::new(db);
     let file_id = sema
         .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
+        .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id));
     let file = sema.parse(file_id);
     let file = file.syntax();
     fn rainbowify(seed: u64) -> String {
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
index 7955f5ac0de99..26d2bb5e02884 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
@@ -1,16 +1,13 @@
 //! "Recursive" Syntax highlighting for code in doctests and fixtures.
 
-use std::mem;
-
-use either::Either;
-use hir::{EditionedFileId, HirFileId, InFile, Semantics, sym};
-use ide_db::range_mapper::RangeMapper;
+use hir::{EditionedFileId, HirFileId, InFile, Semantics};
 use ide_db::{
-    SymbolKind, defs::Definition, documentation::docs_with_rangemap, rust_doc::is_rust_fence,
+    SymbolKind, defs::Definition, documentation::Documentation, range_mapper::RangeMapper,
+    rust_doc::is_rust_fence,
 };
 use syntax::{
-    AstToken, NodeOrToken, SyntaxNode, TextRange, TextSize,
-    ast::{self, AstNode, IsString, QuoteOffsets},
+    SyntaxNode, TextRange, TextSize,
+    ast::{self, IsString},
 };
 
 use crate::{
@@ -96,118 +93,79 @@ pub(super) fn doc_comment(
         None => return,
     };
     let src_file_id: HirFileId = src_file_id.into();
+    let Some(docs) = attributes.hir_docs(sema.db) else { return };
 
     // Extract intra-doc links and emit highlights for them.
-    if let Some((docs, doc_mapping)) = docs_with_rangemap(sema.db, &attributes) {
-        extract_definitions_from_docs(&docs)
-            .into_iter()
-            .filter_map(|(range, link, ns)| {
-                doc_mapping
-                    .map(range)
-                    .filter(|(mapping, _)| mapping.file_id == src_file_id)
-                    .and_then(|(InFile { value: mapped_range, .. }, attr_id)| {
-                        Some(mapped_range).zip(resolve_doc_path_for_def(
-                            sema.db,
-                            def,
-                            &link,
-                            ns,
-                            attr_id.is_inner_attr(),
-                        ))
-                    })
-            })
-            .for_each(|(range, def)| {
-                hl.add(HlRange {
-                    range,
-                    highlight: module_def_to_hl_tag(def)
-                        | HlMod::Documentation
-                        | HlMod::Injected
-                        | HlMod::IntraDocLink,
-                    binding_hash: None,
+    extract_definitions_from_docs(&Documentation::new_borrowed(docs.docs()))
+        .into_iter()
+        .filter_map(|(range, link, ns)| {
+            docs.find_ast_range(range)
+                .filter(|(mapping, _)| mapping.file_id == src_file_id)
+                .and_then(|(InFile { value: mapped_range, .. }, is_inner)| {
+                    Some(mapped_range)
+                        .zip(resolve_doc_path_for_def(sema.db, def, &link, ns, is_inner))
                 })
+        })
+        .for_each(|(range, def)| {
+            hl.add(HlRange {
+                range,
+                highlight: module_def_to_hl_tag(def)
+                    | HlMod::Documentation
+                    | HlMod::Injected
+                    | HlMod::IntraDocLink,
+                binding_hash: None,
             })
-    }
+        });
 
     // Extract doc-test sources from the docs and calculate highlighting for them.
     let mut inj = RangeMapper::default();
     inj.add_unmapped("fn doctest() {\n");
 
-    let attrs_source_map = attributes.source_map(sema.db);
-
     let mut is_codeblock = false;
     let mut is_doctest = false;
 
-    let mut new_comments = Vec::new();
-    let mut string;
+    let mut has_doctests = false;
+
+    let mut docs_offset = TextSize::new(0);
+    for mut line in docs.docs().split('\n') {
+        let mut line_docs_offset = docs_offset;
+        docs_offset += TextSize::of(line) + TextSize::of("\n");
+
+        match RUSTDOC_FENCES.into_iter().find_map(|fence| line.find(fence)) {
+            Some(idx) => {
+                is_codeblock = !is_codeblock;
+                // Check whether code is rust by inspecting fence guards
+                let guards = &line[idx + RUSTDOC_FENCE_LENGTH..];
+                let is_rust = is_rust_fence(guards);
+                is_doctest = is_codeblock && is_rust;
+                continue;
+            }
+            None if !is_doctest => continue,
+            None => (),
+        }
+
+        // lines marked with `#` should be ignored in output, we skip the `#` char
+        if line.starts_with('#') {
+            line_docs_offset += TextSize::of("#");
+            line = &line["#".len()..];
+        }
 
-    for attr in attributes.by_key(sym::doc).attrs() {
-        let InFile { file_id, value: src } = attrs_source_map.source_of(attr);
+        let Some((InFile { file_id, value: mapped_range }, _)) =
+            docs.find_ast_range(TextRange::at(line_docs_offset, TextSize::of(line)))
+        else {
+            continue;
+        };
         if file_id != src_file_id {
             continue;
         }
-        let (line, range) = match &src {
-            Either::Left(it) => {
-                string = match find_doc_string_in_attr(attr, it) {
-                    Some(it) => it,
-                    None => continue,
-                };
-                let text = string.text();
-                let text_range = string.syntax().text_range();
-                match string.quote_offsets() {
-                    Some(QuoteOffsets { contents, .. }) => {
-                        (&text[contents - text_range.start()], contents)
-                    }
-                    None => (text, text_range),
-                }
-            }
-            Either::Right(comment) => {
-                let value = comment.prefix().len();
-                let range = comment.syntax().text_range();
-                (
-                    &comment.text()[value..],
-                    TextRange::new(range.start() + TextSize::try_from(value).unwrap(), range.end()),
-                )
-            }
-        };
-
-        let mut range_start = range.start();
-        for line in line.split('\n') {
-            let line_len = TextSize::from(line.len() as u32);
-            let prev_range_start = {
-                let next_range_start = range_start + line_len + TextSize::from(1);
-                mem::replace(&mut range_start, next_range_start)
-            };
-            let mut pos = TextSize::from(0);
-
-            match RUSTDOC_FENCES.into_iter().find_map(|fence| line.find(fence)) {
-                Some(idx) => {
-                    is_codeblock = !is_codeblock;
-                    // Check whether code is rust by inspecting fence guards
-                    let guards = &line[idx + RUSTDOC_FENCE_LENGTH..];
-                    let is_rust = is_rust_fence(guards);
-                    is_doctest = is_codeblock && is_rust;
-                    continue;
-                }
-                None if !is_doctest => continue,
-                None => (),
-            }
-            // whitespace after comment is ignored
-            if let Some(ws) = line[pos.into()..].chars().next().filter(|c| c.is_whitespace()) {
-                pos += TextSize::of(ws);
-            }
-            // lines marked with `#` should be ignored in output, we skip the `#` char
-            if line[pos.into()..].starts_with('#') {
-                pos += TextSize::of('#');
-            }
-
-            new_comments.push(TextRange::at(prev_range_start, pos));
-            inj.add(&line[pos.into()..], TextRange::new(pos, line_len) + prev_range_start);
-            inj.add_unmapped("\n");
-        }
+        has_doctests = true;
+        inj.add(line, mapped_range);
+        inj.add_unmapped("\n");
     }
 
-    if new_comments.is_empty() {
+    if !has_doctests {
        return; // no need to run an analysis on an empty file
    }

@@ -240,37 +198,6 @@ pub(super) fn doc_comment(
             }
         }
     }
-
-    for range in new_comments {
-        hl.add(HlRange {
-            range,
-            highlight: HlTag::Comment | HlMod::Documentation,
-            binding_hash: None,
-        });
-    }
-}
-
-fn find_doc_string_in_attr(attr: &hir::Attr, it: &ast::Attr) -> Option {
-    match it.expr() {
-        // #[doc = lit]
-        Some(ast::Expr::Literal(lit)) => match lit.kind() {
-            ast::LiteralKind::String(it) => Some(it),
-            _ => None,
-        },
-        // #[cfg_attr(..., doc = "", ...)]
-        None => {
-            // We gotta hunt the string token manually here
-            let text = attr.string_value()?.as_str();
-            // FIXME: We just pick the first string literal that has the same text as the doc attribute
-            // This means technically we might highlight the wrong one
-            it.syntax()
-                .descendants_with_tokens()
-                .filter_map(NodeOrToken::into_token)
-                .filter_map(ast::String::cast)
-                .find(|string| string.text().get(1..string.text().len() - 1) == Some(text))
-        }
-        _ => None,
-    }
 }
 
 fn module_def_to_hl_tag(def: Definition) -> HlTag {
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
index d00f279c82995..53750ae0bac07 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
@@ -42,21 +42,21 @@
//! This is a module to test doc injection.
 //! ```
-//! fn test() {}
+//! fn test() {}
 //! ```
 
 //! Syntactic name ref highlighting testing
 //! ```rust
-//! extern crate self;
-//! extern crate other as otter;
-//! extern crate core;
-//! trait T { type Assoc; }
-//! fn f<Arg>() -> use<Arg> where (): T<Assoc = ()> {}
+//! extern crate self;
+//! extern crate other as otter;
+//! extern crate core;
+//! trait T { type Assoc; }
+//! fn f<Arg>() -> use<Arg> where (): T<Assoc = ()> {}
 //! ```
 mod outline_module;
 
 /// ```
-/// let _ = "early doctests should not go boom";
+/// let _ = "early doctests should not go boom";
 /// ```
 struct Foo {
     bar: bool,
@@ -65,15 +65,15 @@
 /// This is an impl of [`Foo`] with a code block.
 ///
 /// ```
-/// fn foo() {
+/// fn foo() {
 ///
-/// }
+/// }
 /// ```
 impl Foo {
     /// ```
-    /// let _ = "Call me
+    /// let _ = "Call me
     //    KILLER WHALE
-    ///     Ishmael.";
+    ///     Ishmael.";
     /// ```
     pub const bar: bool = true;
 
@@ -82,8 +82,8 @@
     /// # Examples
     ///
     /// ```
-    /// # #![allow(unused_mut)]
-    /// let mut foo: Foo = Foo::new();
+    /// # #![allow(unused_mut)]
+    /// let mut foo: Foo = Foo::new();
     /// ```
     pub const fn new() -> Foo {
         Foo { bar: true }
@@ -94,38 +94,38 @@
     /// # Examples
     ///
     /// ```
-    /// use x::y;
+    /// use x::y;
     ///
-    /// let foo = Foo::new();
+    /// let foo = Foo::new();
     ///
-    /// // calls bar on foo
-    /// assert!(foo.bar());
+    /// // calls bar on foo
+    /// assert!(foo.bar());
     ///
-    /// let bar = foo.bar || Foo::bar;
+    /// let bar = foo.bar || Foo::bar;
     ///
-    /// /* multi-line
-    ///        comment */
+    /// /* multi-line
+    ///        comment */
     ///
-    /// let multi_line_string = "Foo
-    ///   bar\n
-    ///          ";
+    /// let multi_line_string = "Foo
+    ///   bar\n
+    ///          ";
     ///
     /// ```
     ///
     /// ```rust,no_run
-    /// let foobar = Foo::new().bar();
+    /// let foobar = Foo::new().bar();
     /// ```
     ///
     /// ~~~rust,no_run
-    /// // code block with tilde.
-    /// let foobar = Foo::new().bar();
+    /// // code block with tilde.
+    /// let foobar = Foo::new().bar();
     /// ~~~
     ///
     /// ```
-    /// // functions
-    /// fn foo<T, const X: usize>(arg: i32) {
-    ///     let x: T = X;
-    /// }
+    /// // functions
+    /// fn foo<T, const X: usize>(arg: i32) {
+    ///     let x: T = X;
+    /// }
     /// ```
     ///
     /// ```sh
@@ -150,8 +150,8 @@
 }
 
 /// ```
-/// macro_rules! noop { ($expr:expr) => { $expr }}
-/// noop!(1);
+/// macro_rules! noop { ($expr:expr) => { $expr }}
+/// noop!(1);
 /// ```
 macro_rules! noop {
     ($expr:expr) => {
@@ -160,18 +160,18 @@
 }
 
 /// ```rust
-/// let _ = example(&[1, 2, 3]);
+/// let _ = example(&[1, 2, 3]);
 /// ```
 ///
 /// ```
-/// loop {}
+/// loop {}
 #[cfg_attr(not(feature = "false"), doc = "loop {}")]
 #[doc = "loop {}"]
 /// ```
 ///
 #[cfg_attr(feature = "alloc", doc = "```rust")]
 #[cfg_attr(not(feature = "alloc"), doc = "```ignore")]
-/// let _ = example(&alloc::vec![1, 2, 3]);
+/// let _ = example(&alloc::vec![1, 2, 3]);
 /// ```
 pub fn mix_and_match() {}
 
@@ -187,7 +187,7 @@
 /**
     Really, I don't get it
     ```rust
-    let _ = example(&[1, 2, 3]);
+    let _ = example(&[1, 2, 3]);
     ```
     [`block_comments`] tests these without indentation
 */
diff --git a/src/tools/rust-analyzer/crates/ide/src/typing.rs b/src/tools/rust-analyzer/crates/ide/src/typing.rs
index ed55ac5bf04b0..0381865fed457 100644
--- a/src/tools/rust-analyzer/crates/ide/src/typing.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/typing.rs
@@ -75,7 +75,10 @@ pub(crate) fn on_char_typed(
     // FIXME: We are hitting the database here, if we are unlucky this call might block momentarily
     // causing the editor to feel sluggish!
     let edition = Edition::CURRENT_FIXME;
-    let editioned_file_id_wrapper = EditionedFileId::new(db, position.file_id, edition);
+    let editioned_file_id_wrapper = EditionedFileId::from_span_guess_origin(
+        db,
+        span::EditionedFileId::new(position.file_id, edition),
+    );
     let file = &db.parse(editioned_file_id_wrapper);
     let char_matches_position =
         file.tree().syntax().text().char_at(position.offset) == Some(char_typed);
diff --git a/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs b/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs
index fdc583a15cc71..76a2802d294c0 100644
--- a/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs
@@ -51,7 +51,7 @@ use ide_db::text_edit::TextEdit;
 // ![On Enter](https://user-images.githubusercontent.com/48062697/113065578-04c21800-91b1-11eb-82b8-22b8c481e645.gif)
 pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option {
     let editioned_file_id_wrapper =
-        ide_db::base_db::EditionedFileId::current_edition(db, position.file_id);
+        ide_db::base_db::EditionedFileId::current_edition_guess_origin(db, position.file_id);
     let parse = db.parse(editioned_file_id_wrapper);
     let file = parse.tree();
     let token = file.syntax().token_at_offset(position.offset).left_biased()?;
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs b/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs
index 2cd751463bdb8..c9a2f31696f45 100644
--- a/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs
@@ -12,6 +12,6 @@ pub(crate) fn view_item_tree(db: &RootDatabase, file_id: FileId) -> String {
     let sema = Semantics::new(db);
     let file_id = sema
         .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
+        .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id));
     db.file_item_tree(file_id.into()).pretty_print(db, file_id.edition(db))
 }
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
index 717bd230a21e9..824cc2ff94e36 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -145,7 +145,9 @@ impl flags::AnalysisStats {
                     if !source_root.is_library || self.with_deps {
                         let length = db.file_text(file_id).text(db).lines().count();
                         let item_stats = db
-                            .file_item_tree(EditionedFileId::current_edition(db, file_id).into())
+                            .file_item_tree(
+                                EditionedFileId::current_edition_guess_origin(db, file_id).into(),
+                            )
                             .item_tree_stats()
                             .into();
 
@@ -155,7 +157,9 @@ impl flags::AnalysisStats {
                     } else {
                         let length = db.file_text(file_id).text(db).lines().count();
                         let item_stats = db
-                            .file_item_tree(EditionedFileId::current_edition(db, file_id).into())
+                            .file_item_tree(
+                                EditionedFileId::current_edition_guess_origin(db, file_id).into(),
+                            )
                             .item_tree_stats()
                             .into();
 
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
index 37f83f6dee678..92bb2c1ce4fa1 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
@@ -514,12 +514,12 @@ mod test {
 
     fn position(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (AnalysisHost, FilePosition) {
         let mut host = AnalysisHost::default();
-        let change_fixture = ChangeFixture::parse(host.raw_database(), ra_fixture);
+        let change_fixture = ChangeFixture::parse(ra_fixture);
         host.raw_database_mut().apply_change(change_fixture.change);
         let (file_id, range_or_offset) =
             change_fixture.file_position.expect("expected a marker ()");
         let offset = range_or_offset.expect_offset();
-        let position = FilePosition { file_id: file_id.file_id(host.raw_database()), offset };
+        let position = FilePosition { file_id: file_id.file_id(), offset };
         (host, position)
     }
 
@@ -870,7 +870,7 @@ pub mod example_mod {
         let s = "/// foo\nfn bar() {}";
 
         let mut host = AnalysisHost::default();
-        let change_fixture = ChangeFixture::parse(host.raw_database(), s);
+        let change_fixture = ChangeFixture::parse(s);
         host.raw_database_mut().apply_change(change_fixture.change);
 
         let analysis = host.analysis();
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
index e3e3a143de03a..cc2ab0f07ca0a 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
@@ -73,7 +73,7 @@ impl flags::Search {
                 let sr = db.source_root(root).source_root(db);
                 for file_id in sr.iter() {
                     for debug_info in match_finder.debug_where_text_equal(
-                        EditionedFileId::current_edition(db, file_id),
+                        EditionedFileId::current_edition_guess_origin(db, file_id),
                         debug_snippet,
                     ) {
                         println!("{debug_info:#?}");
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs
index 0362e13b88b7b..2cb0fe9eefadf 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs
@@ -141,7 +141,7 @@ fn all_unresolved_references(
 ) -> Vec {
     let file_id = sema
         .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition(sema.db, file_id));
+        .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(sema.db, file_id));
     let file = sema.parse(file_id);
     let root = file.syntax();
 
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
index 04b20033062eb..5a42cbd933f99 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -119,7 +119,7 @@ pub(crate) fn diagnostic_severity(severity: Severity) -> lsp_types::DiagnosticSe
     }
 }
 
-pub(crate) fn documentation(documentation: Documentation) -> lsp_types::Documentation {
+pub(crate) fn documentation(documentation: Documentation<'_>) -> lsp_types::Documentation {
     let value = format_docs(&documentation);
     let markup_content = lsp_types::MarkupContent { kind: lsp_types::MarkupKind::Markdown, value };
     lsp_types::Documentation::MarkupContent(markup_content)
@@ -1970,7 +1970,7 @@ pub(crate) fn markup_content(
         ide::HoverDocFormat::Markdown => lsp_types::MarkupKind::Markdown,
         ide::HoverDocFormat::PlainText => lsp_types::MarkupKind::PlainText,
     };
-    let value = format_docs(&Documentation::new(markup.into()));
+    let value = format_docs(&Documentation::new_owned(markup.into()));
     lsp_types::MarkupContent { kind, value }
 }
 
diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs
index 4e525be3fe3c8..2d1955d1f651e 100644
--- a/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs
@@ -1,6 +1,6 @@
 //! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
 
-use std::{fmt, hash::Hash};
+use std::{collections::VecDeque, fmt, hash::Hash};
 
 use intern::Symbol;
 use rustc_hash::{FxHashMap, FxHashSet};
@@ -102,26 +102,34 @@ where
     SpanData: Copy + fmt::Debug,
     SpanMap: SpanMapper>,
 {
-    let mut c = Converter::new(node, map, Default::default(), Default::default(), span, mode);
+    let mut c =
+        Converter::new(node, map, Default::default(), Default::default(), span, mode, |_, _| {
+            (true, Vec::new())
+        });
     convert_tokens(&mut c)
 }
 
 /// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the
 /// subtree's spans. Additionally using the append and remove parameters, the additional tokens can
 /// be injected or hidden from the output.
-pub fn syntax_node_to_token_tree_modified(
+pub fn syntax_node_to_token_tree_modified(
     node: &SyntaxNode,
     map: SpanMap,
     append: FxHashMap>>>,
     remove: FxHashSet,
     call_site: SpanData,
     mode: DocCommentDesugarMode,
+    on_enter: OnEvent,
 ) -> tt::TopSubtree>
 where
     SpanMap: SpanMapper>,
     SpanData: Copy + fmt::Debug,
+    OnEvent: FnMut(
+        &mut PreorderWithTokens,
+        &WalkEvent,
+    ) -> (bool, Vec>>),
 {
-    let mut c = Converter::new(node, map, append, remove, call_site, mode);
+    let mut c = Converter::new(node, map, append, remove, call_site, mode, on_enter);
     convert_tokens(&mut c)
 }
 
@@ -624,9 +632,9 @@ where
     }
 }
 
-struct Converter {
+struct Converter {
     current: Option,
-    current_leaves: Vec>,
+    current_leaves: VecDeque>,
     preorder: PreorderWithTokens,
     range: TextRange,
     punct_offset: Option<(SyntaxToken, TextSize)>,
@@ -636,9 +644,13 @@ struct Converter {
     remove: FxHashSet,
     call_site: S,
     mode: DocCommentDesugarMode,
+    on_event: OnEvent,
 }
 
-impl Converter {
+impl Converter
+where
+    OnEvent: FnMut(&mut PreorderWithTokens, &WalkEvent) -> (bool, Vec>),
+{
     fn new(
         node: &SyntaxNode,
         map: SpanMap,
@@ -646,8 +658,9 @@ impl Converter {
         remove: FxHashSet,
         call_site: S,
         mode: DocCommentDesugarMode,
+        on_enter: OnEvent,
     ) -> Self {
-        let mut this = Converter {
+        let mut converter = Converter {
             current: None,
             preorder: node.preorder_with_tokens(),
             range: node.text_range(),
@@ -656,16 +669,21 @@ impl Converter {
             append,
             remove,
             call_site,
-            current_leaves: vec![],
+            current_leaves: VecDeque::new(),
             mode,
+            on_event: on_enter,
         };
-        let first = this.next_token();
-        this.current = first;
-        this
+        converter.current = converter.next_token();
+        converter
     }
 
     fn next_token(&mut self) -> Option {
         while let Some(ev) = self.preorder.next() {
+            let (keep_event, insert_leaves) = (self.on_event)(&mut self.preorder, &ev);
+            self.current_leaves.extend(insert_leaves);
+            if !keep_event {
+                continue;
+            }
             match ev {
                 WalkEvent::Enter(token) => {
                     if self.remove.contains(&token) {
@@ -675,10 +693,9 @@ impl Converter {
                             }
                             node => {
                                 self.preorder.skip_subtree();
-                                if let Some(mut v) = self.append.remove(&node) {
-                                    v.reverse();
+                                if let Some(v) = self.append.remove(&node) {
                                     self.current_leaves.extend(v);
-                                    return None;
+                                    continue;
                                 }
                             }
                         }
@@ -687,10 +704,9 @@ impl Converter {
                     }
                 }
                 WalkEvent::Leave(ele) => {
-                    if let Some(mut v) = self.append.remove(&ele) {
-                        v.reverse();
+                    if let Some(v) = self.append.remove(&ele) {
                         self.current_leaves.extend(v);
-                        return None;
+                        continue;
                     }
                 }
             }
@@ -715,8 +731,8 @@ impl SynToken {
     }
 }
 
-impl SrcToken, S> for SynToken {
-    fn kind(&self, _ctx: &Converter) -> SyntaxKind {
+impl SrcToken, S> for SynToken {
+    fn kind(&self, _ctx: &Converter) -> SyntaxKind {
         match self {
             SynToken::Ordinary(token) => token.kind(),
             SynToken::Punct { token, offset: i } => {
@@ -728,14 +744,14 @@ impl SrcToken, S> for SynToken {
             }
         }
     }
-    fn to_char(&self, _ctx: &Converter) -> Option {
+    fn to_char(&self, _ctx: &Converter) -> Option {
         match self {
             SynToken::Ordinary(_) => None,
             SynToken::Punct { token: it, offset: i } => it.text().chars().nth(*i),
             SynToken::Leaf(_) => None,
         }
     }
-    fn to_text(&self, _ctx: &Converter) -> SmolStr {
+    fn to_text(&self, _ctx: &Converter) -> SmolStr {
         match self {
             SynToken::Ordinary(token) | SynToken::Punct { token, offset: _ } => token.text().into(),
             SynToken::Leaf(_) => {
@@ -752,10 +768,11 @@ impl SrcToken, S> for SynToken {
     }
 }
 
-impl TokenConverter for Converter
+impl TokenConverter for Converter
 where
     S: Copy,
     SpanMap: SpanMapper,
+    OnEvent: FnMut(&mut PreorderWithTokens, &WalkEvent) -> (bool, Vec>),
 {
     type Token = SynToken;
     fn convert_doc_comment(
@@ -781,10 +798,7 @@ where
             ));
         }
 
-        if let Some(leaf) = self.current_leaves.pop() {
-            if self.current_leaves.is_empty() {
-                self.current = self.next_token();
-            }
+        if let Some(leaf) = self.current_leaves.pop_front() {
             return Some((SynToken::Leaf(leaf), TextRange::empty(TextSize::new(0))));
         }
 
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast.rs b/src/tools/rust-analyzer/crates/syntax/src/ast.rs
index aea99a4389b9b..5d67fd4491755 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast.rs
@@ -26,7 +26,8 @@ pub use self::{
     generated::{nodes::*, tokens::*},
     node_ext::{
         AttrKind, FieldKind, Macro, NameLike, NameOrNameRef, PathSegmentKind, SelfParamKind,
-        SlicePatComponents, StructKind, TypeBoundKind, TypeOrConstParam, VisibilityKind,
+        SlicePatComponents, StructKind, TokenTreeChildren, TypeBoundKind, TypeOrConstParam,
+        VisibilityKind,
     },
     operators::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp},
     token_ext::{
@@ -35,6 +36,7 @@ pub use self::{
     traits::{
         AttrDocCommentIter, DocCommentIter, HasArgList, HasAttrs, HasDocComments, HasGenericArgs,
         HasGenericParams, HasLoopBody, HasModuleItem, HasName, HasTypeBounds, HasVisibility,
+        attrs_including_inner,
     },
 };
 
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
index af741d100f680..901d17bb14911 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
@@ -10,7 +10,7 @@ use parser::SyntaxKind;
 use rowan::{GreenNodeData, GreenTokenData};
 
 use crate::{
-    NodeOrToken, SmolStr, SyntaxElement, SyntaxToken, T, TokenText,
+    NodeOrToken, SmolStr, SyntaxElement, SyntaxElementChildren, SyntaxToken, T, TokenText,
     ast::{
         self, AstNode, AstToken, HasAttrs, HasGenericArgs, HasGenericParams, HasName,
         HasTypeBounds, SyntaxNode, support,
@@ -1114,3 +1114,39 @@ impl ast::OrPat {
             .filter(|it| it.kind() == T![|])
     }
 }
+
+/// An iterator over the elements in an [`ast::TokenTree`].
+///
+/// Does not yield trivia or the delimiters.
+#[derive(Clone)]
+pub struct TokenTreeChildren {
+    iter: SyntaxElementChildren,
+}
+
+impl TokenTreeChildren {
+    #[inline]
+    pub fn new(tt: &ast::TokenTree) -> Self {
+        let mut iter = tt.syntax.children_with_tokens();
+        iter.next(); // Bump the opening delimiter.
+        Self { iter }
+    }
+}
+
+impl Iterator for TokenTreeChildren {
+    type Item = NodeOrToken;
+
+    #[inline]
+    fn next(&mut self) -> Option {
+        self.iter.find_map(|item| match item {
+            NodeOrToken::Node(node) => ast::TokenTree::cast(node).map(NodeOrToken::Node),
+            NodeOrToken::Token(token) => {
+                let kind = token.kind();
+                (!matches!(
+                    kind,
+                    SyntaxKind::WHITESPACE | SyntaxKind::COMMENT | T![')'] | T![']'] | T!['}']
+                ))
+                .then_some(NodeOrToken::Token(token))
+            }
+        })
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
index e1a9f3ac03418..83ab87c1c687e 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
@@ -40,8 +40,8 @@ impl ast::Comment {
     }
 
     /// Returns the textual content of a doc comment node as a single string with prefix and suffix
-    /// removed.
-    pub fn doc_comment(&self) -> Option<&str> {
+    /// removed, plus the offset of the returned string from the beginning of the comment.
+    pub fn doc_comment(&self) -> Option<(&str, TextSize)> {
         let kind = self.kind();
         match kind {
             CommentKind { shape, doc: Some(_) } => {
@@ -52,7 +52,7 @@ impl ast::Comment {
                 } else {
                     text
                 };
-                Some(text)
+                Some((text, TextSize::of(prefix)))
             }
             _ => None,
         }
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
index 5290f32dd27db..2f4109a2c9760 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
@@ -4,8 +4,9 @@
 use either::Either;
 
 use crate::{
-    SyntaxElement, SyntaxToken, T,
+    SyntaxElement, SyntaxNode, SyntaxToken, T,
     ast::{self, AstChildren, AstNode, AstToken, support},
+    match_ast,
     syntax_node::SyntaxElementChildren,
 };
 
@@ -76,34 +77,44 @@ pub trait HasAttrs: AstNode {
         self.attrs().filter_map(|x| x.as_simple_atom()).any(|x| x == atom)
     }
 
-    /// Returns all attributes of this node, including inner attributes that may not be directly under this node
-    /// but under a child.
-    fn attrs_including_inner(self) -> impl Iterator
-    where
-        Self: Sized,
-    {
-        let inner_attrs_node = if let Some(it) =
-            support::child::(self.syntax()).and_then(|it| it.stmt_list())
-        {
-            Some(it.syntax)
-        } else if let Some(it) = support::child::(self.syntax()) {
-            Some(it.syntax)
-        } else if let Some(it) = support::child::(self.syntax()) {
-            Some(it.syntax)
-        } else if let Some(it) = support::child::(self.syntax()) {
-            Some(it.syntax)
-        } else if let Some(it) = support::child::(self.syntax()) {
-            Some(it.syntax)
-        } else if let Some(it) = support::child::(self.syntax()) {
-            Some(it.syntax)
-        } else {
-            None
-        };
-
-        self.attrs().chain(inner_attrs_node.into_iter().flat_map(|it| support::children(&it)))
+    /// This may return the same node as called with (with `SourceFile`). The caller has the responsibility
+    /// to avoid duplicate attributes.
+    fn inner_attributes_node(&self) -> Option {
+        let syntax = self.syntax();
+        Some(match_ast! {
+            match syntax {
+                // A `SourceFile` contains the inner attributes of itself.
+                ast::SourceFile(_) => syntax.clone(),
+                ast::ExternBlock(it) => it.extern_item_list()?.syntax().clone(),
+                ast::Fn(it) => it.body()?.stmt_list()?.syntax().clone(),
+                ast::MatchExpr(it) => it.match_arm_list()?.syntax().clone(),
+                ast::Impl(it) => it.assoc_item_list()?.syntax().clone(),
+                ast::Trait(it) => it.assoc_item_list()?.syntax().clone(),
+                ast::Module(it) => it.item_list()?.syntax().clone(),
+                ast::BlockExpr(it) => {
+                    if !it.may_carry_attributes() {
+                        return None;
+                    }
+                    syntax.clone()
+                },
+                _ => return None,
+            }
+        })
     }
 }
 
+/// Returns all attributes of this node, including inner attributes that may not be directly under this node
+/// but under a child.
+pub fn attrs_including_inner(owner: &dyn HasAttrs) -> impl Iterator + Clone {
+    owner.attrs().filter(|attr| attr.kind().is_outer()).chain(
+        owner
+            .inner_attributes_node()
+            .into_iter()
+            .flat_map(|node| support::children::(&node))
+            .filter(|attr| attr.kind().is_inner()),
+    )
+}
+
 pub trait HasDocComments: HasAttrs {
     fn doc_comments(&self) -> DocCommentIter {
         DocCommentIter { iter: self.syntax().children_with_tokens() }
@@ -118,7 +129,7 @@ impl DocCommentIter {
     #[cfg(test)]
     pub fn doc_comment_text(self) -> Option {
         let docs = itertools::Itertools::join(
-            &mut self.filter_map(|comment| comment.doc_comment().map(ToOwned::to_owned)),
+            &mut self.filter_map(|comment| comment.doc_comment().map(|it| it.0.to_owned())),
             "\n",
         );
         if docs.is_empty() { None } else { Some(docs) }
@@ -151,7 +162,7 @@ impl AttrDocCommentIter {
 impl Iterator for AttrDocCommentIter {
     type Item = Either;
     fn next(&mut self) -> Option {
-        self.iter.by_ref().find_map(|el| match el {
+        self.iter.find_map(|el| match el {
             SyntaxElement::Node(node) => ast::Attr::cast(node).map(Either::Left),
             SyntaxElement::Token(tok) => {
                 ast::Comment::cast(tok).filter(ast::Comment::is_doc).map(Either::Right)
diff --git a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs
index aefe81f83e294..2b05add55216d 100644
--- a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs
@@ -5,7 +5,7 @@ use base_db::target::TargetData;
 use base_db::{
     Crate, CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CrateWorkspaceData,
     DependencyBuilder, Env, FileChange, FileSet, FxIndexMap, LangCrateOrigin, SourceDatabase,
-    SourceRoot, Version, VfsPath, salsa,
+    SourceRoot, Version, VfsPath,
 };
 use cfg::CfgOptions;
 use hir_expand::{
@@ -37,10 +37,11 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
     ) -> (Self, EditionedFileId) {
         let mut db = Self::default();
-        let fixture = ChangeFixture::parse(&db, ra_fixture);
+        let fixture = ChangeFixture::parse(ra_fixture);
         fixture.change.apply(&mut db);
         assert_eq!(fixture.files.len(), 1, "Multiple file found in the fixture");
-        (db, fixture.files[0])
+        let file = EditionedFileId::from_span_guess_origin(&db, fixture.files[0]);
+        (db, file)
     }
 
     #[track_caller]
@@ -48,16 +49,21 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
     ) -> (Self, Vec) {
         let mut db = Self::default();
-        let fixture = ChangeFixture::parse(&db, ra_fixture);
+        let fixture = ChangeFixture::parse(ra_fixture);
         fixture.change.apply(&mut db);
         assert!(fixture.file_position.is_none());
-        (db, fixture.files)
+        let files = fixture
+            .files
+            .into_iter()
+            .map(|file| EditionedFileId::from_span_guess_origin(&db, file))
+            .collect();
+        (db, files)
     }
 
     #[track_caller]
     fn with_files(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> Self {
         let mut db = Self::default();
-        let fixture = ChangeFixture::parse(&db, ra_fixture);
+        let fixture = ChangeFixture::parse(ra_fixture);
         fixture.change.apply(&mut db);
         assert!(fixture.file_position.is_none());
         db
@@ -69,12 +75,8 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
         proc_macros: Vec<(String, ProcMacro)>,
     ) -> Self {
         let mut db = Self::default();
-        let fixture = ChangeFixture::parse_with_proc_macros(
-            &db,
-            ra_fixture,
-            MiniCore::RAW_SOURCE,
-            proc_macros,
-        );
+        let fixture =
+            ChangeFixture::parse_with_proc_macros(ra_fixture, MiniCore::RAW_SOURCE, proc_macros);
         fixture.change.apply(&mut db);
         assert!(fixture.file_position.is_none());
         db
@@ -99,12 +101,13 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
     ) -> (Self, EditionedFileId, RangeOrOffset) {
         let mut db = Self::default();
-        let fixture = ChangeFixture::parse(&db, ra_fixture);
+        let fixture = ChangeFixture::parse(ra_fixture);
         fixture.change.apply(&mut db);
 
         let (file_id, range_or_offset) = fixture
             .file_position
             .expect("Could not find file position in fixture. Did you forget to add an `$0`?");
+        let file_id = EditionedFileId::from_span_guess_origin(&db, file_id);
         (db, file_id, range_or_offset)
     }
 
@@ -116,9 +119,9 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
 impl WithFixture for DB {}
 
 pub struct ChangeFixture {
-    pub file_position: Option<(EditionedFileId, RangeOrOffset)>,
+    pub file_position: Option<(span::EditionedFileId, RangeOrOffset)>,
     pub file_lines: Vec,
-    pub files: Vec,
+    pub files: Vec,
     pub change: ChangeWithProcMacros,
     pub sysroot_files: Vec,
 }
@@ -126,15 +129,11 @@ pub struct ChangeFixture {
 const SOURCE_ROOT_PREFIX: &str = "/";
 
 impl ChangeFixture {
-    pub fn parse(
-        db: &dyn salsa::Database,
-        #[rust_analyzer::rust_fixture] ra_fixture: &str,
-    ) -> ChangeFixture {
-        Self::parse_with_proc_macros(db, ra_fixture, MiniCore::RAW_SOURCE, Vec::new())
+    pub fn parse(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> ChangeFixture {
+        Self::parse_with_proc_macros(ra_fixture, MiniCore::RAW_SOURCE, Vec::new())
     }
 
     pub fn parse_with_proc_macros(
-        db: &dyn salsa::Database,
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
         minicore_raw: &str,
         mut proc_macro_defs: Vec<(String, ProcMacro)>,
@@ -202,7 +201,7 @@ impl ChangeFixture {
             let meta = FileMeta::from_fixture(entry, current_source_root_kind);
             if let Some(range_or_offset) = range_or_offset {
                 file_position =
-                    Some((EditionedFileId::new(db, file_id, meta.edition), range_or_offset));
+                    Some((span::EditionedFileId::new(file_id, meta.edition), range_or_offset));
             }
 
             assert!(meta.path.starts_with(SOURCE_ROOT_PREFIX));
@@ -259,7 +258,7 @@ impl ChangeFixture {
             source_change.change_file(file_id, Some(text));
             let path = VfsPath::new_virtual_path(meta.path);
             file_set.insert(file_id, path);
-            files.push(EditionedFileId::new(db, file_id, meta.edition));
+            files.push(span::EditionedFileId::new(file_id, meta.edition));
             file_id = FileId::from_raw(file_id.index() + 1);
         }
 

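A minimal usage sketch (not part of the patches themselves) of the fixture flow after `ChangeFixture::parse` stopped taking a database, as shown in the test-fixture diff above: the fixture is parsed standalone, applied to a database, and the returned `span::EditionedFileId`s are interned per database. `TestDB` and `ra_fixture` are placeholder names.

```rust
// Sketch only: assumes a test database type (here called `TestDB`) that
// implements the fixture traits used above.
let mut db = TestDB::default();
let fixture = ChangeFixture::parse(ra_fixture);
fixture.change.apply(&mut db);
// `fixture.files` now stores `span::EditionedFileId`s; interning into the
// database (and guessing the origin crate) happens explicitly.
let file = EditionedFileId::from_span_guess_origin(&db, fixture.files[0]);
```
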
From 2ae4ddbecbe69e1076cc3526a38453fa3c7020b4 Mon Sep 17 00:00:00 2001
From: Chayim Refael Friedman 
Date: Wed, 22 Oct 2025 19:19:13 +0300
Subject: [PATCH 51/76] Revert "internal: Rewrite attribute handling"

---
 src/tools/rust-analyzer/Cargo.lock            |    6 +-
 src/tools/rust-analyzer/Cargo.toml            |    5 +-
 .../crates/base-db/src/editioned_file_id.rs   |  291 ---
 .../rust-analyzer/crates/base-db/src/input.rs |    7 +-
 .../rust-analyzer/crates/base-db/src/lib.rs   |   39 +-
 src/tools/rust-analyzer/crates/cfg/Cargo.toml |    1 -
 .../rust-analyzer/crates/cfg/src/cfg_expr.rs  |   59 -
 .../rust-analyzer/crates/cfg/src/tests.rs     |   42 +-
 .../rust-analyzer/crates/hir-def/Cargo.toml   |    4 +-
 .../rust-analyzer/crates/hir-def/src/attr.rs  |  901 +++++++++
 .../rust-analyzer/crates/hir-def/src/attrs.rs | 1613 -----------------
 .../rust-analyzer/crates/hir-def/src/db.rs    |   71 +-
 .../crates/hir-def/src/expr_store/expander.rs |   14 +-
 .../crates/hir-def/src/expr_store/lower.rs    |   22 +-
 .../crates/hir-def/src/expr_store/pretty.rs   |   19 +-
 .../src/expr_store/tests/body/block.rs        |    4 +-
 .../src/expr_store/tests/signatures.rs        |   14 +-
 .../crates/hir-def/src/import_map.rs          |   34 +-
 .../crates/hir-def/src/item_tree.rs           |   40 +-
 .../crates/hir-def/src/item_tree/attrs.rs     |  220 ---
 .../crates/hir-def/src/item_tree/lower.rs     |   35 +-
 .../crates/hir-def/src/item_tree/pretty.rs    |   12 +-
 .../crates/hir-def/src/item_tree/tests.rs     |    9 +-
 .../crates/hir-def/src/lang_item.rs           |    7 +-
 .../rust-analyzer/crates/hir-def/src/lib.rs   |   94 +-
 .../hir-def/src/macro_expansion_tests/mbe.rs  |   43 +-
 .../hir-def/src/macro_expansion_tests/mod.rs  |   15 -
 .../src/macro_expansion_tests/proc_macros.rs  |   96 +-
 .../crates/hir-def/src/nameres.rs             |   15 +-
 .../crates/hir-def/src/nameres/assoc.rs       |   39 +-
 .../hir-def/src/nameres/attr_resolution.rs    |   10 +-
 .../crates/hir-def/src/nameres/collector.rs   |  179 +-
 .../crates/hir-def/src/nameres/diagnostics.rs |   14 +-
 .../hir-def/src/nameres/mod_resolution.rs     |    5 +-
 .../crates/hir-def/src/nameres/proc_macro.rs  |   24 +-
 .../crates/hir-def/src/signatures.rs          |  130 +-
 .../rust-analyzer/crates/hir-def/src/src.rs   |    9 +-
 .../crates/hir-def/src/test_db.rs             |   33 +-
 .../crates/hir-expand/Cargo.toml              |    2 -
 .../crates/hir-expand/src/attrs.rs            |  808 ++++-----
 .../crates/hir-expand/src/builtin/fn_macro.rs |    2 +-
 .../crates/hir-expand/src/cfg_process.rs      |  638 +++----
 .../rust-analyzer/crates/hir-expand/src/db.rs |  181 +-
 .../crates/hir-expand/src/declarative.rs      |   58 +-
 .../crates/hir-expand/src/files.rs            |   33 +-
 .../crates/hir-expand/src/fixup.rs            |    5 +-
 .../crates/hir-expand/src/lib.rs              |  165 +-
 .../crates/hir-expand/src/mod_path.rs         |   59 +-
 .../crates/hir-expand/src/span_map.rs         |   13 +-
 .../crates/hir-ty/src/consteval.rs            |    3 +-
 .../hir-ty/src/diagnostics/decl_check.rs      |    6 +-
 .../diagnostics/match_check/pat_analysis.rs   |    6 +-
 .../hir-ty/src/diagnostics/unsafe_check.rs    |    4 +-
 .../rust-analyzer/crates/hir-ty/src/infer.rs  |   14 +-
 .../crates/hir-ty/src/infer/coerce.rs         |   14 +-
 .../crates/hir-ty/src/infer/expr.rs           |   14 +-
 .../rust-analyzer/crates/hir-ty/src/layout.rs |    4 +-
 .../crates/hir-ty/src/layout/adt.rs           |   35 +-
 .../crates/hir-ty/src/method_resolution.rs    |    5 +-
 .../crates/hir-ty/src/mir/eval/shim.rs        |   45 +-
 .../crates/hir-ty/src/next_solver/interner.rs |   59 +-
 .../crates/hir-ty/src/target_feature.rs       |   46 +-
 .../crates/hir-ty/src/tests/incremental.rs    |   49 +-
 .../rust-analyzer/crates/hir-ty/src/utils.rs  |    8 +-
 .../rust-analyzer/crates/hir/src/attrs.rs     |  256 +--
 .../crates/hir/src/diagnostics.rs             |   13 +-
 src/tools/rust-analyzer/crates/hir/src/lib.rs |  267 ++-
 .../rust-analyzer/crates/hir/src/semantics.rs |   71 +-
 .../hir/src/semantics/child_by_source.rs      |   13 +-
 .../rust-analyzer/crates/hir/src/symbols.rs   |    8 +-
 .../src/handlers/add_missing_match_arms.rs    |    6 +-
 .../handlers/destructure_struct_binding.rs    |    4 +-
 .../src/handlers/move_module_to_file.rs       |   10 +-
 .../crates/ide-assists/src/lib.rs             |    4 +-
 .../crates/ide-assists/src/tests.rs           |    4 +-
 .../crates/ide-assists/src/utils.rs           |   13 +-
 .../src/completions/attribute/lint.rs         |    2 +-
 .../src/completions/flyimport.rs              |    4 +-
 .../ide-completion/src/completions/postfix.rs |    2 +-
 .../ide-completion/src/completions/snippet.rs |    2 +-
 .../crates/ide-completion/src/context.rs      |   22 +-
 .../crates/ide-completion/src/item.rs         |   12 +-
 .../crates/ide-completion/src/render.rs       |   13 +-
 .../ide-completion/src/render/literal.rs      |    2 +-
 .../ide-completion/src/render/pattern.rs      |    2 +-
 .../ide-completion/src/render/variant.rs      |    6 +-
 .../crates/ide-completion/src/tests.rs        |    4 +-
 .../rust-analyzer/crates/ide-db/src/defs.rs   |   38 +-
 .../crates/ide-db/src/documentation.rs        |  351 +++-
 .../crates/ide-db/src/ra_fixture.rs           |   12 +-
 .../crates/ide-db/src/rust_doc.rs             |    2 +-
 .../rust-analyzer/crates/ide-db/src/search.rs |   16 +-
 .../ide-db/src/test_data/test_doc_alias.txt   |   30 +-
 .../test_symbol_index_collection.txt          |  134 +-
 .../test_symbols_exclude_imports.txt          |    2 +-
 .../test_data/test_symbols_with_imports.txt   |    4 +-
 .../rust-analyzer/crates/ide-db/src/traits.rs |    6 +-
 .../src/handlers/inactive_code.rs             |    3 +-
 .../src/handlers/invalid_derive_target.rs     |    4 +-
 .../src/handlers/macro_error.rs               |   22 +-
 .../src/handlers/malformed_derive.rs          |    4 +-
 .../src/handlers/unresolved_macro_call.rs     |    5 +-
 .../crates/ide-diagnostics/src/lib.rs         |   43 +-
 .../crates/ide-ssr/src/from_comment.rs        |    2 +-
 .../rust-analyzer/crates/ide-ssr/src/lib.rs   |    6 +-
 .../crates/ide-ssr/src/search.rs              |    8 +-
 .../rust-analyzer/crates/ide/src/doc_links.rs |   38 +-
 .../crates/ide/src/doc_links/tests.rs         |   73 +-
 .../rust-analyzer/crates/ide/src/fixture.rs   |   32 +-
 .../crates/ide/src/goto_implementation.rs     |    2 +-
 .../crates/ide/src/highlight_related.rs       |    2 +-
 .../crates/ide/src/hover/render.rs            |   52 +-
 .../crates/ide/src/inlay_hints.rs             |    4 +-
 src/tools/rust-analyzer/crates/ide/src/lib.rs |   13 +-
 .../crates/ide/src/navigation_target.rs       |   38 +-
 .../crates/ide/src/references.rs              |    5 +-
 .../rust-analyzer/crates/ide/src/runnables.rs |   50 +-
 .../crates/ide/src/signature_help.rs          |   36 +-
 .../crates/ide/src/static_index.rs            |    6 +-
 .../crates/ide/src/syntax_highlighting.rs     |    2 +-
 .../ide/src/syntax_highlighting/html.rs       |    2 +-
 .../ide/src/syntax_highlighting/inject.rs     |  189 +-
 .../test_data/highlight_doctest.html          |   72 +-
 .../rust-analyzer/crates/ide/src/typing.rs    |    5 +-
 .../crates/ide/src/typing/on_enter.rs         |    2 +-
 .../crates/ide/src/view_item_tree.rs          |    2 +-
 .../rust-analyzer/src/cli/analysis_stats.rs   |    8 +-
 .../crates/rust-analyzer/src/cli/scip.rs      |    6 +-
 .../crates/rust-analyzer/src/cli/ssr.rs       |    2 +-
 .../src/cli/unresolved_references.rs          |    2 +-
 .../crates/rust-analyzer/src/lsp/to_proto.rs  |    4 +-
 .../crates/syntax-bridge/src/lib.rs           |   68 +-
 .../rust-analyzer/crates/syntax/src/ast.rs    |    4 +-
 .../crates/syntax/src/ast/node_ext.rs         |   38 +-
 .../crates/syntax/src/ast/token_ext.rs        |    6 +-
 .../crates/syntax/src/ast/traits.rs           |   67 +-
 .../crates/test-fixture/src/lib.rs            |   45 +-
 137 files changed, 3805 insertions(+), 4953 deletions(-)
 delete mode 100644 src/tools/rust-analyzer/crates/base-db/src/editioned_file_id.rs
 create mode 100644 src/tools/rust-analyzer/crates/hir-def/src/attr.rs
 delete mode 100644 src/tools/rust-analyzer/crates/hir-def/src/attrs.rs
 delete mode 100644 src/tools/rust-analyzer/crates/hir-def/src/item_tree/attrs.rs

diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock
index d31d233dc4b69..ea8d1a781dccb 100644
--- a/src/tools/rust-analyzer/Cargo.lock
+++ b/src/tools/rust-analyzer/Cargo.lock
@@ -725,7 +725,6 @@ dependencies = [
 name = "hir-expand"
 version = "0.0.0"
 dependencies = [
- "arrayvec",
  "base-db",
  "cfg",
  "cov-mark",
@@ -744,7 +743,6 @@ dependencies = [
  "stdx",
  "syntax",
  "syntax-bridge",
- "thin-vec",
  "tracing",
  "triomphe",
  "tt",
@@ -1993,9 +1991,9 @@ dependencies = [
 
 [[package]]
 name = "rowan"
-version = "0.15.17"
+version = "0.15.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d4f1e4a001f863f41ea8d0e6a0c34b356d5b733db50dadab3efef640bafb779b"
+checksum = "32a58fa8a7ccff2aec4f39cc45bf5f985cec7125ab271cf681c279fd00192b49"
 dependencies = [
  "countme",
  "hashbrown 0.14.5",
diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml
index 767dbcae90314..8a108974681a1 100644
--- a/src/tools/rust-analyzer/Cargo.toml
+++ b/src/tools/rust-analyzer/Cargo.toml
@@ -52,7 +52,7 @@ debug = 2
 # local crates
 macros = { path = "./crates/macros", version = "0.0.0" }
 base-db = { path = "./crates/base-db", version = "0.0.0" }
-cfg = { path = "./crates/cfg", version = "0.0.0", features = ["tt", "syntax"] }
+cfg = { path = "./crates/cfg", version = "0.0.0", features = ["tt"] }
 hir = { path = "./crates/hir", version = "0.0.0" }
 hir-def = { path = "./crates/hir-def", version = "0.0.0" }
 hir-expand = { path = "./crates/hir-expand", version = "0.0.0" }
@@ -131,7 +131,7 @@ process-wrap = { version = "8.2.1", features = ["std"] }
 pulldown-cmark-to-cmark = "10.0.4"
 pulldown-cmark = { version = "0.9.6", default-features = false }
 rayon = "1.10.0"
-rowan = "=0.15.17"
+rowan = "=0.15.15"
 # Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work
 # on impls without it
 salsa = { version = "0.24.0", default-features = true, features = [
@@ -167,7 +167,6 @@ tracing-subscriber = { version = "0.3.20", default-features = false, features =
 triomphe = { version = "0.1.14", default-features = false, features = ["std"] }
 url = "2.5.4"
 xshell = "0.2.7"
-thin-vec = "0.2.14"
 petgraph = { version = "0.8.2", default-features = false }
 
 # We need to freeze the version of the crate, as the raw-api feature is considered unstable
diff --git a/src/tools/rust-analyzer/crates/base-db/src/editioned_file_id.rs b/src/tools/rust-analyzer/crates/base-db/src/editioned_file_id.rs
deleted file mode 100644
index 2f8969c0ea339..0000000000000
--- a/src/tools/rust-analyzer/crates/base-db/src/editioned_file_id.rs
+++ /dev/null
@@ -1,291 +0,0 @@
-//! Defines [`EditionedFileId`], an interned wrapper around [`span::EditionedFileId`] that
-//! is interned (so queries can take it) and remembers its crate.
-
-use core::fmt;
-use std::hash::{Hash, Hasher};
-
-use span::Edition;
-use vfs::FileId;
-
-use crate::{Crate, RootQueryDb};
-
-#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
-pub struct EditionedFileId(
-    salsa::Id,
-    std::marker::PhantomData<&'static salsa::plumbing::interned::Value>,
-);
-
-const _: () = {
-    use salsa::plumbing as zalsa_;
-    use zalsa_::interned as zalsa_struct_;
-    type Configuration_ = EditionedFileId;
-
-    #[derive(Debug, Clone, PartialEq, Eq)]
-    pub struct EditionedFileIdData {
-        editioned_file_id: span::EditionedFileId,
-        krate: Crate,
-    }
-
-    /// We like to include the origin crate in an `EditionedFileId` (for use in the item tree),
-    /// but this poses us a problem.
-    ///
-    /// Spans contain `EditionedFileId`s, and we don't want to make them store the crate too
-    /// because that will increase their size, which will increase memory usage significantly.
-    /// Furthermore, things using spans do not generally need the crate: they are using the
-    /// file id for queries like `ast_id_map` or `parse`, which do not care about the crate.
-    ///
-    /// To solve this, we hash **only the `span::EditionedFileId`**, but on still compare
-    /// the crate in equality check. This preserves the invariant of `Hash` and `Eq` -
-    /// although same hashes can be used for different items, same file ids used for multiple
-    /// crates is a rare thing, and different items always have different hashes. Then,
-    /// when we only have a `span::EditionedFileId`, we use the `intern()` method to
-    /// reuse existing file ids, and create new one only if needed. See [`from_span_guess_origin`].
-    ///
-    /// See this for more info: https://rust-lang.zulipchat.com/#narrow/channel/185405-t-compiler.2Frust-analyzer/topic/Letting.20EditionedFileId.20know.20its.20crate/near/530189401
-    ///
-    /// [`from_span_guess_origin`]: EditionedFileId::from_span_guess_origin
-    #[derive(Hash, PartialEq, Eq)]
-    struct WithoutCrate {
-        editioned_file_id: span::EditionedFileId,
-    }
-
-    impl Hash for EditionedFileIdData {
-        #[inline]
-        fn hash(&self, state: &mut H) {
-            let EditionedFileIdData { editioned_file_id, krate: _ } = *self;
-            editioned_file_id.hash(state);
-        }
-    }
-
-    impl zalsa_struct_::HashEqLike for EditionedFileIdData {
-        #[inline]
-        fn hash(&self, state: &mut H) {
-            Hash::hash(self, state);
-        }
-
-        #[inline]
-        fn eq(&self, data: &WithoutCrate) -> bool {
-            let EditionedFileIdData { editioned_file_id, krate: _ } = *self;
-            editioned_file_id == data.editioned_file_id
-        }
-    }
-
-    impl zalsa_::HasJar for EditionedFileId {
-        type Jar = zalsa_struct_::JarImpl;
-        const KIND: zalsa_::JarKind = zalsa_::JarKind::Struct;
-    }
-
-    zalsa_::register_jar! {
-        zalsa_::ErasedJar::erase::()
-    }
-
-    impl zalsa_struct_::Configuration for EditionedFileId {
-        const LOCATION: salsa::plumbing::Location =
-            salsa::plumbing::Location { file: file!(), line: line!() };
-        const DEBUG_NAME: &'static str = "EditionedFileId";
-        const REVISIONS: std::num::NonZeroUsize = std::num::NonZeroUsize::MAX;
-        const PERSIST: bool = false;
-
-        type Fields<'a> = EditionedFileIdData;
-        type Struct<'db> = EditionedFileId;
-
-        fn serialize(_: &Self::Fields<'_>, _: S) -> Result
-        where
-            S: zalsa_::serde::Serializer,
-        {
-            unimplemented!("attempted to serialize value that set `PERSIST` to false")
-        }
-
-        fn deserialize<'de, D>(_: D) -> Result, D::Error>
-        where
-            D: zalsa_::serde::Deserializer<'de>,
-        {
-            unimplemented!("attempted to deserialize value that cannot set `PERSIST` to false");
-        }
-    }
-
-    impl Configuration_ {
-        pub fn ingredient(zalsa: &zalsa_::Zalsa) -> &zalsa_struct_::IngredientImpl {
-            static CACHE: zalsa_::IngredientCache> =
-                zalsa_::IngredientCache::new();
-
-            // SAFETY: `lookup_jar_by_type` returns a valid ingredient index, and the only
-            // ingredient created by our jar is the struct ingredient.
-            unsafe {
-                CACHE.get_or_create(zalsa, || {
-                    zalsa.lookup_jar_by_type::>()
-                })
-            }
-        }
-    }
-
-    impl zalsa_::AsId for EditionedFileId {
-        fn as_id(&self) -> salsa::Id {
-            self.0.as_id()
-        }
-    }
-    impl zalsa_::FromId for EditionedFileId {
-        fn from_id(id: salsa::Id) -> Self {
-            Self(::from_id(id), std::marker::PhantomData)
-        }
-    }
-
-    unsafe impl Send for EditionedFileId {}
-    unsafe impl Sync for EditionedFileId {}
-
-    impl std::fmt::Debug for EditionedFileId {
-        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-            Self::default_debug_fmt(*self, f)
-        }
-    }
-
-    impl zalsa_::SalsaStructInDb for EditionedFileId {
-        type MemoIngredientMap = salsa::plumbing::MemoIngredientSingletonIndex;
-
-        fn lookup_ingredient_index(aux: &zalsa_::Zalsa) -> salsa::plumbing::IngredientIndices {
-            aux.lookup_jar_by_type::>().into()
-        }
-
-        fn entries(zalsa: &zalsa_::Zalsa) -> impl Iterator + '_ {
-            let _ingredient_index =
-                zalsa.lookup_jar_by_type::>();
-            ::ingredient(zalsa).entries(zalsa).map(|entry| entry.key())
-        }
-
-        #[inline]
-        fn cast(id: salsa::Id, type_id: std::any::TypeId) -> Option {
-            if type_id == std::any::TypeId::of::() {
-                Some(::from_id(id))
-            } else {
-                None
-            }
-        }
-
-        #[inline]
-        unsafe fn memo_table(
-            zalsa: &zalsa_::Zalsa,
-            id: zalsa_::Id,
-            current_revision: zalsa_::Revision,
-        ) -> zalsa_::MemoTableWithTypes<'_> {
-            // SAFETY: Guaranteed by caller.
-            unsafe {
-                zalsa.table().memos::>(id, current_revision)
-            }
-        }
-    }
-
-    unsafe impl zalsa_::Update for EditionedFileId {
-        unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool {
-            if unsafe { *old_pointer } != new_value {
-                unsafe { *old_pointer = new_value };
-                true
-            } else {
-                false
-            }
-        }
-    }
-
-    impl EditionedFileId {
-        pub fn from_span(
-            db: &(impl salsa::Database + ?Sized),
-            editioned_file_id: span::EditionedFileId,
-            krate: Crate,
-        ) -> Self {
-            let (zalsa, zalsa_local) = db.zalsas();
-            Configuration_::ingredient(zalsa).intern(
-                zalsa,
-                zalsa_local,
-                EditionedFileIdData { editioned_file_id, krate },
-                |_, data| data,
-            )
-        }
-
-        /// Guesses the crate for the file.
-        ///
-        /// Only use this if you cannot precisely determine the origin. This can happen in one of two cases:
-        ///
-        ///  1. The file is not in the module tree.
-        ///  2. You are latency sensitive and cannot afford calling the def map to precisely compute the origin
-        ///     (e.g. on enter feature, folding, etc.).
-        pub fn from_span_guess_origin(
-            db: &dyn RootQueryDb,
-            editioned_file_id: span::EditionedFileId,
-        ) -> Self {
-            let (zalsa, zalsa_local) = db.zalsas();
-            Configuration_::ingredient(zalsa).intern(
-                zalsa,
-                zalsa_local,
-                WithoutCrate { editioned_file_id },
-                |_, _| {
-                    // FileId not in the database.
-                    let krate = db
-                        .relevant_crates(editioned_file_id.file_id())
-                        .first()
-                        .copied()
-                        .unwrap_or_else(|| db.all_crates()[0]);
-                    EditionedFileIdData { editioned_file_id, krate }
-                },
-            )
-        }
-
-        pub fn editioned_file_id(self, db: &dyn salsa::Database) -> span::EditionedFileId {
-            let zalsa = db.zalsa();
-            let fields = Configuration_::ingredient(zalsa).fields(zalsa, self);
-            fields.editioned_file_id
-        }
-
-        pub fn krate(self, db: &dyn salsa::Database) -> Crate {
-            let zalsa = db.zalsa();
-            let fields = Configuration_::ingredient(zalsa).fields(zalsa, self);
-            fields.krate
-        }
-
-        /// Default debug formatting for this struct (may be useful if you define your own `Debug` impl)
-        pub fn default_debug_fmt(this: Self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-            zalsa_::with_attached_database(|db| {
-                let zalsa = db.zalsa();
-                let fields = Configuration_::ingredient(zalsa).fields(zalsa, this);
-                fmt::Debug::fmt(fields, f)
-            })
-            .unwrap_or_else(|| {
-                f.debug_tuple("EditionedFileId").field(&zalsa_::AsId::as_id(&this)).finish()
-            })
-        }
-    }
-};
-
-impl EditionedFileId {
-    #[inline]
-    pub fn new(db: &dyn salsa::Database, file_id: FileId, edition: Edition, krate: Crate) -> Self {
-        EditionedFileId::from_span(db, span::EditionedFileId::new(file_id, edition), krate)
-    }
-
-    /// Attaches the current edition and guesses the crate for the file.
-    ///
-    /// Only use this if you cannot precisely determine the origin. This can happen in one of two cases:
-    ///
-    ///  1. The file is not in the module tree.
-    ///  2. You are latency sensitive and cannot afford calling the def map to precisely compute the origin
-    ///     (e.g. on enter feature, folding, etc.).
-    #[inline]
-    pub fn current_edition_guess_origin(db: &dyn RootQueryDb, file_id: FileId) -> Self {
-        Self::from_span_guess_origin(db, span::EditionedFileId::current_edition(file_id))
-    }
-
-    #[inline]
-    pub fn file_id(self, db: &dyn salsa::Database) -> vfs::FileId {
-        let id = self.editioned_file_id(db);
-        id.file_id()
-    }
-
-    #[inline]
-    pub fn unpack(self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) {
-        let id = self.editioned_file_id(db);
-        (id.file_id(), id.edition())
-    }
-
-    #[inline]
-    pub fn edition(self, db: &dyn salsa::Database) -> Edition {
-        self.editioned_file_id(db).edition()
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs
index 28539d59825f1..cac74778a26b0 100644
--- a/src/tools/rust-analyzer/crates/base-db/src/input.rs
+++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs
@@ -829,10 +829,9 @@ pub(crate) fn transitive_rev_deps(db: &dyn RootQueryDb, of: Crate) -> FxHashSet<Crate> {
     rev_deps
 }
 
-impl Crate {
-    pub fn root_file_id(self, db: &dyn salsa::Database) -> EditionedFileId {
-        let data = self.data(db);
-        EditionedFileId::new(db, data.root_file_id, data.edition, self)
+impl BuiltCrateData {
+    pub fn root_file_id(&self, db: &dyn salsa::Database) -> EditionedFileId {
+        EditionedFileId::new(db, self.root_file_id, self.edition)
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/base-db/src/lib.rs b/src/tools/rust-analyzer/crates/base-db/src/lib.rs
index 32909af5d78d5..0e411bcfae60e 100644
--- a/src/tools/rust-analyzer/crates/base-db/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/base-db/src/lib.rs
@@ -5,7 +5,6 @@ pub use salsa_macros;
 
 // FIXME: Rename this crate, base db is non descriptive
 mod change;
-mod editioned_file_id;
 mod input;
 pub mod target;
 
@@ -18,7 +17,6 @@ use std::{
 
 pub use crate::{
     change::FileChange,
-    editioned_file_id::EditionedFileId,
     input::{
         BuiltCrateData, BuiltDependency, Crate, CrateBuilder, CrateBuilderId, CrateDataBuilder,
         CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CratesIdMap, CratesMap,
@@ -31,6 +29,7 @@ pub use query_group::{self};
 use rustc_hash::{FxHashSet, FxHasher};
 use salsa::{Durability, Setter};
 pub use semver::{BuildMetadata, Prerelease, Version, VersionReq};
+use span::Edition;
 use syntax::{Parse, SyntaxError, ast};
 use triomphe::Arc;
 pub use vfs::{AnchoredPath, AnchoredPathBuf, FileId, VfsPath, file_set::FileSet};
@@ -176,6 +175,42 @@ impl Files {
     }
 }
 
+#[salsa_macros::interned(no_lifetime, debug, constructor=from_span, revisions = usize::MAX)]
+#[derive(PartialOrd, Ord)]
+pub struct EditionedFileId {
+    pub editioned_file_id: span::EditionedFileId,
+}
+
+impl EditionedFileId {
+    // Salsa already uses the name `new`...
+    #[inline]
+    pub fn new(db: &dyn salsa::Database, file_id: FileId, edition: Edition) -> Self {
+        EditionedFileId::from_span(db, span::EditionedFileId::new(file_id, edition))
+    }
+
+    #[inline]
+    pub fn current_edition(db: &dyn salsa::Database, file_id: FileId) -> Self {
+        EditionedFileId::new(db, file_id, Edition::CURRENT)
+    }
+
+    #[inline]
+    pub fn file_id(self, db: &dyn salsa::Database) -> vfs::FileId {
+        let id = self.editioned_file_id(db);
+        id.file_id()
+    }
+
+    #[inline]
+    pub fn unpack(self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) {
+        let id = self.editioned_file_id(db);
+        (id.file_id(), id.edition())
+    }
+
+    #[inline]
+    pub fn edition(self, db: &dyn SourceDatabase) -> Edition {
+        self.editioned_file_id(db).edition()
+    }
+}
+
 #[salsa_macros::input(debug)]
 pub struct FileText {
     #[returns(ref)]
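
For reference (an annotation, not part of the patch): the interned `EditionedFileId` above now stores only the `(FileId, Edition)` pair, so callers can round-trip it through any salsa database handle. A minimal sketch, assuming the `salsa`, `base_db`, and `span` crates are in scope and that `round_trip` is a hypothetical caller:

```rust
use base_db::{EditionedFileId, FileId};
use span::Edition;

// Illustrative only: intern a file id under the current edition and read
// both parts back out through the database.
fn round_trip(db: &dyn salsa::Database, file_id: FileId) -> (FileId, Edition) {
    let id = EditionedFileId::current_edition(db, file_id);
    // `unpack` recovers the `(FileId, Edition)` pair from the interned data.
    let (raw, edition) = id.unpack(db);
    debug_assert_eq!(raw, file_id);
    debug_assert_eq!(edition, Edition::CURRENT);
    (raw, edition)
}
```

Because the crate is no longer part of the interned data, it has to be supplied separately, which is what the `BuiltCrateData::root_file_id` change above relies on.
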
diff --git a/src/tools/rust-analyzer/crates/cfg/Cargo.toml b/src/tools/rust-analyzer/crates/cfg/Cargo.toml
index 9e2a95dbf32c0..e17969bd82d41 100644
--- a/src/tools/rust-analyzer/crates/cfg/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/cfg/Cargo.toml
@@ -18,7 +18,6 @@ tracing.workspace = true
 
 # locals deps
 tt = { workspace = true, optional = true }
-syntax = { workspace = true, optional = true }
 intern.workspace = true
 
 [dev-dependencies]
diff --git a/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs b/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs
index 76e0aba859e68..7a21015e14bec 100644
--- a/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs
+++ b/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs
 impl From<CfgAtom> for CfgExpr {
 }
 
 impl CfgExpr {
-    // FIXME: Parsing from `tt` is only used in a handful of places, reconsider
-    // if we should switch them to AST.
     #[cfg(feature = "tt")]
     pub fn parse<S>(tt: &tt::TopSubtree<S>) -> CfgExpr {
         next_cfg_expr(&mut tt.iter()).unwrap_or(CfgExpr::Invalid)
@@ -75,13 +73,6 @@ impl CfgExpr {
         next_cfg_expr(tt).unwrap_or(CfgExpr::Invalid)
     }
 
-    #[cfg(feature = "syntax")]
-    pub fn parse_from_ast(
-        ast: &mut std::iter::Peekable<syntax::ast::TokenTreeChildren>,
-    ) -> CfgExpr {
-        next_cfg_expr_from_ast(ast).unwrap_or(CfgExpr::Invalid)
-    }
-
     /// Fold the cfg by querying all basic `Atom` and `KeyValue` predicates.
     pub fn fold(&self, query: &dyn Fn(&CfgAtom) -> bool) -> Option<bool> {
         match self {
@@ -98,56 +89,6 @@ impl CfgExpr {
     }
 }
 
-#[cfg(feature = "syntax")]
-fn next_cfg_expr_from_ast(
-    it: &mut std::iter::Peekable<syntax::ast::TokenTreeChildren>,
-) -> Option<CfgExpr> {
-    use intern::sym;
-    use syntax::{NodeOrToken, SyntaxKind, T, ast};
-
-    let name = match it.next() {
-        None => return None,
-        Some(NodeOrToken::Token(ident)) if ident.kind().is_any_identifier() => {
-            Symbol::intern(ident.text())
-        }
-        Some(_) => return Some(CfgExpr::Invalid),
-    };
-
-    let ret = match it.peek() {
-        Some(NodeOrToken::Token(eq)) if eq.kind() == T![=] => {
-            it.next();
-            if let Some(NodeOrToken::Token(literal)) = it.peek()
-                && matches!(literal.kind(), SyntaxKind::STRING)
-            {
-                let literal = tt::token_to_literal(literal.text(), ()).symbol;
-                it.next();
-                CfgAtom::KeyValue { key: name, value: literal.clone() }.into()
-            } else {
-                return Some(CfgExpr::Invalid);
-            }
-        }
-        Some(NodeOrToken::Node(subtree)) => {
-            let mut subtree_iter = ast::TokenTreeChildren::new(subtree).peekable();
-            it.next();
-            let mut subs = std::iter::from_fn(|| next_cfg_expr_from_ast(&mut subtree_iter));
-            match name {
-                s if s == sym::all => CfgExpr::All(subs.collect()),
-                s if s == sym::any => CfgExpr::Any(subs.collect()),
-                s if s == sym::not => {
-                    CfgExpr::Not(Box::new(subs.next().unwrap_or(CfgExpr::Invalid)))
-                }
-                _ => CfgExpr::Invalid,
-            }
-        }
-        _ => CfgAtom::Flag(name).into(),
-    };
-
-    // Eat comma separator
-    while it.next().is_some_and(|it| it.as_token().is_none_or(|it| it.kind() != T![,])) {}
-
-    Some(ret)
-}
-
 #[cfg(feature = "tt")]
 fn next_cfg_expr<S>(it: &mut tt::iter::TtIter<'_, S>) -> Option<CfgExpr> {
     use intern::sym;
diff --git a/src/tools/rust-analyzer/crates/cfg/src/tests.rs b/src/tools/rust-analyzer/crates/cfg/src/tests.rs
index 52c581dbbd3ae..6766748097f00 100644
--- a/src/tools/rust-analyzer/crates/cfg/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/cfg/src/tests.rs
@@ -1,10 +1,7 @@
 use arbitrary::{Arbitrary, Unstructured};
 use expect_test::{Expect, expect};
 use intern::Symbol;
-use syntax::{
-    AstNode, Edition,
-    ast::{self, TokenTreeChildren},
-};
+use syntax::{AstNode, Edition, ast};
 use syntax_bridge::{
     DocCommentDesugarMode,
     dummy_test_span_utils::{DUMMY, DummyTestSpanMap},
@@ -13,33 +10,24 @@ use syntax_bridge::{
 
 use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
 
-#[track_caller]
-fn parse_ast_cfg(tt: &ast::TokenTree) -> CfgExpr {
-    CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(tt).peekable())
-}
-
-#[track_caller]
 fn assert_parse_result(input: &str, expected: CfgExpr) {
     let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
-    let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+    let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
     let tt = syntax_node_to_token_tree(
-        tt_ast.syntax(),
+        tt.syntax(),
         DummyTestSpanMap,
         DUMMY,
         DocCommentDesugarMode::ProcMacro,
     );
     let cfg = CfgExpr::parse(&tt);
     assert_eq!(cfg, expected);
-    let cfg = parse_ast_cfg(&tt_ast);
-    assert_eq!(cfg, expected);
 }
 
-#[track_caller]
 fn check_dnf(input: &str, expect: Expect) {
     let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
-    let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+    let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
     let tt = syntax_node_to_token_tree(
-        tt_ast.syntax(),
+        tt.syntax(),
         DummyTestSpanMap,
         DUMMY,
         DocCommentDesugarMode::ProcMacro,
@@ -47,17 +35,13 @@ fn check_dnf(input: &str, expect: Expect) {
     let cfg = CfgExpr::parse(&tt);
     let actual = format!("#![cfg({})]", DnfExpr::new(&cfg));
     expect.assert_eq(&actual);
-    let cfg = parse_ast_cfg(&tt_ast);
-    let actual = format!("#![cfg({})]", DnfExpr::new(&cfg));
-    expect.assert_eq(&actual);
 }
 
-#[track_caller]
 fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
     let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
-    let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+    let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
     let tt = syntax_node_to_token_tree(
-        tt_ast.syntax(),
+        tt.syntax(),
         DummyTestSpanMap,
         DUMMY,
         DocCommentDesugarMode::ProcMacro,
@@ -66,18 +50,14 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
     let dnf = DnfExpr::new(&cfg);
     let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
     expect.assert_eq(&why_inactive);
-    let cfg = parse_ast_cfg(&tt_ast);
-    let dnf = DnfExpr::new(&cfg);
-    let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
-    expect.assert_eq(&why_inactive);
 }
 
 #[track_caller]
 fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
     let source_file = ast::SourceFile::parse(input, Edition::CURRENT).ok().unwrap();
-    let tt_ast = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+    let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
     let tt = syntax_node_to_token_tree(
-        tt_ast.syntax(),
+        tt.syntax(),
         DummyTestSpanMap,
         DUMMY,
         DocCommentDesugarMode::ProcMacro,
@@ -86,10 +66,6 @@ fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
     let dnf = DnfExpr::new(&cfg);
     let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();
     assert_eq!(hints, expected_hints);
-    let cfg = parse_ast_cfg(&tt_ast);
-    let dnf = DnfExpr::new(&cfg);
-    let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();
-    assert_eq!(hints, expected_hints);
 }
 
 #[test]
diff --git a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
index e1f60742d3249..abb4819a7672a 100644
--- a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
@@ -45,8 +45,7 @@ mbe.workspace = true
 cfg.workspace = true
 tt.workspace = true
 span.workspace = true
-thin-vec.workspace = true
-syntax-bridge.workspace = true
+thin-vec = "0.2.14"
 
 [dev-dependencies]
 expect-test.workspace = true
@@ -54,6 +53,7 @@ expect-test.workspace = true
 # local deps
 test-utils.workspace = true
 test-fixture.workspace = true
+syntax-bridge.workspace = true
 
 [features]
 in-rust-tree = ["hir-expand/in-rust-tree"]
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
new file mode 100644
index 0000000000000..b4fcfa11aea74
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
@@ -0,0 +1,901 @@
+//! A higher level attributes based on TokenTree, with also some shortcuts.
+
+use std::{borrow::Cow, convert::identity, hash::Hash, ops};
+
+use base_db::Crate;
+use cfg::{CfgExpr, CfgOptions};
+use either::Either;
+use hir_expand::{
+    HirFileId, InFile,
+    attrs::{Attr, AttrId, RawAttrs, collect_attrs},
+    span_map::SpanMapRef,
+};
+use intern::{Symbol, sym};
+use la_arena::{ArenaMap, Idx, RawIdx};
+use mbe::DelimiterKind;
+use rustc_abi::ReprOptions;
+use span::AstIdNode;
+use syntax::{
+    AstPtr,
+    ast::{self, HasAttrs},
+};
+use triomphe::Arc;
+use tt::iter::{TtElement, TtIter};
+
+use crate::{
+    AdtId, AstIdLoc, AttrDefId, GenericParamId, HasModule, LocalFieldId, Lookup, MacroId,
+    VariantId,
+    db::DefDatabase,
+    item_tree::block_item_tree_query,
+    lang_item::LangItem,
+    nameres::{ModuleOrigin, ModuleSource},
+    src::{HasChildSource, HasSource},
+};
+
+/// Desugared attributes of an item post `cfg_attr` expansion.
+#[derive(Default, Debug, Clone, PartialEq, Eq)]
+pub struct Attrs(RawAttrs);
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct AttrsWithOwner {
+    attrs: Attrs,
+    owner: AttrDefId,
+}
+
+impl Attrs {
+    pub fn new(
+        db: &dyn DefDatabase,
+        owner: &dyn ast::HasAttrs,
+        span_map: SpanMapRef<'_>,
+        cfg_options: &CfgOptions,
+    ) -> Self {
+        Attrs(RawAttrs::new_expanded(db, owner, span_map, cfg_options))
+    }
+
+    pub fn get(&self, id: AttrId) -> Option<&Attr> {
+        (**self).iter().find(|attr| attr.id == id)
+    }
+
+    pub(crate) fn expand_cfg_attr(
+        db: &dyn DefDatabase,
+        krate: Crate,
+        raw_attrs: RawAttrs,
+    ) -> Attrs {
+        Attrs(raw_attrs.expand_cfg_attr(db, krate))
+    }
+
+    pub(crate) fn is_cfg_enabled_for(
+        db: &dyn DefDatabase,
+        owner: &dyn ast::HasAttrs,
+        span_map: SpanMapRef<'_>,
+        cfg_options: &CfgOptions,
+    ) -> Result<(), CfgExpr> {
+        RawAttrs::attrs_iter_expanded::<false>(db, owner, span_map, cfg_options)
+            .filter_map(|attr| attr.cfg())
+            .find_map(|cfg| match cfg_options.check(&cfg).is_none_or(identity) {
+                true => None,
+                false => Some(cfg),
+            })
+            .map_or(Ok(()), Err)
+    }
+}
+
+impl ops::Deref for Attrs {
+    type Target = [Attr];
+
+    fn deref(&self) -> &[Attr] {
+        &self.0
+    }
+}
+
+impl ops::Deref for AttrsWithOwner {
+    type Target = Attrs;
+
+    fn deref(&self) -> &Attrs {
+        &self.attrs
+    }
+}
+
+impl Attrs {
+    pub const EMPTY: Self = Self(RawAttrs::EMPTY);
+
+    pub(crate) fn fields_attrs_query(
+        db: &dyn DefDatabase,
+        v: VariantId,
+    ) -> Arc<ArenaMap<LocalFieldId, Attrs>> {
+        let _p = tracing::info_span!("fields_attrs_query").entered();
+        let mut res = ArenaMap::default();
+        let (fields, file_id, krate) = match v {
+            VariantId::EnumVariantId(it) => {
+                let loc = it.lookup(db);
+                let krate = loc.parent.lookup(db).container.krate;
+                let source = loc.source(db);
+                (source.value.field_list(), source.file_id, krate)
+            }
+            VariantId::StructId(it) => {
+                let loc = it.lookup(db);
+                let krate = loc.container.krate;
+                let source = loc.source(db);
+                (source.value.field_list(), source.file_id, krate)
+            }
+            VariantId::UnionId(it) => {
+                let loc = it.lookup(db);
+                let krate = loc.container.krate;
+                let source = loc.source(db);
+                (
+                    source.value.record_field_list().map(ast::FieldList::RecordFieldList),
+                    source.file_id,
+                    krate,
+                )
+            }
+        };
+        let Some(fields) = fields else {
+            return Arc::new(res);
+        };
+
+        let cfg_options = krate.cfg_options(db);
+        let span_map = db.span_map(file_id);
+
+        match fields {
+            ast::FieldList::RecordFieldList(fields) => {
+                let mut idx = 0;
+                for field in fields.fields() {
+                    let attrs =
+                        Attrs(RawAttrs::new_expanded(db, &field, span_map.as_ref(), cfg_options));
+                    if attrs.is_cfg_enabled(cfg_options).is_ok() {
+                        res.insert(Idx::from_raw(RawIdx::from(idx)), attrs);
+                        idx += 1;
+                    }
+                }
+            }
+            ast::FieldList::TupleFieldList(fields) => {
+                let mut idx = 0;
+                for field in fields.fields() {
+                    let attrs =
+                        Attrs(RawAttrs::new_expanded(db, &field, span_map.as_ref(), cfg_options));
+                    if attrs.is_cfg_enabled(cfg_options).is_ok() {
+                        res.insert(Idx::from_raw(RawIdx::from(idx)), attrs);
+                        idx += 1;
+                    }
+                }
+            }
+        }
+
+        res.shrink_to_fit();
+        Arc::new(res)
+    }
+}
+
+impl Attrs {
+    #[inline]
+    pub fn by_key(&self, key: Symbol) -> AttrQuery<'_> {
+        AttrQuery { attrs: self, key }
+    }
+
+    #[inline]
+    pub fn rust_analyzer_tool(&self) -> impl Iterator<Item = &Attr> {
+        self.iter()
+            .filter(|&attr| attr.path.segments().first().is_some_and(|s| *s == sym::rust_analyzer))
+    }
+
+    #[inline]
+    pub fn cfg(&self) -> Option<CfgExpr> {
+        let mut cfgs = self.by_key(sym::cfg).tt_values().map(CfgExpr::parse);
+        let first = cfgs.next()?;
+        match cfgs.next() {
+            Some(second) => {
+                let cfgs = [first, second].into_iter().chain(cfgs);
+                Some(CfgExpr::All(cfgs.collect()))
+            }
+            None => Some(first),
+        }
+    }
+
+    #[inline]
+    pub fn cfgs(&self) -> impl Iterator<Item = CfgExpr> + '_ {
+        self.by_key(sym::cfg).tt_values().map(CfgExpr::parse)
+    }
+
+    #[inline]
+    pub(crate) fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> Result<(), CfgExpr> {
+        self.cfgs().try_for_each(|cfg| {
+            if cfg_options.check(&cfg) != Some(false) { Ok(()) } else { Err(cfg) }
+        })
+    }
+
+    #[inline]
+    pub fn lang(&self) -> Option<&Symbol> {
+        self.by_key(sym::lang).string_value()
+    }
+
+    #[inline]
+    pub fn lang_item(&self) -> Option<LangItem> {
+        self.by_key(sym::lang).string_value().and_then(LangItem::from_symbol)
+    }
+
+    #[inline]
+    pub fn has_doc_hidden(&self) -> bool {
+        self.by_key(sym::doc).tt_values().any(|tt| {
+            tt.top_subtree().delimiter.kind == DelimiterKind::Parenthesis &&
+                matches!(tt.token_trees().flat_tokens(), [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::hidden)
+        })
+    }
+
+    #[inline]
+    pub fn has_doc_notable_trait(&self) -> bool {
+        self.by_key(sym::doc).tt_values().any(|tt| {
+            tt.top_subtree().delimiter.kind == DelimiterKind::Parenthesis &&
+                matches!(tt.token_trees().flat_tokens(), [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::notable_trait)
+        })
+    }
+
+    #[inline]
+    pub fn doc_exprs(&self) -> impl Iterator<Item = DocExpr> + '_ {
+        self.by_key(sym::doc).tt_values().map(DocExpr::parse)
+    }
+
+    #[inline]
+    pub fn doc_aliases(&self) -> impl Iterator<Item = Symbol> + '_ {
+        self.doc_exprs().flat_map(|doc_expr| doc_expr.aliases().to_vec())
+    }
+
+    #[inline]
+    pub fn export_name(&self) -> Option<&Symbol> {
+        self.by_key(sym::export_name).string_value()
+    }
+
+    #[inline]
+    pub fn is_proc_macro(&self) -> bool {
+        self.by_key(sym::proc_macro).exists()
+    }
+
+    #[inline]
+    pub fn is_proc_macro_attribute(&self) -> bool {
+        self.by_key(sym::proc_macro_attribute).exists()
+    }
+
+    #[inline]
+    pub fn is_proc_macro_derive(&self) -> bool {
+        self.by_key(sym::proc_macro_derive).exists()
+    }
+
+    #[inline]
+    pub fn is_test(&self) -> bool {
+        self.iter().any(|it| {
+            it.path()
+                .segments()
+                .iter()
+                .rev()
+                .zip([sym::core, sym::prelude, sym::v1, sym::test].iter().rev())
+                .all(|it| it.0 == it.1)
+        })
+    }
+
+    #[inline]
+    pub fn is_ignore(&self) -> bool {
+        self.by_key(sym::ignore).exists()
+    }
+
+    #[inline]
+    pub fn is_bench(&self) -> bool {
+        self.by_key(sym::bench).exists()
+    }
+
+    #[inline]
+    pub fn is_unstable(&self) -> bool {
+        self.by_key(sym::unstable).exists()
+    }
+
+    #[inline]
+    pub fn rustc_legacy_const_generics(&self) -> Option<Box<Box<[u32]>>> {
+        self.by_key(sym::rustc_legacy_const_generics)
+            .tt_values()
+            .next()
+            .map(parse_rustc_legacy_const_generics)
+            .filter(|it| !it.is_empty())
+            .map(Box::new)
+    }
+
+    #[inline]
+    pub fn repr(&self) -> Option<ReprOptions> {
+        self.by_key(sym::repr).tt_values().filter_map(parse_repr_tt).fold(None, |acc, repr| {
+            acc.map_or(Some(repr), |mut acc| {
+                merge_repr(&mut acc, repr);
+                Some(acc)
+            })
+        })
+    }
+}
+
+fn parse_rustc_legacy_const_generics(tt: &crate::tt::TopSubtree) -> Box<[u32]> {
+    let mut indices = Vec::new();
+    let mut iter = tt.iter();
+    while let (Some(first), second) = (iter.next(), iter.next()) {
+        match first {
+            TtElement::Leaf(tt::Leaf::Literal(lit)) => match lit.symbol.as_str().parse() {
+                Ok(index) => indices.push(index),
+                Err(_) => break,
+            },
+            _ => break,
+        }
+
+        if let Some(comma) = second {
+            match comma {
+                TtElement::Leaf(tt::Leaf::Punct(punct)) if punct.char == ',' => {}
+                _ => break,
+            }
+        }
+    }
+
+    indices.into_boxed_slice()
+}
+
+fn merge_repr(this: &mut ReprOptions, other: ReprOptions) {
+    let ReprOptions { int, align, pack, flags, field_shuffle_seed: _ } = this;
+    flags.insert(other.flags);
+    *align = (*align).max(other.align);
+    *pack = match (*pack, other.pack) {
+        (Some(pack), None) | (None, Some(pack)) => Some(pack),
+        _ => (*pack).min(other.pack),
+    };
+    if other.int.is_some() {
+        *int = other.int;
+    }
+}
+
+fn parse_repr_tt(tt: &crate::tt::TopSubtree) -> Option<ReprOptions> {
+    use crate::builtin_type::{BuiltinInt, BuiltinUint};
+    use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions};
+
+    match tt.top_subtree().delimiter {
+        tt::Delimiter { kind: DelimiterKind::Parenthesis, .. } => {}
+        _ => return None,
+    }
+
+    let mut acc = ReprOptions::default();
+    let mut tts = tt.iter();
+    while let Some(tt) = tts.next() {
+        let TtElement::Leaf(tt::Leaf::Ident(ident)) = tt else {
+            continue;
+        };
+        let repr = match &ident.sym {
+            s if *s == sym::packed => {
+                let pack = if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() {
+                    tts.next();
+                    if let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = tt_iter.next() {
+                        lit.symbol.as_str().parse().unwrap_or_default()
+                    } else {
+                        0
+                    }
+                } else {
+                    0
+                };
+                let pack = Some(Align::from_bytes(pack).unwrap_or(Align::ONE));
+                ReprOptions { pack, ..Default::default() }
+            }
+            s if *s == sym::align => {
+                let mut align = None;
+                if let Some(TtElement::Subtree(_, mut tt_iter)) = tts.peek() {
+                    tts.next();
+                    if let Some(TtElement::Leaf(tt::Leaf::Literal(lit))) = tt_iter.next()
+                        && let Ok(a) = lit.symbol.as_str().parse()
+                    {
+                        align = Align::from_bytes(a).ok();
+                    }
+                }
+                ReprOptions { align, ..Default::default() }
+            }
+            s if *s == sym::C => ReprOptions { flags: ReprFlags::IS_C, ..Default::default() },
+            s if *s == sym::transparent => {
+                ReprOptions { flags: ReprFlags::IS_TRANSPARENT, ..Default::default() }
+            }
+            s if *s == sym::simd => ReprOptions { flags: ReprFlags::IS_SIMD, ..Default::default() },
+            repr => {
+                let mut int = None;
+                if let Some(builtin) = BuiltinInt::from_suffix_sym(repr)
+                    .map(Either::Left)
+                    .or_else(|| BuiltinUint::from_suffix_sym(repr).map(Either::Right))
+                {
+                    int = Some(match builtin {
+                        Either::Left(bi) => match bi {
+                            BuiltinInt::Isize => IntegerType::Pointer(true),
+                            BuiltinInt::I8 => IntegerType::Fixed(Integer::I8, true),
+                            BuiltinInt::I16 => IntegerType::Fixed(Integer::I16, true),
+                            BuiltinInt::I32 => IntegerType::Fixed(Integer::I32, true),
+                            BuiltinInt::I64 => IntegerType::Fixed(Integer::I64, true),
+                            BuiltinInt::I128 => IntegerType::Fixed(Integer::I128, true),
+                        },
+                        Either::Right(bu) => match bu {
+                            BuiltinUint::Usize => IntegerType::Pointer(false),
+                            BuiltinUint::U8 => IntegerType::Fixed(Integer::I8, false),
+                            BuiltinUint::U16 => IntegerType::Fixed(Integer::I16, false),
+                            BuiltinUint::U32 => IntegerType::Fixed(Integer::I32, false),
+                            BuiltinUint::U64 => IntegerType::Fixed(Integer::I64, false),
+                            BuiltinUint::U128 => IntegerType::Fixed(Integer::I128, false),
+                        },
+                    });
+                }
+                ReprOptions { int, ..Default::default() }
+            }
+        };
+        merge_repr(&mut acc, repr);
+    }
+
+    Some(acc)
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum DocAtom {
+    /// eg. `#[doc(hidden)]`
+    Flag(Symbol),
+    /// eg. `#[doc(alias = "it")]`
+    ///
+    /// Note that a key can have multiple values that are all considered "active" at the same time.
+    /// For example, `#[doc(alias = "x")]` and `#[doc(alias = "y")]`.
+    KeyValue { key: Symbol, value: Symbol },
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum DocExpr {
+    Invalid,
+    /// eg. `#[doc(hidden)]`, `#[doc(alias = "x")]`
+    Atom(DocAtom),
+    /// eg. `#[doc(alias("x", "y"))]`
+    Alias(Vec<Symbol>),
+}
+
+impl From<DocAtom> for DocExpr {
+    fn from(atom: DocAtom) -> Self {
+        DocExpr::Atom(atom)
+    }
+}
+
+impl DocExpr {
+    fn parse<S>(tt: &tt::TopSubtree<S>) -> DocExpr {
+        next_doc_expr(tt.iter()).unwrap_or(DocExpr::Invalid)
+    }
+
+    pub fn aliases(&self) -> &[Symbol] {
+        match self {
+            DocExpr::Atom(DocAtom::KeyValue { key, value }) if *key == sym::alias => {
+                std::slice::from_ref(value)
+            }
+            DocExpr::Alias(aliases) => aliases,
+            _ => &[],
+        }
+    }
+}
+
+fn next_doc_expr<S>(mut it: TtIter<'_, S>) -> Option<DocExpr> {
+    let name = match it.next() {
+        None => return None,
+        Some(TtElement::Leaf(tt::Leaf::Ident(ident))) => ident.sym.clone(),
+        Some(_) => return Some(DocExpr::Invalid),
+    };
+
+    // Peek
+    let ret = match it.peek() {
+        Some(TtElement::Leaf(tt::Leaf::Punct(punct))) if punct.char == '=' => {
+            it.next();
+            match it.next() {
+                Some(TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
+                    symbol: text,
+                    kind: tt::LitKind::Str,
+                    ..
+                }))) => DocAtom::KeyValue { key: name, value: text.clone() }.into(),
+                _ => return Some(DocExpr::Invalid),
+            }
+        }
+        Some(TtElement::Subtree(_, subtree_iter)) => {
+            it.next();
+            let subs = parse_comma_sep(subtree_iter);
+            match &name {
+                s if *s == sym::alias => DocExpr::Alias(subs),
+                _ => DocExpr::Invalid,
+            }
+        }
+        _ => DocAtom::Flag(name).into(),
+    };
+    Some(ret)
+}
+
+fn parse_comma_sep<S>(iter: TtIter<'_, S>) -> Vec<Symbol> {
+    iter.filter_map(|tt| match tt {
+        TtElement::Leaf(tt::Leaf::Literal(tt::Literal {
+            kind: tt::LitKind::Str, symbol, ..
+        })) => Some(symbol.clone()),
+        _ => None,
+    })
+    .collect()
+}
+
+impl AttrsWithOwner {
+    pub fn new(db: &dyn DefDatabase, owner: AttrDefId) -> Self {
+        Self { attrs: db.attrs(owner), owner }
+    }
+
+    pub(crate) fn attrs_query(db: &dyn DefDatabase, def: AttrDefId) -> Attrs {
+        let _p = tracing::info_span!("attrs_query").entered();
+        // FIXME: this should use `Trace` to avoid duplication in `source_map` below
+        match def {
+            AttrDefId::ModuleId(module) => {
+                let def_map = module.def_map(db);
+                let mod_data = &def_map[module.local_id];
+
+                let raw_attrs = match mod_data.origin {
+                    ModuleOrigin::File { definition, declaration_tree_id, declaration, .. } => {
+                        let decl_attrs = declaration_tree_id
+                            .item_tree(db)
+                            .raw_attrs(declaration.upcast())
+                            .clone();
+                        let tree = db.file_item_tree(definition.into());
+                        let def_attrs = tree.top_level_raw_attrs().clone();
+                        decl_attrs.merge(def_attrs)
+                    }
+                    ModuleOrigin::CrateRoot { definition } => {
+                        let tree = db.file_item_tree(definition.into());
+                        tree.top_level_raw_attrs().clone()
+                    }
+                    ModuleOrigin::Inline { definition_tree_id, definition } => {
+                        definition_tree_id.item_tree(db).raw_attrs(definition.upcast()).clone()
+                    }
+                    ModuleOrigin::BlockExpr { id, .. } => {
+                        let tree = block_item_tree_query(db, id);
+                        tree.top_level_raw_attrs().clone()
+                    }
+                };
+                Attrs::expand_cfg_attr(db, module.krate, raw_attrs)
+            }
+            AttrDefId::FieldId(it) => db.fields_attrs(it.parent)[it.local_id].clone(),
+            AttrDefId::EnumVariantId(it) => attrs_from_ast_id_loc(db, it),
+            AttrDefId::AdtId(it) => match it {
+                AdtId::StructId(it) => attrs_from_ast_id_loc(db, it),
+                AdtId::EnumId(it) => attrs_from_ast_id_loc(db, it),
+                AdtId::UnionId(it) => attrs_from_ast_id_loc(db, it),
+            },
+            AttrDefId::TraitId(it) => attrs_from_ast_id_loc(db, it),
+            AttrDefId::MacroId(it) => match it {
+                MacroId::Macro2Id(it) => attrs_from_ast_id_loc(db, it),
+                MacroId::MacroRulesId(it) => attrs_from_ast_id_loc(db, it),
+                MacroId::ProcMacroId(it) => attrs_from_ast_id_loc(db, it),
+            },
+            AttrDefId::ImplId(it) => attrs_from_ast_id_loc(db, it),
+            AttrDefId::ConstId(it) => attrs_from_ast_id_loc(db, it),
+            AttrDefId::StaticId(it) => attrs_from_ast_id_loc(db, it),
+            AttrDefId::FunctionId(it) => attrs_from_ast_id_loc(db, it),
+            AttrDefId::TypeAliasId(it) => attrs_from_ast_id_loc(db, it),
+            AttrDefId::GenericParamId(it) => match it {
+                GenericParamId::ConstParamId(it) => {
+                    let src = it.parent().child_source(db);
+                    // FIXME: We should be never getting `None` here.
+                    Attrs(match src.value.get(it.local_id()) {
+                        Some(val) => RawAttrs::new_expanded(
+                            db,
+                            val,
+                            db.span_map(src.file_id).as_ref(),
+                            def.krate(db).cfg_options(db),
+                        ),
+                        None => RawAttrs::EMPTY,
+                    })
+                }
+                GenericParamId::TypeParamId(it) => {
+                    let src = it.parent().child_source(db);
+                    // FIXME: We should be never getting `None` here.
+                    Attrs(match src.value.get(it.local_id()) {
+                        Some(val) => RawAttrs::new_expanded(
+                            db,
+                            val,
+                            db.span_map(src.file_id).as_ref(),
+                            def.krate(db).cfg_options(db),
+                        ),
+                        None => RawAttrs::EMPTY,
+                    })
+                }
+                GenericParamId::LifetimeParamId(it) => {
+                    let src = it.parent.child_source(db);
+                    // FIXME: We should be never getting `None` here.
+                    Attrs(match src.value.get(it.local_id) {
+                        Some(val) => RawAttrs::new_expanded(
+                            db,
+                            val,
+                            db.span_map(src.file_id).as_ref(),
+                            def.krate(db).cfg_options(db),
+                        ),
+                        None => RawAttrs::EMPTY,
+                    })
+                }
+            },
+            AttrDefId::ExternBlockId(it) => attrs_from_ast_id_loc(db, it),
+            AttrDefId::ExternCrateId(it) => attrs_from_ast_id_loc(db, it),
+            AttrDefId::UseId(it) => attrs_from_ast_id_loc(db, it),
+        }
+    }
+
+    pub fn source_map(&self, db: &dyn DefDatabase) -> AttrSourceMap {
+        let owner = match self.owner {
+            AttrDefId::ModuleId(module) => {
+                // Modules can have 2 attribute owners (the `mod x;` item, and the module file itself).
+
+                let def_map = module.def_map(db);
+                let mod_data = &def_map[module.local_id];
+                match mod_data.declaration_source(db) {
+                    Some(it) => {
+                        let mut map = AttrSourceMap::new(InFile::new(it.file_id, &it.value));
+                        if let InFile { file_id, value: ModuleSource::SourceFile(file) } =
+                            mod_data.definition_source(db)
+                        {
+                            map.append_module_inline_attrs(AttrSourceMap::new(InFile::new(
+                                file_id, &file,
+                            )));
+                        }
+                        return map;
+                    }
+                    None => {
+                        let InFile { file_id, value } = mod_data.definition_source(db);
+                        let attrs_owner = match &value {
+                            ModuleSource::SourceFile(file) => file as &dyn ast::HasAttrs,
+                            ModuleSource::Module(module) => module as &dyn ast::HasAttrs,
+                            ModuleSource::BlockExpr(block) => block as &dyn ast::HasAttrs,
+                        };
+                        return AttrSourceMap::new(InFile::new(file_id, attrs_owner));
+                    }
+                }
+            }
+            AttrDefId::FieldId(id) => {
+                let map = db.fields_attrs_source_map(id.parent);
+                let file_id = id.parent.file_id(db);
+                let root = db.parse_or_expand(file_id);
+                let owner = ast::AnyHasAttrs::new(map[id.local_id].to_node(&root));
+                InFile::new(file_id, owner)
+            }
+            AttrDefId::AdtId(adt) => match adt {
+                AdtId::StructId(id) => any_has_attrs(db, id),
+                AdtId::UnionId(id) => any_has_attrs(db, id),
+                AdtId::EnumId(id) => any_has_attrs(db, id),
+            },
+            AttrDefId::FunctionId(id) => any_has_attrs(db, id),
+            AttrDefId::EnumVariantId(id) => any_has_attrs(db, id),
+            AttrDefId::StaticId(id) => any_has_attrs(db, id),
+            AttrDefId::ConstId(id) => any_has_attrs(db, id),
+            AttrDefId::TraitId(id) => any_has_attrs(db, id),
+            AttrDefId::TypeAliasId(id) => any_has_attrs(db, id),
+            AttrDefId::MacroId(id) => match id {
+                MacroId::Macro2Id(id) => any_has_attrs(db, id),
+                MacroId::MacroRulesId(id) => any_has_attrs(db, id),
+                MacroId::ProcMacroId(id) => any_has_attrs(db, id),
+            },
+            AttrDefId::ImplId(id) => any_has_attrs(db, id),
+            AttrDefId::GenericParamId(id) => match id {
+                GenericParamId::ConstParamId(id) => id
+                    .parent()
+                    .child_source(db)
+                    .map(|source| ast::AnyHasAttrs::new(source[id.local_id()].clone())),
+                GenericParamId::TypeParamId(id) => id
+                    .parent()
+                    .child_source(db)
+                    .map(|source| ast::AnyHasAttrs::new(source[id.local_id()].clone())),
+                GenericParamId::LifetimeParamId(id) => id
+                    .parent
+                    .child_source(db)
+                    .map(|source| ast::AnyHasAttrs::new(source[id.local_id].clone())),
+            },
+            AttrDefId::ExternBlockId(id) => any_has_attrs(db, id),
+            AttrDefId::ExternCrateId(id) => any_has_attrs(db, id),
+            AttrDefId::UseId(id) => any_has_attrs(db, id),
+        };
+
+        AttrSourceMap::new(owner.as_ref().map(|node| node as &dyn HasAttrs))
+    }
+}
+
+#[derive(Debug)]
+pub struct AttrSourceMap {
+    source: Vec<Either<ast::Attr, ast::Comment>>,
+    file_id: HirFileId,
+    /// If this map is for a module, this will be the [`HirFileId`] of the module's definition site,
+    /// while `file_id` will be the one of the module declaration site.
+    /// The usize is the index into `source` from which point on the entries reside in the def site
+    /// file.
+    mod_def_site_file_id: Option<(HirFileId, usize)>,
+}
+
+impl AttrSourceMap {
+    fn new(owner: InFile<&dyn ast::HasAttrs>) -> Self {
+        Self {
+            source: collect_attrs(owner.value).map(|(_, it)| it).collect(),
+            file_id: owner.file_id,
+            mod_def_site_file_id: None,
+        }
+    }
+
+    /// Append a second source map to this one, this is required for modules, whose outline and inline
+    /// attributes can reside in different files
+    fn append_module_inline_attrs(&mut self, other: Self) {
+        assert!(self.mod_def_site_file_id.is_none() && other.mod_def_site_file_id.is_none());
+        let len = self.source.len();
+        self.source.extend(other.source);
+        if other.file_id != self.file_id {
+            self.mod_def_site_file_id = Some((other.file_id, len));
+        }
+    }
+
+    /// Maps the lowered `Attr` back to its original syntax node.
+    ///
+    /// `attr` must come from the `owner` used for AttrSourceMap
+    ///
+    /// Note that the returned syntax node might be a `#[cfg_attr]`, or a doc comment, instead of
+    /// the attribute represented by `Attr`.
+    pub fn source_of(&self, attr: &Attr) -> InFile<&Either<ast::Attr, ast::Comment>> {
+        self.source_of_id(attr.id)
+    }
+
+    pub fn source_of_id(&self, id: AttrId) -> InFile<&Either<ast::Attr, ast::Comment>> {
+        let ast_idx = id.ast_index();
+        let file_id = match self.mod_def_site_file_id {
+            Some((file_id, def_site_cut)) if def_site_cut <= ast_idx => file_id,
+            _ => self.file_id,
+        };
+
+        self.source
+            .get(ast_idx)
+            .map(|it| InFile::new(file_id, it))
+            .unwrap_or_else(|| panic!("cannot find attr at index {id:?}"))
+    }
+}
+
+#[derive(Debug, Clone)]
+pub struct AttrQuery<'attr> {
+    attrs: &'attr Attrs,
+    key: Symbol,
+}
+
+impl<'attr> AttrQuery<'attr> {
+    #[inline]
+    pub fn tt_values(self) -> impl Iterator<Item = &'attr crate::tt::TopSubtree> {
+        self.attrs().filter_map(|attr| attr.token_tree_value())
+    }
+
+    #[inline]
+    pub fn string_value(self) -> Option<&'attr Symbol> {
+        self.attrs().find_map(|attr| attr.string_value())
+    }
+
+    #[inline]
+    pub fn string_value_with_span(self) -> Option<(&'attr Symbol, span::Span)> {
+        self.attrs().find_map(|attr| attr.string_value_with_span())
+    }
+
+    #[inline]
+    pub fn string_value_unescape(self) -> Option<Cow<'attr, str>> {
+        self.attrs().find_map(|attr| attr.string_value_unescape())
+    }
+
+    #[inline]
+    pub fn exists(self) -> bool {
+        self.attrs().next().is_some()
+    }
+
+    #[inline]
+    pub fn attrs(self) -> impl Iterator<Item = &'attr Attr> + Clone {
+        let key = self.key;
+        self.attrs.iter().filter(move |attr| attr.path.as_ident().is_some_and(|s| *s == key))
+    }
+
+    /// Find string value for a specific key inside token tree
+    ///
+    /// ```ignore
+    /// #[doc(html_root_url = "url")]
+    ///       ^^^^^^^^^^^^^ key
+    /// ```
+    #[inline]
+    pub fn find_string_value_in_tt(self, key: Symbol) -> Option<&'attr str> {
+        self.tt_values().find_map(|tt| {
+            let name = tt.iter()
+                .skip_while(|tt| !matches!(tt, TtElement::Leaf(tt::Leaf::Ident(tt::Ident { sym, ..} )) if *sym == key))
+                .nth(2);
+
+            match name {
+                Some(TtElement::Leaf(tt::Leaf::Literal(tt::Literal{  symbol: text, kind: tt::LitKind::Str | tt::LitKind::StrRaw(_) , ..}))) => Some(text.as_str()),
+                _ => None
+            }
+        })
+    }
+}
+
+fn any_has_attrs<'db>(
+    db: &(dyn DefDatabase + 'db),
+    id: impl Lookup<Database = dyn DefDatabase, Data = impl HasSource<Value = impl ast::HasAttrs>>,
+) -> InFile<ast::AnyHasAttrs> {
+    id.lookup(db).source(db).map(ast::AnyHasAttrs::new)
+}
+
+fn attrs_from_ast_id_loc<'db, N: AstIdNode + HasAttrs>(
+    db: &(dyn DefDatabase + 'db),
+    lookup: impl Lookup<Database = dyn DefDatabase, Data = impl AstIdLoc<Ast = N> + HasModule>,
+) -> Attrs {
+    let loc = lookup.lookup(db);
+    let source = loc.source(db);
+    let span_map = db.span_map(source.file_id);
+    let cfg_options = loc.krate(db).cfg_options(db);
+    Attrs(RawAttrs::new_expanded(db, &source.value, span_map.as_ref(), cfg_options))
+}
+
+pub(crate) fn fields_attrs_source_map(
+    db: &dyn DefDatabase,
+    def: VariantId,
+) -> Arc<ArenaMap<LocalFieldId, AstPtr<Either<ast::TupleField, ast::RecordField>>>> {
+    let mut res = ArenaMap::default();
+    let child_source = def.child_source(db);
+
+    for (idx, variant) in child_source.value.iter() {
+        res.insert(
+            idx,
+            variant
+                .as_ref()
+                .either(|l| AstPtr::new(l).wrap_left(), |r| AstPtr::new(r).wrap_right()),
+        );
+    }
+
+    Arc::new(res)
+}
+
+#[cfg(test)]
+mod tests {
+    //! This module contains tests for doc-expression parsing.
+    //! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`.
+
+    use intern::Symbol;
+    use span::EditionedFileId;
+    use triomphe::Arc;
+
+    use hir_expand::span_map::{RealSpanMap, SpanMap};
+    use span::FileId;
+    use syntax::{AstNode, TextRange, ast};
+    use syntax_bridge::{DocCommentDesugarMode, syntax_node_to_token_tree};
+
+    use crate::attr::{DocAtom, DocExpr};
+
+    fn assert_parse_result(input: &str, expected: DocExpr) {
+        let source_file = ast::SourceFile::parse(input, span::Edition::CURRENT).ok().unwrap();
+        let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+        let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(
+            EditionedFileId::current_edition(FileId::from_raw(0)),
+        )));
+        let tt = syntax_node_to_token_tree(
+            tt.syntax(),
+            map.as_ref(),
+            map.span_for_range(TextRange::empty(0.into())),
+            DocCommentDesugarMode::ProcMacro,
+        );
+        let cfg = DocExpr::parse(&tt);
+        assert_eq!(cfg, expected);
+    }
+
+    #[test]
+    fn test_doc_expr_parser() {
+        assert_parse_result("#![doc(hidden)]", DocAtom::Flag(Symbol::intern("hidden")).into());
+
+        assert_parse_result(
+            r#"#![doc(alias = "foo")]"#,
+            DocAtom::KeyValue { key: Symbol::intern("alias"), value: Symbol::intern("foo") }.into(),
+        );
+
+        assert_parse_result(
+            r#"#![doc(alias("foo"))]"#,
+            DocExpr::Alias([Symbol::intern("foo")].into()),
+        );
+        assert_parse_result(
+            r#"#![doc(alias("foo", "bar", "baz"))]"#,
+            DocExpr::Alias(
+                [Symbol::intern("foo"), Symbol::intern("bar"), Symbol::intern("baz")].into(),
+            ),
+        );
+
+        assert_parse_result(
+            r#"
+        #[doc(alias("Bar", "Qux"))]
+        struct Foo;"#,
+            DocExpr::Alias([Symbol::intern("Bar"), Symbol::intern("Qux")].into()),
+        );
+    }
+}
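
For reference (an annotation, not part of the patch): the two shapes that `DocExpr::aliases` distinguishes can be seen directly on constructed values. A minimal sketch, assuming it sits inside `hir-def` next to the module above and that `doc_alias_shapes` is a hypothetical test helper:

```rust
// Sketch only: exercises DocExpr::aliases() on hand-built values.
use intern::Symbol;

use crate::attr::{DocAtom, DocExpr};

fn doc_alias_shapes() {
    // `#[doc(alias = "Foo")]` parses to a key-value atom with a single alias.
    let single = DocExpr::Atom(DocAtom::KeyValue {
        key: Symbol::intern("alias"),
        value: Symbol::intern("Foo"),
    });
    assert_eq!(single.aliases(), &[Symbol::intern("Foo")][..]);

    // `#[doc(alias("Bar", "Qux"))]` parses to the list form.
    let list = DocExpr::Alias(vec![Symbol::intern("Bar"), Symbol::intern("Qux")]);
    assert_eq!(list.aliases(), &[Symbol::intern("Bar"), Symbol::intern("Qux")][..]);
}
```

Both forms end up flattened by `Attrs::doc_aliases`, which `flat_map`s `aliases()` over every `#[doc(...)]` token tree.
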
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attrs.rs b/src/tools/rust-analyzer/crates/hir-def/src/attrs.rs
deleted file mode 100644
index 1897cb5205aaa..0000000000000
--- a/src/tools/rust-analyzer/crates/hir-def/src/attrs.rs
+++ /dev/null
@@ -1,1613 +0,0 @@
-//! Attributes for anything that is not name resolution.
-//!
-//! The fundamental idea of this module stems from the observation that most "interesting"
-//! attributes have a more memory-compact form than storing their full syntax, and
-//! that most of the attributes are flags, and those that are not are rare. Therefore,
-//! this module defines [`AttrFlags`], which is a bitflag enum that contains only a yes/no
-//! answer to whether an attribute is present on an item. For most attributes, that's all
-//! that is interesting us; for the rest of them, we define another query that extracts
-//! their data. A key part is that every one of those queries will have a wrapper method
-//! that queries (or is given) the `AttrFlags` and checks for the presence of the attribute;
-//! if it is not present, we do not call the query, to prevent Salsa from needing to record
-//! its value. This way, queries are only called on items that have the attribute, which is
-//! usually only a few.
-//!
-//! An exception to this model that is also defined in this module is documentation (doc
-//! comments and `#[doc = "..."]` attributes). But it also has a more compact form than
-//! the attribute: a concatenated string of the full docs as well as a source map
-//! to map it back to AST (which is needed for things like resolving links in doc comments
-//! and highlight injection). The lowering and upmapping of doc comments is a bit complicated,
-//! but it is encapsulated in the [`Docs`] struct.
-
-use std::{
-    convert::Infallible,
-    iter::Peekable,
-    ops::{ControlFlow, Range},
-};
-
-use base_db::Crate;
-use cfg::{CfgExpr, CfgOptions};
-use either::Either;
-use hir_expand::{
-    HirFileId, InFile, Lookup,
-    attrs::{Meta, expand_cfg_attr, expand_cfg_attr_with_doc_comments},
-};
-use intern::Symbol;
-use itertools::Itertools;
-use la_arena::ArenaMap;
-use rustc_abi::ReprOptions;
-use rustc_hash::FxHashSet;
-use smallvec::SmallVec;
-use syntax::{
-    AstNode, AstToken, NodeOrToken, SmolStr, SyntaxNode, SyntaxToken, T,
-    ast::{self, AttrDocCommentIter, HasAttrs, IsString, TokenTreeChildren},
-};
-use tt::{TextRange, TextSize};
-
-use crate::{
-    AdtId, AstIdLoc, AttrDefId, FieldId, FunctionId, GenericDefId, HasModule, InternedModuleId,
-    LifetimeParamId, LocalFieldId, MacroId, TypeOrConstParamId, VariantId,
-    db::DefDatabase,
-    hir::generics::{GenericParams, LocalLifetimeParamId, LocalTypeOrConstParamId},
-    lang_item::LangItem,
-    nameres::ModuleOrigin,
-    src::{HasChildSource, HasSource},
-};
-
-#[inline]
-fn attrs_from_ast_id_loc>(
-    db: &dyn DefDatabase,
-    lookup: impl Lookup<Database = dyn DefDatabase, Data = impl AstIdLoc<Ast = N> + HasModule>,
-) -> (InFile<ast::AnyHasAttrs>, Crate) {
-    let loc = lookup.lookup(db);
-    let source = loc.source(db);
-    let krate = loc.krate(db);
-    (source.map(|it| it.into()), krate)
-}
-
-#[inline]
-fn extract_doc_tt_attr(attr_flags: &mut AttrFlags, tt: ast::TokenTree) {
-    for atom in DocAtom::parse(tt) {
-        match atom {
-            DocAtom::Flag(flag) => match &*flag {
-                "notable_trait" => attr_flags.insert(AttrFlags::IS_DOC_NOTABLE_TRAIT),
-                "hidden" => attr_flags.insert(AttrFlags::IS_DOC_HIDDEN),
-                _ => {}
-            },
-            DocAtom::KeyValue { key, value: _ } => match &*key {
-                "alias" => attr_flags.insert(AttrFlags::HAS_DOC_ALIASES),
-                "keyword" => attr_flags.insert(AttrFlags::HAS_DOC_KEYWORD),
-                _ => {}
-            },
-            DocAtom::Alias(_) => attr_flags.insert(AttrFlags::HAS_DOC_ALIASES),
-        }
-    }
-}
-
-fn extract_ra_completions(attr_flags: &mut AttrFlags, tt: ast::TokenTree) {
-    let tt = TokenTreeChildren::new(&tt);
-    if let Ok(NodeOrToken::Token(option)) = tt.exactly_one()
-        && option.kind().is_any_identifier()
-    {
-        match option.text() {
-            "ignore_flyimport" => attr_flags.insert(AttrFlags::COMPLETE_IGNORE_FLYIMPORT),
-            "ignore_methods" => attr_flags.insert(AttrFlags::COMPLETE_IGNORE_METHODS),
-            "ignore_flyimport_methods" => {
-                attr_flags.insert(AttrFlags::COMPLETE_IGNORE_FLYIMPORT_METHODS)
-            }
-            _ => {}
-        }
-    }
-}
-
-fn extract_rustc_skip_during_method_dispatch(attr_flags: &mut AttrFlags, tt: ast::TokenTree) {
-    let iter = TokenTreeChildren::new(&tt);
-    for kind in iter {
-        if let NodeOrToken::Token(kind) = kind
-            && kind.kind().is_any_identifier()
-        {
-            match kind.text() {
-                "array" => attr_flags.insert(AttrFlags::RUSTC_SKIP_ARRAY_DURING_METHOD_DISPATCH),
-                "boxed_slice" => {
-                    attr_flags.insert(AttrFlags::RUSTC_SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH)
-                }
-                _ => {}
-            }
-        }
-    }
-}
-
-#[inline]
-fn match_attr_flags(attr_flags: &mut AttrFlags, attr: Meta) -> ControlFlow<Infallible> {
-    match attr {
-        Meta::NamedKeyValue { name: Some(name), value, .. } => match name.text() {
-            "deprecated" => attr_flags.insert(AttrFlags::IS_DEPRECATED),
-            "lang" => attr_flags.insert(AttrFlags::LANG_ITEM),
-            "path" => attr_flags.insert(AttrFlags::HAS_PATH),
-            "unstable" => attr_flags.insert(AttrFlags::IS_UNSTABLE),
-            "export_name" => {
-                if let Some(value) = value
-                    && let Some(value) = ast::String::cast(value)
-                    && let Ok(value) = value.value()
-                    && *value == *"main"
-                {
-                    attr_flags.insert(AttrFlags::IS_EXPORT_NAME_MAIN);
-                }
-            }
-            _ => {}
-        },
-        Meta::TokenTree { path, tt } => match path.segments.len() {
-            1 => match path.segments[0].text() {
-                "deprecated" => attr_flags.insert(AttrFlags::IS_DEPRECATED),
-                "cfg" => attr_flags.insert(AttrFlags::HAS_CFG),
-                "doc" => extract_doc_tt_attr(attr_flags, tt),
-                "repr" => attr_flags.insert(AttrFlags::HAS_REPR),
-                "target_feature" => attr_flags.insert(AttrFlags::HAS_TARGET_FEATURE),
-                "proc_macro_derive" | "rustc_builtin_macro" => {
-                    attr_flags.insert(AttrFlags::IS_DERIVE_OR_BUILTIN_MACRO)
-                }
-                "unstable" => attr_flags.insert(AttrFlags::IS_UNSTABLE),
-                "rustc_layout_scalar_valid_range_start" | "rustc_layout_scalar_valid_range_end" => {
-                    attr_flags.insert(AttrFlags::RUSTC_LAYOUT_SCALAR_VALID_RANGE)
-                }
-                "rustc_legacy_const_generics" => {
-                    attr_flags.insert(AttrFlags::HAS_LEGACY_CONST_GENERICS)
-                }
-                "rustc_skip_during_method_dispatch" => {
-                    extract_rustc_skip_during_method_dispatch(attr_flags, tt)
-                }
-                _ => {}
-            },
-            2 => match path.segments[0].text() {
-                "rust_analyzer" => match path.segments[1].text() {
-                    "completions" => extract_ra_completions(attr_flags, tt),
-                    _ => {}
-                },
-                _ => {}
-            },
-            _ => {}
-        },
-        Meta::Path { path } => {
-            match path.segments.len() {
-                1 => match path.segments[0].text() {
-                    "rustc_has_incoherent_inherent_impls" => {
-                        attr_flags.insert(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS)
-                    }
-                    "rustc_allow_incoherent_impl" => {
-                        attr_flags.insert(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL)
-                    }
-                    "fundamental" => attr_flags.insert(AttrFlags::FUNDAMENTAL),
-                    "no_std" => attr_flags.insert(AttrFlags::IS_NO_STD),
-                    "may_dangle" => attr_flags.insert(AttrFlags::MAY_DANGLE),
-                    "rustc_paren_sugar" => attr_flags.insert(AttrFlags::RUSTC_PAREN_SUGAR),
-                    "rustc_coinductive" => attr_flags.insert(AttrFlags::RUSTC_COINDUCTIVE),
-                    "rustc_force_inline" => attr_flags.insert(AttrFlags::RUSTC_FORCE_INLINE),
-                    "unstable" => attr_flags.insert(AttrFlags::IS_UNSTABLE),
-                    "deprecated" => attr_flags.insert(AttrFlags::IS_DEPRECATED),
-                    "macro_export" => attr_flags.insert(AttrFlags::IS_MACRO_EXPORT),
-                    "no_mangle" => attr_flags.insert(AttrFlags::NO_MANGLE),
-                    "non_exhaustive" => attr_flags.insert(AttrFlags::NON_EXHAUSTIVE),
-                    "ignore" => attr_flags.insert(AttrFlags::IS_IGNORE),
-                    "bench" => attr_flags.insert(AttrFlags::IS_BENCH),
-                    "rustc_const_panic_str" => attr_flags.insert(AttrFlags::RUSTC_CONST_PANIC_STR),
-                    "rustc_intrinsic" => attr_flags.insert(AttrFlags::RUSTC_INTRINSIC),
-                    "rustc_safe_intrinsic" => attr_flags.insert(AttrFlags::RUSTC_SAFE_INTRINSIC),
-                    "rustc_intrinsic_must_be_overridden" => {
-                        attr_flags.insert(AttrFlags::RUSTC_INTRINSIC_MUST_BE_OVERRIDDEN)
-                    }
-                    "rustc_allocator" => attr_flags.insert(AttrFlags::RUSTC_ALLOCATOR),
-                    "rustc_deallocator" => attr_flags.insert(AttrFlags::RUSTC_DEALLOCATOR),
-                    "rustc_reallocator" => attr_flags.insert(AttrFlags::RUSTC_REALLOCATOR),
-                    "rustc_allocator_zeroed" => {
-                        attr_flags.insert(AttrFlags::RUSTC_ALLOCATOR_ZEROED)
-                    }
-                    "rustc_reservation_impl" => {
-                        attr_flags.insert(AttrFlags::RUSTC_RESERVATION_IMPL)
-                    }
-                    "rustc_deprecated_safe_2024" => {
-                        attr_flags.insert(AttrFlags::RUSTC_DEPRECATED_SAFE_2024)
-                    }
-                    "rustc_skip_array_during_method_dispatch" => {
-                        attr_flags.insert(AttrFlags::RUSTC_SKIP_ARRAY_DURING_METHOD_DISPATCH)
-                    }
-                    _ => {}
-                },
-                2 => match path.segments[0].text() {
-                    "rust_analyzer" => match path.segments[1].text() {
-                        "skip" => attr_flags.insert(AttrFlags::RUST_ANALYZER_SKIP),
-                        _ => {}
-                    },
-                    _ => {}
-                },
-                _ => {}
-            }
-
-            if path.is_test {
-                attr_flags.insert(AttrFlags::IS_TEST);
-            }
-        }
-        _ => {}
-    };
-    ControlFlow::Continue(())
-}
-
-bitflags::bitflags! {
-    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-    pub struct AttrFlags: u64 {
-        const RUST_ANALYZER_SKIP = 1 << 0;
-
-        const LANG_ITEM = 1 << 1;
-
-        const HAS_DOC_ALIASES = 1 << 2;
-        const HAS_DOC_KEYWORD = 1 << 3;
-        const IS_DOC_NOTABLE_TRAIT = 1 << 4;
-        const IS_DOC_HIDDEN = 1 << 5;
-
-        const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS = 1 << 6;
-        const RUSTC_ALLOW_INCOHERENT_IMPL = 1 << 7;
-        const FUNDAMENTAL = 1 << 8;
-        const RUSTC_SKIP_ARRAY_DURING_METHOD_DISPATCH = 1 << 9;
-        const RUSTC_SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH = 1 << 10;
-        const HAS_REPR = 1 << 11;
-        const HAS_TARGET_FEATURE = 1 << 12;
-        const RUSTC_DEPRECATED_SAFE_2024 = 1 << 13;
-        const HAS_LEGACY_CONST_GENERICS = 1 << 14;
-        const NO_MANGLE = 1 << 15;
-        const NON_EXHAUSTIVE = 1 << 16;
-        const RUSTC_RESERVATION_IMPL = 1 << 17;
-        const RUSTC_CONST_PANIC_STR = 1 << 18;
-        const MAY_DANGLE = 1 << 19;
-
-        const RUSTC_INTRINSIC = 1 << 20;
-        const RUSTC_SAFE_INTRINSIC = 1 << 21;
-        const RUSTC_INTRINSIC_MUST_BE_OVERRIDDEN = 1 << 22;
-        const RUSTC_ALLOCATOR = 1 << 23;
-        const RUSTC_DEALLOCATOR = 1 << 24;
-        const RUSTC_REALLOCATOR = 1 << 25;
-        const RUSTC_ALLOCATOR_ZEROED = 1 << 26;
-
-        const IS_UNSTABLE = 1 << 27;
-        const IS_IGNORE = 1 << 28;
-        // FIXME: `IS_TEST` and `IS_BENCH` should be based on semantic information, not textual match.
-        const IS_BENCH = 1 << 29;
-        const IS_TEST = 1 << 30;
-        const IS_EXPORT_NAME_MAIN = 1 << 31;
-        const IS_MACRO_EXPORT = 1 << 32;
-        const IS_NO_STD = 1 << 33;
-        const IS_DERIVE_OR_BUILTIN_MACRO = 1 << 34;
-        const IS_DEPRECATED = 1 << 35;
-        const HAS_PATH = 1 << 36;
-        const HAS_CFG = 1 << 37;
-
-        const COMPLETE_IGNORE_FLYIMPORT = 1 << 38;
-        const COMPLETE_IGNORE_FLYIMPORT_METHODS = 1 << 39;
-        const COMPLETE_IGNORE_METHODS = 1 << 40;
-
-        const RUSTC_LAYOUT_SCALAR_VALID_RANGE = 1 << 41;
-        const RUSTC_PAREN_SUGAR = 1 << 42;
-        const RUSTC_COINDUCTIVE = 1 << 43;
-        const RUSTC_FORCE_INLINE = 1 << 44;
-    }
-}
-
-fn attrs_source(
-    db: &dyn DefDatabase,
-    owner: AttrDefId,
-) -> (InFile<ast::AnyHasAttrs>, Option<InFile<ast::Module>>, Crate) {
-    let (owner, krate) = match owner {
-        AttrDefId::ModuleId(id) => {
-            let id = id.loc(db);
-            let def_map = id.def_map(db);
-            let (definition, declaration) = match def_map[id.local_id].origin {
-                ModuleOrigin::CrateRoot { definition } => {
-                    let file = db.parse(definition).tree();
-                    (InFile::new(definition.into(), ast::AnyHasAttrs::from(file)), None)
-                }
-                ModuleOrigin::File { declaration, declaration_tree_id, definition, .. } => {
-                    let declaration = InFile::new(declaration_tree_id.file_id(), declaration);
-                    let declaration = declaration.with_value(declaration.to_node(db));
-                    let definition_source = db.parse(definition).tree();
-                    (InFile::new(definition.into(), definition_source.into()), Some(declaration))
-                }
-                ModuleOrigin::Inline { definition_tree_id, definition } => {
-                    let definition = InFile::new(definition_tree_id.file_id(), definition);
-                    let definition = definition.with_value(definition.to_node(db).into());
-                    (definition, None)
-                }
-                ModuleOrigin::BlockExpr { block, .. } => {
-                    let definition = block.to_node(db);
-                    (block.with_value(definition.into()), None)
-                }
-            };
-            return (definition, declaration, id.krate);
-        }
-        AttrDefId::AdtId(AdtId::StructId(it)) => attrs_from_ast_id_loc(db, it),
-        AttrDefId::AdtId(AdtId::UnionId(it)) => attrs_from_ast_id_loc(db, it),
-        AttrDefId::AdtId(AdtId::EnumId(it)) => attrs_from_ast_id_loc(db, it),
-        AttrDefId::FunctionId(it) => attrs_from_ast_id_loc(db, it),
-        AttrDefId::EnumVariantId(it) => attrs_from_ast_id_loc(db, it),
-        AttrDefId::StaticId(it) => attrs_from_ast_id_loc(db, it),
-        AttrDefId::ConstId(it) => attrs_from_ast_id_loc(db, it),
-        AttrDefId::TraitId(it) => attrs_from_ast_id_loc(db, it),
-        AttrDefId::TypeAliasId(it) => attrs_from_ast_id_loc(db, it),
-        AttrDefId::MacroId(MacroId::MacroRulesId(it)) => attrs_from_ast_id_loc(db, it),
-        AttrDefId::MacroId(MacroId::Macro2Id(it)) => attrs_from_ast_id_loc(db, it),
-        AttrDefId::MacroId(MacroId::ProcMacroId(it)) => attrs_from_ast_id_loc(db, it),
-        AttrDefId::ImplId(it) => attrs_from_ast_id_loc(db, it),
-        AttrDefId::ExternBlockId(it) => attrs_from_ast_id_loc(db, it),
-        AttrDefId::ExternCrateId(it) => attrs_from_ast_id_loc(db, it),
-        AttrDefId::UseId(it) => attrs_from_ast_id_loc(db, it),
-    };
-    (owner, None, krate)
-}
-
-fn collect_attrs<B>(
-    db: &dyn DefDatabase,
-    owner: AttrDefId,
-    mut callback: impl FnMut(Meta) -> ControlFlow<B>,
-) -> Option<B> {
-    let (source, outer_mod_decl, krate) = attrs_source(db, owner);
-
-    let mut cfg_options = None;
-    expand_cfg_attr(
-        outer_mod_decl
-            .into_iter()
-            .flat_map(|it| it.value.attrs())
-            .chain(ast::attrs_including_inner(&source.value)),
-        || cfg_options.get_or_insert_with(|| krate.cfg_options(db)),
-        move |meta, _, _, _| callback(meta),
-    )
-}
-
-fn collect_field_attrs<T>(
-    db: &dyn DefDatabase,
-    variant: VariantId,
-    mut field_attrs: impl FnMut(&CfgOptions, InFile<ast::AnyHasAttrs>) -> T,
-) -> ArenaMap<LocalFieldId, T> {
-    let (variant_syntax, krate) = match variant {
-        VariantId::EnumVariantId(it) => attrs_from_ast_id_loc(db, it),
-        VariantId::StructId(it) => attrs_from_ast_id_loc(db, it),
-        VariantId::UnionId(it) => attrs_from_ast_id_loc(db, it),
-    };
-    let cfg_options = krate.cfg_options(db);
-    let variant_syntax = variant_syntax
-        .with_value(ast::VariantDef::cast(variant_syntax.value.syntax().clone()).unwrap());
-    let fields = match &variant_syntax.value {
-        ast::VariantDef::Struct(it) => it.field_list(),
-        ast::VariantDef::Union(it) => it.record_field_list().map(ast::FieldList::RecordFieldList),
-        ast::VariantDef::Variant(it) => it.field_list(),
-    };
-    let Some(fields) = fields else {
-        return ArenaMap::new();
-    };
-
-    let mut result = ArenaMap::new();
-    let mut idx = 0;
-    match fields {
-        ast::FieldList::RecordFieldList(fields) => {
-            for field in fields.fields() {
-                if AttrFlags::is_cfg_enabled_for(&field, cfg_options).is_ok() {
-                    result.insert(
-                        la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(idx)),
-                        field_attrs(cfg_options, variant_syntax.with_value(field.into())),
-                    );
-                    idx += 1;
-                }
-            }
-        }
-        ast::FieldList::TupleFieldList(fields) => {
-            for field in fields.fields() {
-                if AttrFlags::is_cfg_enabled_for(&field, cfg_options).is_ok() {
-                    result.insert(
-                        la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(idx)),
-                        field_attrs(cfg_options, variant_syntax.with_value(field.into())),
-                    );
-                    idx += 1;
-                }
-            }
-        }
-    }
-    result.shrink_to_fit();
-    result
-}
-
-#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct RustcLayoutScalarValidRange {
-    pub start: Option<u128>,
-    pub end: Option<u128>,
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-struct DocsSourceMapLine {
-    /// The offset in [`Docs::docs`].
-    string_offset: TextSize,
-    /// The offset in the AST of the text.
-    ast_offset: TextSize,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct Docs {
-    /// The concatenated string of all `#[doc = "..."]` attributes and documentation comments.
-    docs: String,
-    /// A sorted map from an offset in `docs` to an offset in the source code.
-    docs_source_map: Vec<DocsSourceMapLine>,
-    /// If the item is an outlined module (`mod foo;`), `docs_source_map` stores the concatenated
-    /// list of the outline and inline docs (outline first). Then, this field contains the [`HirFileId`]
-    /// of the outline declaration, and the index in `docs` from which the inline docs
-    /// begin.
-    outline_mod: Option<(HirFileId, usize)>,
-    inline_file: HirFileId,
-    /// The size of the prepended prefix, which does not map to real doc comments.
-    prefix_len: TextSize,
-    /// The offset in `docs` from which the docs are inner attributes/comments.
-    inline_inner_docs_start: Option<TextSize>,
-    /// Like `inline_inner_docs_start`, but for `outline_mod`. This can happen only when merging `Docs`
-    /// (as outline modules don't have inner attributes).
-    outline_inner_docs_start: Option<TextSize>,
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-pub enum IsInnerDoc {
-    No,
-    Yes,
-}
-
-impl IsInnerDoc {
-    #[inline]
-    pub fn yes(self) -> bool {
-        self == IsInnerDoc::Yes
-    }
-}
-
-impl Docs {
-    #[inline]
-    pub fn docs(&self) -> &str {
-        &self.docs
-    }
-
-    #[inline]
-    pub fn into_docs(self) -> String {
-        self.docs
-    }
-
-    pub fn find_ast_range(
-        &self,
-        mut string_range: TextRange,
-    ) -> Option<(InFile<TextRange>, IsInnerDoc)> {
-        if string_range.start() < self.prefix_len {
-            return None;
-        }
-        string_range -= self.prefix_len;
-
-        let mut file = self.inline_file;
-        let mut inner_docs_start = self.inline_inner_docs_start;
-        // Check whether the range is from the outline, the inline, or both.
-        let source_map = if let Some((outline_mod_file, outline_mod_end)) = self.outline_mod {
-            if let Some(first_inline) = self.docs_source_map.get(outline_mod_end) {
-                if string_range.end() <= first_inline.string_offset {
-                    // The range is completely in the outline.
-                    file = outline_mod_file;
-                    inner_docs_start = self.outline_inner_docs_start;
-                    &self.docs_source_map[..outline_mod_end]
-                } else if string_range.start() >= first_inline.string_offset {
-                    // The range is completely in the inline.
-                    &self.docs_source_map[outline_mod_end..]
-                } else {
-                    // The range is combined from the outline and the inline - cannot map it back.
-                    return None;
-                }
-            } else {
-                // There is no inline.
-                file = outline_mod_file;
-                inner_docs_start = self.outline_inner_docs_start;
-                &self.docs_source_map
-            }
-        } else {
-            // There is no outline.
-            &self.docs_source_map
-        };
-
-        let after_range =
-            source_map.partition_point(|line| line.string_offset <= string_range.start()) - 1;
-        let after_range = &source_map[after_range..];
-        let line = after_range.first()?;
-        if after_range.get(1).is_some_and(|next_line| next_line.string_offset < string_range.end())
-        {
-            // The range is combined from two lines - cannot map it back.
-            return None;
-        }
-        let ast_range = string_range - line.string_offset + line.ast_offset;
-        let is_inner = if inner_docs_start
-            .is_some_and(|inner_docs_start| string_range.start() >= inner_docs_start)
-        {
-            IsInnerDoc::Yes
-        } else {
-            IsInnerDoc::No
-        };
-        Some((InFile::new(file, ast_range), is_inner))
-    }
-
-    #[inline]
-    pub fn shift_by(&mut self, offset: TextSize) {
-        self.prefix_len += offset;
-    }
-
-    pub fn prepend_str(&mut self, s: &str) {
-        self.prefix_len += TextSize::of(s);
-        self.docs.insert_str(0, s);
-    }
-
-    pub fn append_str(&mut self, s: &str) {
-        self.docs.push_str(s);
-    }
-
-    pub fn append(&mut self, other: &Docs) {
-        let other_offset = TextSize::of(&self.docs);
-
-        assert!(
-            self.outline_mod.is_none() && other.outline_mod.is_none(),
-            "cannot merge `Docs` that have `outline_mod` set"
-        );
-        self.outline_mod = Some((self.inline_file, self.docs_source_map.len()));
-        self.inline_file = other.inline_file;
-        self.outline_inner_docs_start = self.inline_inner_docs_start;
-        self.inline_inner_docs_start = other.inline_inner_docs_start.map(|it| it + other_offset);
-
-        self.docs.push_str(&other.docs);
-        self.docs_source_map.extend(other.docs_source_map.iter().map(
-            |&DocsSourceMapLine { string_offset, ast_offset }| DocsSourceMapLine {
-                ast_offset,
-                string_offset: string_offset + other_offset,
-            },
-        ));
-    }
-
-    fn extend_with_doc_comment(&mut self, comment: ast::Comment, indent: &mut usize) {
-        let Some((doc, offset)) = comment.doc_comment() else { return };
-        self.extend_with_doc_str(doc, comment.syntax().text_range().start() + offset, indent);
-    }
-
-    fn extend_with_doc_attr(&mut self, value: SyntaxToken, indent: &mut usize) {
-        let Some(value) = ast::String::cast(value) else { return };
-        let Some(value_offset) = value.text_range_between_quotes() else { return };
-        let value_offset = value_offset.start();
-        let Ok(value) = value.value() else { return };
-        // FIXME: Handle source maps for escaped text.
-        self.extend_with_doc_str(&value, value_offset, indent);
-    }
-
-    fn extend_with_doc_str(&mut self, doc: &str, mut offset_in_ast: TextSize, indent: &mut usize) {
-        for line in doc.split('\n') {
-            self.docs_source_map.push(DocsSourceMapLine {
-                string_offset: TextSize::of(&self.docs),
-                ast_offset: offset_in_ast,
-            });
-            offset_in_ast += TextSize::of(line) + TextSize::of("\n");
-
-            let line = line.trim_end();
-            if let Some(line_indent) = line.chars().position(|ch| !ch.is_whitespace()) {
-                // Empty lines are handled because `position()` returns `None` for them.
-                *indent = std::cmp::min(*indent, line_indent);
-            }
-            self.docs.push_str(line);
-            self.docs.push('\n');
-        }
-    }
-
-    fn remove_indent(&mut self, indent: usize, start_source_map_index: usize) {
-        /// In case of panics, we want to avoid corrupted UTF-8 in `self.docs`, so we clear it.
-        struct Guard<'a>(&'a mut Docs);
-        impl Drop for Guard<'_> {
-            fn drop(&mut self) {
-                let Docs {
-                    docs,
-                    docs_source_map,
-                    outline_mod,
-                    inline_file: _,
-                    prefix_len: _,
-                    inline_inner_docs_start: _,
-                    outline_inner_docs_start: _,
-                } = self.0;
-                // Don't use `String::clear()` here because it's not guaranteed to not do UTF-8-dependent things,
-                // and we may have temporarily broken the string's encoding.
-                unsafe { docs.as_mut_vec() }.clear();
-                // This is just to avoid panics down the road.
-                docs_source_map.clear();
-                *outline_mod = None;
-            }
-        }
-
-        if self.docs.is_empty() {
-            return;
-        }
-
-        let guard = Guard(self);
-        let source_map = &mut guard.0.docs_source_map[start_source_map_index..];
-        let Some(&DocsSourceMapLine { string_offset: mut copy_into, .. }) = source_map.first()
-        else {
-            return;
-        };
-        // We basically want to remove multiple ranges from a string. Doing this efficiently (without O(N^2)
-        // or allocations) requires unsafe. Basically, for each line, we copy the line minus the indent into
-        // consecutive to the previous line (which may have moved). Then at the end we truncate.
-        let mut accumulated_offset = TextSize::new(0);
-        for idx in 0..source_map.len() {
-            let string_end_offset = source_map
-                .get(idx + 1)
-                .map_or_else(|| TextSize::of(&guard.0.docs), |next_attr| next_attr.string_offset);
-            let line_source = &mut source_map[idx];
-            let line_docs =
-                &guard.0.docs[TextRange::new(line_source.string_offset, string_end_offset)];
-            let line_docs_len = TextSize::of(line_docs);
-            let indent_size = line_docs.char_indices().nth(indent).map_or_else(
-                || TextSize::of(line_docs) - TextSize::of("\n"),
-                |(offset, _)| TextSize::new(offset as u32),
-            );
-            unsafe { guard.0.docs.as_bytes_mut() }.copy_within(
-                Range::<usize>::from(TextRange::new(
-                    line_source.string_offset + indent_size,
-                    string_end_offset,
-                )),
-                copy_into.into(),
-            );
-            copy_into += line_docs_len - indent_size;
-
-            if let Some(inner_attrs_start) = &mut guard.0.inline_inner_docs_start
-                && *inner_attrs_start == line_source.string_offset
-            {
-                *inner_attrs_start -= accumulated_offset;
-            }
-            // The removals in the string accumulate, but not in the AST, because it already points
-            // to the beginning of each attribute.
-            // Also, we need to shift the AST offset of every line, but the string offset of the first
-            // line should not get shifted (in general, the shift for the string offset is by the
-            // number of lines until the current one, excluding the current one).
-            line_source.string_offset -= accumulated_offset;
-            line_source.ast_offset += indent_size;
-
-            accumulated_offset += indent_size;
-        }
-        // Don't use `String::truncate()` here because it's not guaranteed to not do UTF-8-dependent things,
-        // and we may have temporarily broken the string's encoding.
-        unsafe { guard.0.docs.as_mut_vec() }.truncate(copy_into.into());
-
-        std::mem::forget(guard);
-    }
-
-    fn remove_last_newline(&mut self) {
-        self.docs.truncate(self.docs.len().saturating_sub(1));
-    }
-
-    fn shrink_to_fit(&mut self) {
-        let Docs {
-            docs,
-            docs_source_map,
-            outline_mod: _,
-            inline_file: _,
-            prefix_len: _,
-            inline_inner_docs_start: _,
-            outline_inner_docs_start: _,
-        } = self;
-        docs.shrink_to_fit();
-        docs_source_map.shrink_to_fit();
-    }
-}
-
-#[derive(Debug, PartialEq, Eq, Hash)]
-pub struct DeriveInfo {
-    pub trait_name: Symbol,
-    pub helpers: Box<[Symbol]>,
-}
-
-fn extract_doc_aliases(result: &mut Vec<Symbol>, attr: Meta) -> ControlFlow<Infallible> {
-    if let Meta::TokenTree { path, tt } = attr
-        && path.is1("doc")
-    {
-        for atom in DocAtom::parse(tt) {
-            match atom {
-                DocAtom::Alias(aliases) => {
-                    result.extend(aliases.into_iter().map(|alias| Symbol::intern(&alias)))
-                }
-                DocAtom::KeyValue { key, value } if key == "alias" => {
-                    result.push(Symbol::intern(&value))
-                }
-                _ => {}
-            }
-        }
-    }
-    ControlFlow::Continue(())
-}
-
-fn extract_cfgs(result: &mut Vec<CfgExpr>, attr: Meta) -> ControlFlow<Infallible> {
-    if let Meta::TokenTree { path, tt } = attr
-        && path.is1("cfg")
-    {
-        result.push(CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(&tt).peekable()));
-    }
-    ControlFlow::Continue(())
-}
-
-fn extract_docs<'a>(
-    get_cfg_options: &dyn Fn() -> &'a CfgOptions,
-    source: InFile<ast::AnyHasAttrs>,
-    outer_mod_decl: Option<InFile<ast::Module>>,
-    inner_attrs_node: Option<SyntaxNode>,
-) -> Option<Box<Docs>> {
-    let mut result = Docs {
-        docs: String::new(),
-        docs_source_map: Vec::new(),
-        outline_mod: None,
-        inline_file: source.file_id,
-        prefix_len: TextSize::new(0),
-        inline_inner_docs_start: None,
-        outline_inner_docs_start: None,
-    };
-
-    let mut cfg_options = None;
-    let mut extend_with_attrs =
-        |result: &mut Docs, node: &SyntaxNode, expect_inner_attrs, indent: &mut usize| {
-            expand_cfg_attr_with_doc_comments::<_, Infallible>(
-                AttrDocCommentIter::from_syntax_node(node).filter(|attr| match attr {
-                    Either::Left(attr) => attr.kind().is_inner() == expect_inner_attrs,
-                    Either::Right(comment) => comment.kind().doc.is_some_and(|kind| {
-                        (kind == ast::CommentPlacement::Inner) == expect_inner_attrs
-                    }),
-                }),
-                || cfg_options.get_or_insert_with(get_cfg_options),
-                |attr| {
-                    match attr {
-                        Either::Right(doc_comment) => {
-                            result.extend_with_doc_comment(doc_comment, indent)
-                        }
-                        Either::Left((attr, _, _, _)) => match attr {
-                            // FIXME: Handle macros: `#[doc = concat!("foo", "bar")]`.
-                            Meta::NamedKeyValue {
-                                name: Some(name), value: Some(value), ..
-                            } if name.text() == "doc" => {
-                                result.extend_with_doc_attr(value, indent);
-                            }
-                            _ => {}
-                        },
-                    }
-                    ControlFlow::Continue(())
-                },
-            );
-        };
-
-    if let Some(outer_mod_decl) = outer_mod_decl {
-        let mut indent = usize::MAX;
-        extend_with_attrs(&mut result, outer_mod_decl.value.syntax(), false, &mut indent);
-        result.remove_indent(indent, 0);
-        result.outline_mod = Some((outer_mod_decl.file_id, result.docs_source_map.len()));
-    }
-
-    let inline_source_map_start = result.docs_source_map.len();
-    let mut indent = usize::MAX;
-    extend_with_attrs(&mut result, source.value.syntax(), false, &mut indent);
-    if let Some(inner_attrs_node) = &inner_attrs_node {
-        result.inline_inner_docs_start = Some(TextSize::of(&result.docs));
-        extend_with_attrs(&mut result, inner_attrs_node, true, &mut indent);
-    }
-    result.remove_indent(indent, inline_source_map_start);
-
-    result.remove_last_newline();
-
-    result.shrink_to_fit();
-
-    if result.docs.is_empty() { None } else { Some(Box::new(result)) }
-}
-
-#[salsa::tracked]
-impl AttrFlags {
-    #[salsa::tracked]
-    pub fn query(db: &dyn DefDatabase, owner: AttrDefId) -> AttrFlags {
-        let mut attr_flags = AttrFlags::empty();
-        collect_attrs(db, owner, |attr| match_attr_flags(&mut attr_flags, attr));
-        attr_flags
-    }
-
-    #[inline]
-    pub fn query_field(db: &dyn DefDatabase, field: FieldId) -> AttrFlags {
-        return field_attr_flags(db, field.parent)
-            .get(field.local_id)
-            .copied()
-            .unwrap_or_else(AttrFlags::empty);
-
-        #[salsa::tracked(returns(ref))]
-        fn field_attr_flags(
-            db: &dyn DefDatabase,
-            variant: VariantId,
-        ) -> ArenaMap<LocalFieldId, AttrFlags> {
-            collect_field_attrs(db, variant, |cfg_options, field| {
-                let mut attr_flags = AttrFlags::empty();
-                expand_cfg_attr(
-                    field.value.attrs(),
-                    || cfg_options,
-                    |attr, _, _, _| match_attr_flags(&mut attr_flags, attr),
-                );
-                attr_flags
-            })
-        }
-    }
-
-    #[inline]
-    pub fn query_generic_params(
-        db: &dyn DefDatabase,
-        def: GenericDefId,
-    ) -> &(ArenaMap<LocalLifetimeParamId, AttrFlags>, ArenaMap<LocalTypeOrConstParamId, AttrFlags>)
-    {
-        let generic_params = GenericParams::new(db, def);
-        let params_count_excluding_self =
-            generic_params.len() - usize::from(generic_params.trait_self_param().is_some());
-        if params_count_excluding_self == 0 {
-            return const { &(ArenaMap::new(), ArenaMap::new()) };
-        }
-        return generic_params_attr_flags(db, def);
-
-        #[salsa::tracked(returns(ref))]
-        fn generic_params_attr_flags(
-            db: &dyn DefDatabase,
-            def: GenericDefId,
-        ) -> (ArenaMap<LocalLifetimeParamId, AttrFlags>, ArenaMap<LocalTypeOrConstParamId, AttrFlags>)
-        {
-            let mut lifetimes = ArenaMap::new();
-            let mut type_and_consts = ArenaMap::new();
-
-            let mut cfg_options = None;
-            let mut cfg_options =
-                || *cfg_options.get_or_insert_with(|| def.krate(db).cfg_options(db));
-
-            let lifetimes_source = HasChildSource::<LocalLifetimeParamId>::child_source(&def, db);
-            for (lifetime_id, lifetime) in lifetimes_source.value.iter() {
-                let mut attr_flags = AttrFlags::empty();
-                expand_cfg_attr(lifetime.attrs(), &mut cfg_options, |attr, _, _, _| {
-                    match_attr_flags(&mut attr_flags, attr)
-                });
-                if !attr_flags.is_empty() {
-                    lifetimes.insert(lifetime_id, attr_flags);
-                }
-            }
-
-            let type_and_consts_source =
-                HasChildSource::<LocalTypeOrConstParamId>::child_source(&def, db);
-            for (type_or_const_id, type_or_const) in type_and_consts_source.value.iter() {
-                let mut attr_flags = AttrFlags::empty();
-                expand_cfg_attr(type_or_const.attrs(), &mut cfg_options, |attr, _, _, _| {
-                    match_attr_flags(&mut attr_flags, attr)
-                });
-                if !attr_flags.is_empty() {
-                    type_and_consts.insert(type_or_const_id, attr_flags);
-                }
-            }
-
-            lifetimes.shrink_to_fit();
-            type_and_consts.shrink_to_fit();
-            (lifetimes, type_and_consts)
-        }
-    }
-
-    #[inline]
-    pub fn query_lifetime_param(db: &dyn DefDatabase, owner: LifetimeParamId) -> AttrFlags {
-        AttrFlags::query_generic_params(db, owner.parent)
-            .0
-            .get(owner.local_id)
-            .copied()
-            .unwrap_or_else(AttrFlags::empty)
-    }
-    #[inline]
-    pub fn query_type_or_const_param(db: &dyn DefDatabase, owner: TypeOrConstParamId) -> AttrFlags {
-        AttrFlags::query_generic_params(db, owner.parent)
-            .1
-            .get(owner.local_id)
-            .copied()
-            .unwrap_or_else(AttrFlags::empty)
-    }
-
-    pub(crate) fn is_cfg_enabled_for(
-        owner: &dyn HasAttrs,
-        cfg_options: &CfgOptions,
-    ) -> Result<(), CfgExpr> {
-        let attrs = ast::attrs_including_inner(owner);
-        let result = expand_cfg_attr(
-            attrs,
-            || cfg_options,
-            |attr, _, _, _| {
-                if let Meta::TokenTree { path, tt } = attr
-                    && path.is1("cfg")
-                    && let cfg =
-                        CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(&tt).peekable())
-                    && cfg_options.check(&cfg) == Some(false)
-                {
-                    ControlFlow::Break(cfg)
-                } else {
-                    ControlFlow::Continue(())
-                }
-            },
-        );
-        match result {
-            Some(cfg) => Err(cfg),
-            None => Ok(()),
-        }
-    }
-
-    #[inline]
-    pub fn lang_item(db: &dyn DefDatabase, owner: AttrDefId) -> Option<LangItem> {
-        AttrFlags::query(db, owner).lang_item_with_attrs(db, owner)
-    }
-
-    #[inline]
-    pub fn lang_item_with_attrs(self, db: &dyn DefDatabase, owner: AttrDefId) -> Option<LangItem> {
-        if !self.contains(AttrFlags::LANG_ITEM) {
-            // Don't create the query in case this is not a lang item, as this wastes memory.
-            return None;
-        }
-
-        return lang_item(db, owner);
-
-        #[salsa::tracked]
-        fn lang_item(db: &dyn DefDatabase, owner: AttrDefId) -> Option<LangItem> {
-            collect_attrs(db, owner, |attr| {
-                if let Meta::NamedKeyValue { name: Some(name), value: Some(value), .. } = attr
-                    && name.text() == "lang"
-                    && let Some(value) = ast::String::cast(value)
-                    && let Ok(value) = value.value()
-                    && let symbol = Symbol::intern(&value)
-                    && let Some(lang_item) = LangItem::from_symbol(&symbol)
-                {
-                    ControlFlow::Break(lang_item)
-                } else {
-                    ControlFlow::Continue(())
-                }
-            })
-        }
-    }
-
-    #[inline]
-    pub fn repr(db: &dyn DefDatabase, owner: AdtId) -> Option<ReprOptions> {
-        if !AttrFlags::query(db, owner.into()).contains(AttrFlags::HAS_REPR) {
-            // Don't create the query in case this has no repr, as this wastes memory.
-            return None;
-        }
-
-        return repr(db, owner);
-
-        #[salsa::tracked]
-        fn repr(db: &dyn DefDatabase, owner: AdtId) -> Option<ReprOptions> {
-            let mut result = None;
-            collect_attrs::<Infallible>(db, owner.into(), |attr| {
-                if let Meta::TokenTree { path, tt } = attr
-                    && path.is1("repr")
-                    && let Some(repr) = parse_repr_tt(&tt)
-                {
-                    match &mut result {
-                        Some(existing) => merge_repr(existing, repr),
-                        None => result = Some(repr),
-                    }
-                }
-                ControlFlow::Continue(())
-            });
-            result
-        }
-    }
-
-    /// Call this only if there are legacy const generics, to save memory.
-    #[salsa::tracked(returns(ref))]
-    pub(crate) fn legacy_const_generic_indices(
-        db: &dyn DefDatabase,
-        owner: FunctionId,
-    ) -> Option<Box<[u32]>> {
-        let result = collect_attrs(db, owner.into(), |attr| {
-            if let Meta::TokenTree { path, tt } = attr
-                && path.is1("rustc_legacy_const_generics")
-            {
-                let result = parse_rustc_legacy_const_generics(tt);
-                ControlFlow::Break(result)
-            } else {
-                ControlFlow::Continue(())
-            }
-        });
-        result.filter(|it| !it.is_empty())
-    }
-
-    // There aren't typically many crates, so it's okay to always make this a query without a flag.
-    #[salsa::tracked(returns(ref))]
-    pub fn doc_html_root_url(db: &dyn DefDatabase, krate: Crate) -> Option<SmolStr> {
-        let root_file_id = krate.root_file_id(db);
-        let syntax = db.parse(root_file_id).tree();
-
-        let mut cfg_options = None;
-        expand_cfg_attr(
-            syntax.attrs(),
-            || cfg_options.get_or_insert(krate.cfg_options(db)),
-            |attr, _, _, _| {
-                if let Meta::TokenTree { path, tt } = attr
-                    && path.is1("doc")
-                    && let Some(result) = DocAtom::parse(tt).into_iter().find_map(|atom| {
-                        if let DocAtom::KeyValue { key, value } = atom
-                            && key == "html_root_url"
-                        {
-                            Some(value)
-                        } else {
-                            None
-                        }
-                    })
-                {
-                    ControlFlow::Break(result)
-                } else {
-                    ControlFlow::Continue(())
-                }
-            },
-        )
-    }
-
-    #[inline]
-    pub fn target_features(db: &dyn DefDatabase, owner: FunctionId) -> &FxHashSet<Symbol> {
-        if !AttrFlags::query(db, owner.into()).contains(AttrFlags::HAS_TARGET_FEATURE) {
-            return const { &FxHashSet::with_hasher(rustc_hash::FxBuildHasher) };
-        }
-
-        return target_features(db, owner);
-
-        #[salsa::tracked(returns(ref))]
-        fn target_features(db: &dyn DefDatabase, owner: FunctionId) -> FxHashSet<Symbol> {
-            let mut result = FxHashSet::default();
-            collect_attrs::<Infallible>(db, owner.into(), |attr| {
-                if let Meta::TokenTree { path, tt } = attr
-                    && path.is1("target_feature")
-                    && let mut tt = TokenTreeChildren::new(&tt)
-                    && let Some(NodeOrToken::Token(enable_ident)) = tt.next()
-                    && enable_ident.text() == "enable"
-                    && let Some(NodeOrToken::Token(eq_token)) = tt.next()
-                    && eq_token.kind() == T![=]
-                    && let Some(NodeOrToken::Token(features)) = tt.next()
-                    && let Some(features) = ast::String::cast(features)
-                    && let Ok(features) = features.value()
-                    && tt.next().is_none()
-                {
-                    result.extend(features.split(',').map(Symbol::intern));
-                }
-                ControlFlow::Continue(())
-            });
-            result.shrink_to_fit();
-            result
-        }
-    }
-
-    #[inline]
-    pub fn rustc_layout_scalar_valid_range(
-        db: &dyn DefDatabase,
-        owner: AdtId,
-    ) -> RustcLayoutScalarValidRange {
-        if !AttrFlags::query(db, owner.into()).contains(AttrFlags::RUSTC_LAYOUT_SCALAR_VALID_RANGE)
-        {
-            return RustcLayoutScalarValidRange::default();
-        }
-
-        return rustc_layout_scalar_valid_range(db, owner);
-
-        #[salsa::tracked]
-        fn rustc_layout_scalar_valid_range(
-            db: &dyn DefDatabase,
-            owner: AdtId,
-        ) -> RustcLayoutScalarValidRange {
-            let mut result = RustcLayoutScalarValidRange::default();
-            collect_attrs::<Infallible>(db, owner.into(), |attr| {
-                if let Meta::TokenTree { path, tt } = attr
-                    && (path.is1("rustc_layout_scalar_valid_range_start")
-                        || path.is1("rustc_layout_scalar_valid_range_end"))
-                    && let tt = TokenTreeChildren::new(&tt)
-                    && let Ok(NodeOrToken::Token(value)) = tt.exactly_one()
-                    && let Some(value) = ast::IntNumber::cast(value)
-                    && let Ok(value) = value.value()
-                {
-                    if path.is1("rustc_layout_scalar_valid_range_start") {
-                        result.start = Some(value)
-                    } else {
-                        result.end = Some(value);
-                    }
-                }
-                ControlFlow::Continue(())
-            });
-            result
-        }
-    }
-
-    #[inline]
-    pub fn doc_aliases(self, db: &dyn DefDatabase, owner: Either<AttrDefId, FieldId>) -> &[Symbol] {
-        if !self.contains(AttrFlags::HAS_DOC_ALIASES) {
-            return &[];
-        }
-        return match owner {
-            Either::Left(it) => doc_aliases(db, it),
-            Either::Right(field) => fields_doc_aliases(db, field.parent)
-                .get(field.local_id)
-                .map(|it| &**it)
-                .unwrap_or_default(),
-        };
-
-        #[salsa::tracked(returns(ref))]
-        fn doc_aliases(db: &dyn DefDatabase, owner: AttrDefId) -> Box<[Symbol]> {
-            let mut result = Vec::new();
-            collect_attrs::<Infallible>(db, owner, |attr| extract_doc_aliases(&mut result, attr));
-            result.into_boxed_slice()
-        }
-
-        #[salsa::tracked(returns(ref))]
-        fn fields_doc_aliases(
-            db: &dyn DefDatabase,
-            variant: VariantId,
-        ) -> ArenaMap<LocalFieldId, Box<[Symbol]>> {
-            collect_field_attrs(db, variant, |cfg_options, field| {
-                let mut result = Vec::new();
-                expand_cfg_attr(
-                    field.value.attrs(),
-                    || cfg_options,
-                    |attr, _, _, _| extract_doc_aliases(&mut result, attr),
-                );
-                result.into_boxed_slice()
-            })
-        }
-    }
-
-    #[inline]
-    pub fn cfgs(self, db: &dyn DefDatabase, owner: Either<AttrDefId, FieldId>) -> Option<&CfgExpr> {
-        if !self.contains(AttrFlags::HAS_CFG) {
-            return None;
-        }
-        return match owner {
-            Either::Left(it) => cfgs(db, it).as_ref(),
-            Either::Right(field) => {
-                fields_cfgs(db, field.parent).get(field.local_id).and_then(|it| it.as_ref())
-            }
-        };
-
-        // We LRU this query because it is only used by IDE.
-        #[salsa::tracked(returns(ref), lru = 250)]
-        fn cfgs(db: &dyn DefDatabase, owner: AttrDefId) -> Option<CfgExpr> {
-            let mut result = Vec::new();
-            collect_attrs::<Infallible>(db, owner, |attr| extract_cfgs(&mut result, attr));
-            match result.len() {
-                0 => None,
-                1 => result.into_iter().next(),
-                _ => Some(CfgExpr::All(result.into_boxed_slice())),
-            }
-        }
-
-        // We LRU this query because it is only used by IDE.
-        #[salsa::tracked(returns(ref), lru = 50)]
-        fn fields_cfgs(
-            db: &dyn DefDatabase,
-            variant: VariantId,
-        ) -> ArenaMap<LocalFieldId, Option<CfgExpr>> {
-            collect_field_attrs(db, variant, |cfg_options, field| {
-                let mut result = Vec::new();
-                expand_cfg_attr(
-                    field.value.attrs(),
-                    || cfg_options,
-                    |attr, _, _, _| extract_cfgs(&mut result, attr),
-                );
-                match result.len() {
-                    0 => None,
-                    1 => result.into_iter().next(),
-                    _ => Some(CfgExpr::All(result.into_boxed_slice())),
-                }
-            })
-        }
-    }
-
-    #[inline]
-    pub fn doc_keyword(db: &dyn DefDatabase, owner: InternedModuleId) -> Option<Symbol> {
-        if !AttrFlags::query(db, AttrDefId::ModuleId(owner)).contains(AttrFlags::HAS_DOC_KEYWORD) {
-            return None;
-        }
-        return doc_keyword(db, owner);
-
-        #[salsa::tracked]
-        fn doc_keyword(db: &dyn DefDatabase, owner: InternedModuleId) -> Option<Symbol> {
-            collect_attrs(db, AttrDefId::ModuleId(owner), |attr| {
-                if let Meta::TokenTree { path, tt } = attr
-                    && path.is1("doc")
-                {
-                    for atom in DocAtom::parse(tt) {
-                        if let DocAtom::KeyValue { key, value } = atom
-                            && key == "keyword"
-                        {
-                            return ControlFlow::Break(Symbol::intern(&value));
-                        }
-                    }
-                }
-                ControlFlow::Continue(())
-            })
-        }
-    }
-
-    // We LRU this query because it is only used by IDE.
-    #[salsa::tracked(returns(ref), lru = 250)]
-    pub fn docs(db: &dyn DefDatabase, owner: AttrDefId) -> Option<Box<Docs>> {
-        let (source, outer_mod_decl, krate) = attrs_source(db, owner);
-        let inner_attrs_node = source.value.inner_attributes_node();
-        extract_docs(&|| krate.cfg_options(db), source, outer_mod_decl, inner_attrs_node)
-    }
-
-    #[inline]
-    pub fn field_docs(db: &dyn DefDatabase, field: FieldId) -> Option<&Docs> {
-        return fields_docs(db, field.parent).get(field.local_id).and_then(|it| it.as_deref());
-
-        // We LRU this query because it is only used by IDE.
-        #[salsa::tracked(returns(ref), lru = 50)]
-        pub fn fields_docs(
-            db: &dyn DefDatabase,
-            variant: VariantId,
-        ) -> ArenaMap<LocalFieldId, Option<Box<Docs>>> {
-            collect_field_attrs(db, variant, |cfg_options, field| {
-                extract_docs(&|| cfg_options, field, None, None)
-            })
-        }
-    }
-
-    #[inline]
-    pub fn derive_info(db: &dyn DefDatabase, owner: MacroId) -> Option<&DeriveInfo> {
-        if !AttrFlags::query(db, owner.into()).contains(AttrFlags::IS_DERIVE_OR_BUILTIN_MACRO) {
-            return None;
-        }
-
-        return derive_info(db, owner).as_ref();
-
-        #[salsa::tracked(returns(ref))]
-        fn derive_info(db: &dyn DefDatabase, owner: MacroId) -> Option<DeriveInfo> {
-            collect_attrs(db, owner.into(), |attr| {
-                if let Meta::TokenTree { path, tt } = attr
-                    && path.segments.len() == 1
-                    && matches!(
-                        path.segments[0].text(),
-                        "proc_macro_derive" | "rustc_builtin_macro"
-                    )
-                    && let mut tt = TokenTreeChildren::new(&tt)
-                    && let Some(NodeOrToken::Token(trait_name)) = tt.next()
-                    && trait_name.kind().is_any_identifier()
-                {
-                    let trait_name = Symbol::intern(trait_name.text());
-
-                    let helpers = if let Some(NodeOrToken::Token(comma)) = tt.next()
-                        && comma.kind() == T![,]
-                        && let Some(NodeOrToken::Token(attributes)) = tt.next()
-                        && attributes.text() == "attributes"
-                        && let Some(NodeOrToken::Node(attributes)) = tt.next()
-                    {
-                        attributes
-                            .syntax()
-                            .children_with_tokens()
-                            .filter_map(NodeOrToken::into_token)
-                            .filter(|it| it.kind().is_any_identifier())
-                            .map(|it| Symbol::intern(it.text()))
-                            .collect::<Box<[_]>>()
-                    } else {
-                        Box::new([])
-                    };
-
-                    ControlFlow::Break(DeriveInfo { trait_name, helpers })
-                } else {
-                    ControlFlow::Continue(())
-                }
-            })
-        }
-    }
-}
-
-fn merge_repr(this: &mut ReprOptions, other: ReprOptions) {
-    let ReprOptions { int, align, pack, flags, field_shuffle_seed: _ } = this;
-    flags.insert(other.flags);
-    *align = (*align).max(other.align);
-    *pack = match (*pack, other.pack) {
-        (Some(pack), None) | (None, Some(pack)) => Some(pack),
-        _ => (*pack).min(other.pack),
-    };
-    if other.int.is_some() {
-        *int = other.int;
-    }
-}
-
-fn parse_repr_tt(tt: &ast::TokenTree) -> Option<ReprOptions> {
-    use crate::builtin_type::{BuiltinInt, BuiltinUint};
-    use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions};
-
-    let mut tts = TokenTreeChildren::new(tt).peekable();
-
-    let mut acc = ReprOptions::default();
-    while let Some(tt) = tts.next() {
-        let NodeOrToken::Token(ident) = tt else {
-            continue;
-        };
-        if !ident.kind().is_any_identifier() {
-            continue;
-        }
-        let repr = match ident.text() {
-            "packed" => {
-                let pack = if let Some(NodeOrToken::Node(tt)) = tts.peek() {
-                    let tt = tt.clone();
-                    tts.next();
-                    let mut tt_iter = TokenTreeChildren::new(&tt);
-                    if let Some(NodeOrToken::Token(lit)) = tt_iter.next()
-                        && let Some(lit) = ast::IntNumber::cast(lit)
-                        && let Ok(lit) = lit.value()
-                        && let Ok(lit) = lit.try_into()
-                    {
-                        lit
-                    } else {
-                        0
-                    }
-                } else {
-                    0
-                };
-                let pack = Some(Align::from_bytes(pack).unwrap_or(Align::ONE));
-                ReprOptions { pack, ..Default::default() }
-            }
-            "align" => {
-                let mut align = None;
-                if let Some(NodeOrToken::Node(tt)) = tts.peek() {
-                    let tt = tt.clone();
-                    tts.next();
-                    let mut tt_iter = TokenTreeChildren::new(&tt);
-                    if let Some(NodeOrToken::Token(lit)) = tt_iter.next()
-                        && let Some(lit) = ast::IntNumber::cast(lit)
-                        && let Ok(lit) = lit.value()
-                        && let Ok(lit) = lit.try_into()
-                    {
-                        align = Align::from_bytes(lit).ok();
-                    }
-                }
-                ReprOptions { align, ..Default::default() }
-            }
-            "C" => ReprOptions { flags: ReprFlags::IS_C, ..Default::default() },
-            "transparent" => ReprOptions { flags: ReprFlags::IS_TRANSPARENT, ..Default::default() },
-            "simd" => ReprOptions { flags: ReprFlags::IS_SIMD, ..Default::default() },
-            repr => {
-                let mut int = None;
-                if let Some(builtin) = BuiltinInt::from_suffix(repr)
-                    .map(Either::Left)
-                    .or_else(|| BuiltinUint::from_suffix(repr).map(Either::Right))
-                {
-                    int = Some(match builtin {
-                        Either::Left(bi) => match bi {
-                            BuiltinInt::Isize => IntegerType::Pointer(true),
-                            BuiltinInt::I8 => IntegerType::Fixed(Integer::I8, true),
-                            BuiltinInt::I16 => IntegerType::Fixed(Integer::I16, true),
-                            BuiltinInt::I32 => IntegerType::Fixed(Integer::I32, true),
-                            BuiltinInt::I64 => IntegerType::Fixed(Integer::I64, true),
-                            BuiltinInt::I128 => IntegerType::Fixed(Integer::I128, true),
-                        },
-                        Either::Right(bu) => match bu {
-                            BuiltinUint::Usize => IntegerType::Pointer(false),
-                            BuiltinUint::U8 => IntegerType::Fixed(Integer::I8, false),
-                            BuiltinUint::U16 => IntegerType::Fixed(Integer::I16, false),
-                            BuiltinUint::U32 => IntegerType::Fixed(Integer::I32, false),
-                            BuiltinUint::U64 => IntegerType::Fixed(Integer::I64, false),
-                            BuiltinUint::U128 => IntegerType::Fixed(Integer::I128, false),
-                        },
-                    });
-                }
-                ReprOptions { int, ..Default::default() }
-            }
-        };
-        merge_repr(&mut acc, repr);
-    }
-
-    Some(acc)
-}
-
-fn parse_rustc_legacy_const_generics(tt: ast::TokenTree) -> Box<[u32]> {
-    TokenTreeChildren::new(&tt)
-        .filter_map(|param| {
-            ast::IntNumber::cast(param.into_token()?)?.value().ok()?.try_into().ok()
-        })
-        .collect()
-}
-
-#[derive(Debug)]
-enum DocAtom {
-    /// eg. `#[doc(hidden)]`
-    Flag(SmolStr),
-    /// eg. `#[doc(alias = "it")]`
-    ///
-    /// Note that a key can have multiple values that are all considered "active" at the same time.
-    /// For example, `#[doc(alias = "x")]` and `#[doc(alias = "y")]`.
-    KeyValue { key: SmolStr, value: SmolStr },
-    /// eg. `#[doc(alias("x", "y"))]`
-    Alias(Vec<SmolStr>),
-}
-
-impl DocAtom {
-    fn parse(tt: ast::TokenTree) -> SmallVec<[DocAtom; 1]> {
-        let mut iter = TokenTreeChildren::new(&tt).peekable();
-        let mut result = SmallVec::new();
-        while iter.peek().is_some() {
-            if let Some(expr) = next_doc_expr(&mut iter) {
-                result.push(expr);
-            }
-        }
-        result
-    }
-}
-
-fn next_doc_expr(it: &mut Peekable<TokenTreeChildren>) -> Option<DocAtom> {
-    let name = match it.next() {
-        Some(NodeOrToken::Token(token)) if token.kind().is_any_identifier() => {
-            SmolStr::new(token.text())
-        }
-        _ => return None,
-    };
-
-    let ret = match it.peek() {
-        Some(NodeOrToken::Token(eq)) if eq.kind() == T![=] => {
-            it.next();
-            if let Some(NodeOrToken::Token(value)) = it.next()
-                && let Some(value) = ast::String::cast(value)
-                && let Ok(value) = value.value()
-            {
-                DocAtom::KeyValue { key: name, value: SmolStr::new(&*value) }
-            } else {
-                return None;
-            }
-        }
-        Some(NodeOrToken::Node(subtree)) => {
-            if name != "alias" {
-                return None;
-            }
-            let aliases = TokenTreeChildren::new(subtree)
-                .filter_map(|alias| {
-                    Some(SmolStr::new(&*ast::String::cast(alias.into_token()?)?.value().ok()?))
-                })
-                .collect();
-            it.next();
-            DocAtom::Alias(aliases)
-        }
-        _ => DocAtom::Flag(name),
-    };
-    Some(ret)
-}
-
-#[cfg(test)]
-mod tests {
-    use expect_test::expect;
-    use hir_expand::InFile;
-    use test_fixture::WithFixture;
-    use tt::{TextRange, TextSize};
-
-    use crate::attrs::IsInnerDoc;
-    use crate::{attrs::Docs, test_db::TestDB};
-
-    #[test]
-    fn docs() {
-        let (_db, file_id) = TestDB::with_single_file("");
-        let mut docs = Docs {
-            docs: String::new(),
-            docs_source_map: Vec::new(),
-            outline_mod: None,
-            inline_file: file_id.into(),
-            prefix_len: TextSize::new(0),
-            inline_inner_docs_start: None,
-            outline_inner_docs_start: None,
-        };
-        let mut indent = usize::MAX;
-
-        let outer = " foo\n\tbar  baz";
-        let mut ast_offset = TextSize::new(123);
-        for line in outer.split('\n') {
-            docs.extend_with_doc_str(line, ast_offset, &mut indent);
-            ast_offset += TextSize::of(line) + TextSize::of("\n");
-        }
-
-        docs.inline_inner_docs_start = Some(TextSize::of(&docs.docs));
-        ast_offset += TextSize::new(123);
-        let inner = " bar \n baz";
-        for line in inner.split('\n') {
-            docs.extend_with_doc_str(line, ast_offset, &mut indent);
-            ast_offset += TextSize::of(line) + TextSize::of("\n");
-        }
-
-        assert_eq!(indent, 1);
-        expect![[r#"
-            [
-                DocsSourceMapLine {
-                    string_offset: 0,
-                    ast_offset: 123,
-                },
-                DocsSourceMapLine {
-                    string_offset: 5,
-                    ast_offset: 128,
-                },
-                DocsSourceMapLine {
-                    string_offset: 15,
-                    ast_offset: 261,
-                },
-                DocsSourceMapLine {
-                    string_offset: 20,
-                    ast_offset: 267,
-                },
-            ]
-        "#]]
-        .assert_debug_eq(&docs.docs_source_map);
-
-        docs.remove_indent(indent, 0);
-
-        assert_eq!(docs.inline_inner_docs_start, Some(TextSize::new(13)));
-
-        assert_eq!(docs.docs, "foo\nbar  baz\nbar\nbaz\n");
-        expect![[r#"
-            [
-                DocsSourceMapLine {
-                    string_offset: 0,
-                    ast_offset: 124,
-                },
-                DocsSourceMapLine {
-                    string_offset: 4,
-                    ast_offset: 129,
-                },
-                DocsSourceMapLine {
-                    string_offset: 13,
-                    ast_offset: 262,
-                },
-                DocsSourceMapLine {
-                    string_offset: 17,
-                    ast_offset: 268,
-                },
-            ]
-        "#]]
-        .assert_debug_eq(&docs.docs_source_map);
-
-        docs.append(&docs.clone());
-        docs.prepend_str("prefix---");
-        assert_eq!(docs.docs, "prefix---foo\nbar  baz\nbar\nbaz\nfoo\nbar  baz\nbar\nbaz\n");
-        expect![[r#"
-            [
-                DocsSourceMapLine {
-                    string_offset: 0,
-                    ast_offset: 124,
-                },
-                DocsSourceMapLine {
-                    string_offset: 4,
-                    ast_offset: 129,
-                },
-                DocsSourceMapLine {
-                    string_offset: 13,
-                    ast_offset: 262,
-                },
-                DocsSourceMapLine {
-                    string_offset: 17,
-                    ast_offset: 268,
-                },
-                DocsSourceMapLine {
-                    string_offset: 21,
-                    ast_offset: 124,
-                },
-                DocsSourceMapLine {
-                    string_offset: 25,
-                    ast_offset: 129,
-                },
-                DocsSourceMapLine {
-                    string_offset: 34,
-                    ast_offset: 262,
-                },
-                DocsSourceMapLine {
-                    string_offset: 38,
-                    ast_offset: 268,
-                },
-            ]
-        "#]]
-        .assert_debug_eq(&docs.docs_source_map);
-
-        let range = |start, end| TextRange::new(TextSize::new(start), TextSize::new(end));
-        let in_file = |range| InFile::new(file_id.into(), range);
-        assert_eq!(docs.find_ast_range(range(0, 2)), None);
-        assert_eq!(docs.find_ast_range(range(8, 10)), None);
-        assert_eq!(
-            docs.find_ast_range(range(9, 10)),
-            Some((in_file(range(124, 125)), IsInnerDoc::No))
-        );
-        assert_eq!(docs.find_ast_range(range(20, 23)), None);
-        assert_eq!(
-            docs.find_ast_range(range(23, 25)),
-            Some((in_file(range(263, 265)), IsInnerDoc::Yes))
-        );
-    }
-}
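The removed `Docs` type above concatenates every doc comment and `#[doc = "..."]` string into one string, recording for each line both its offset in that string and its offset in the source; `find_ast_range` then maps a range of the concatenated string back to a source range, and `remove_indent` strips the common leading indentation while keeping that map consistent. The following is a minimal, self-contained sketch of just the offset-mapping idea, with simplified names and plain `usize` offsets in place of the crate's `TextSize`/`InFile` types; it is an illustration, not the removed implementation.

```rust
// Sketch only: simplified stand-in for the removed `Docs`/`DocsSourceMapLine` pair.
struct SourceMapLine {
    string_offset: usize, // where this line starts in the concatenated doc string
    ast_offset: usize,    // where the same text starts in the source file
}

struct DocsSketch {
    docs: String,
    source_map: Vec<SourceMapLine>, // sorted by `string_offset`
}

impl DocsSketch {
    /// Maps a range inside `docs` back to a range in the source file.
    /// Returns `None` when the range spans two doc lines, since such a range
    /// has no single contiguous counterpart in the source.
    fn find_ast_range(&self, start: usize, end: usize) -> Option<(usize, usize)> {
        // Index of the last line starting at or before `start`.
        let idx = self.source_map.partition_point(|l| l.string_offset <= start).checked_sub(1)?;
        let line = &self.source_map[idx];
        // If the next line starts before `end`, the range crosses a line boundary.
        if self.source_map.get(idx + 1).is_some_and(|next| next.string_offset < end) {
            return None;
        }
        let delta = line.ast_offset - line.string_offset;
        Some((start + delta, end + delta))
    }
}

fn main() {
    // Two doc lines: "foo\n" starts at source offset 100, "bar\n" at source offset 120.
    let docs = DocsSketch {
        docs: "foo\nbar\n".to_owned(),
        source_map: vec![
            SourceMapLine { string_offset: 0, ast_offset: 100 },
            SourceMapLine { string_offset: 4, ast_offset: 120 },
        ],
    };
    assert_eq!(&docs.docs[1..3], "oo");
    assert_eq!(docs.find_ast_range(1, 3), Some((101, 103)));
    assert_eq!(docs.find_ast_range(5, 7), Some((121, 123)));
    // A range covering the end of "foo" and the start of "bar" cannot be mapped back.
    assert_eq!(docs.find_ast_range(2, 6), None);
}
```

As in the removed code, the lookup is a binary search (`partition_point`) over a map kept sorted by string offset, so mapping back is logarithmic in the number of doc lines.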
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/db.rs b/src/tools/rust-analyzer/crates/hir-def/src/db.rs
index ad29900876727..4e1d598623abe 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/db.rs
@@ -1,21 +1,23 @@
 //! Defines database & queries for name resolution.
 use base_db::{Crate, RootQueryDb, SourceDatabase};
+use either::Either;
 use hir_expand::{
     EditionedFileId, HirFileId, InFile, Lookup, MacroCallId, MacroDefId, MacroDefKind,
     db::ExpandDatabase,
 };
+use intern::sym;
 use la_arena::ArenaMap;
+use syntax::{AstPtr, ast};
 use triomphe::Arc;
 
 use crate::{
-    AssocItemId, AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, CrateRootModuleId, DefWithBodyId,
-    EnumId, EnumLoc, EnumVariantId, EnumVariantLoc, ExternBlockId, ExternBlockLoc, ExternCrateId,
-    ExternCrateLoc, FunctionId, FunctionLoc, GenericDefId, HasModule, ImplId, ImplLoc,
-    InternedModuleId, LocalFieldId, Macro2Id, Macro2Loc, MacroExpander, MacroId, MacroRulesId,
-    MacroRulesLoc, MacroRulesLocFlags, ProcMacroId, ProcMacroLoc, StaticId, StaticLoc, StructId,
-    StructLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc,
-    VariantId,
-    attrs::AttrFlags,
+    AssocItemId, AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc,
+    EnumVariantId, EnumVariantLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, ExternCrateLoc,
+    FunctionId, FunctionLoc, GenericDefId, ImplId, ImplLoc, LocalFieldId, Macro2Id, Macro2Loc,
+    MacroExpander, MacroId, MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ProcMacroId,
+    ProcMacroLoc, StaticId, StaticLoc, StructId, StructLoc, TraitId, TraitLoc, TypeAliasId,
+    TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc, VariantId,
+    attr::{Attrs, AttrsWithOwner},
     expr_store::{
         Body, BodySourceMap, ExpressionStore, ExpressionStoreSourceMap, scope::ExprScopes,
     },
@@ -28,6 +30,7 @@ use crate::{
         ConstSignature, EnumSignature, FunctionSignature, ImplSignature, StaticSignature,
         StructSignature, TraitSignature, TypeAliasSignature, UnionSignature,
     },
+    tt,
     visibility::{self, Visibility},
 };
 
@@ -235,11 +238,28 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
         def: GenericDefId,
     ) -> (Arc<GenericParams>, Arc<ExpressionStore>, Arc<ExpressionStoreSourceMap>);
 
-    // FIXME: Get rid of this, call `AttrFlags::lang_item()` directly.
+    // region:attrs
+
+    #[salsa::invoke(Attrs::fields_attrs_query)]
+    fn fields_attrs(&self, def: VariantId) -> Arc<ArenaMap<LocalFieldId, Attrs>>;
+
+    // should this really be a query?
+    #[salsa::invoke(crate::attr::fields_attrs_source_map)]
+    fn fields_attrs_source_map(
+        &self,
+        def: VariantId,
+    ) -> Arc<ArenaMap<LocalFieldId, AstPtr<Either<ast::TupleField, ast::RecordField>>>>;
+
+    // FIXME: Make this a non-interned query.
+    #[salsa::invoke_interned(AttrsWithOwner::attrs_query)]
+    fn attrs(&self, def: AttrDefId) -> Attrs;
+
     #[salsa::transparent]
     #[salsa::invoke(lang_item::lang_attr)]
     fn lang_attr(&self, def: AttrDefId) -> Option<LangItem>;
 
+    // endregion:attrs
+
     #[salsa::invoke(ImportMap::import_map_query)]
     fn import_map(&self, krate: Crate) -> Arc<ImportMap>;
 
@@ -283,9 +303,36 @@ fn include_macro_invoc(
 }
 
 fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: Crate) -> bool {
-    let root_module = CrateRootModuleId::from(crate_id).module(db);
-    let attrs = AttrFlags::query(db, AttrDefId::ModuleId(InternedModuleId::new(db, root_module)));
-    attrs.contains(AttrFlags::IS_NO_STD)
+    let file = crate_id.data(db).root_file_id(db);
+    let item_tree = db.file_item_tree(file.into());
+    let attrs = item_tree.top_level_raw_attrs();
+    for attr in &**attrs {
+        match attr.path().as_ident() {
+            Some(ident) if *ident == sym::no_std => return true,
+            Some(ident) if *ident == sym::cfg_attr => {}
+            _ => continue,
+        }
+
+        // This is a `cfg_attr`; check if it could possibly expand to `no_std`.
+        // Syntax is: `#[cfg_attr(condition(cfg, style), attr0, attr1, <...>)]`
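+        // For example, `#[cfg_attr(feature = "custom-std", no_std)]` should count as
+        // supporting `no_std`; the cfg condition itself is not evaluated here.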
+        let tt = match attr.token_tree_value() {
+            Some(tt) => tt.token_trees(),
+            None => continue,
+        };
+
+        let segments =
+            tt.split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(p)) if p.char == ','));
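+        // The first comma-separated segment is the cfg condition; only the segments
+        // after it name the attributes that `cfg_attr` would expand to.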
+        for output in segments.skip(1) {
+            match output.flat_tokens() {
+                [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.sym == sym::no_std => {
+                    return true;
+                }
+                _ => {}
+            }
+        }
+    }
+
+    false
 }
 
 fn macro_def(db: &dyn DefDatabase, id: MacroId) -> MacroDefId {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/expander.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/expander.rs
index 6a2f06b0a6f68..23b9712d1e6c1 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/expander.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/expander.rs
@@ -17,10 +17,11 @@ use syntax::{AstNode, Parse, ast};
 use triomphe::Arc;
 use tt::TextRange;
 
-use crate::{
-    MacroId, UnresolvedMacro, attrs::AttrFlags, db::DefDatabase, expr_store::HygieneId,
-    macro_call_as_call_id, nameres::DefMap,
-};
+use crate::attr::Attrs;
+use crate::expr_store::HygieneId;
+use crate::macro_call_as_call_id;
+use crate::nameres::DefMap;
+use crate::{MacroId, UnresolvedMacro, db::DefDatabase};
 
 #[derive(Debug)]
 pub(super) struct Expander {
@@ -69,10 +70,11 @@ impl Expander {
 
     pub(super) fn is_cfg_enabled(
         &self,
-        owner: &dyn HasAttrs,
+        db: &dyn DefDatabase,
+        has_attrs: &dyn HasAttrs,
         cfg_options: &CfgOptions,
     ) -> Result<(), cfg::CfgExpr> {
-        AttrFlags::is_cfg_enabled_for(owner, cfg_options)
+        Attrs::is_cfg_enabled_for(db, has_attrs, self.span_map.as_ref(), cfg_options)
     }
 
     pub(super) fn call_syntax_ctx(&self) -> SyntaxContext {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs
index fbe0b1ab95965..3794cb18e9360 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs
@@ -12,6 +12,7 @@ use cfg::CfgOptions;
 use either::Either;
 use hir_expand::{
     HirFileId, InFile, MacroDefId,
+    mod_path::tool_path,
     name::{AsName, Name},
     span_map::SpanMapRef,
 };
@@ -33,7 +34,6 @@ use tt::TextRange;
 use crate::{
     AdtId, BlockId, BlockLoc, DefWithBodyId, FunctionId, GenericDefId, ImplId, MacroId,
     ModuleDefId, ModuleId, TraitId, TypeAliasId, UnresolvedMacro,
-    attrs::AttrFlags,
     builtin_type::BuiltinUint,
     db::DefDatabase,
     expr_store::{
@@ -87,16 +87,14 @@ pub(super) fn lower_body(
     let mut params = vec![];
     let mut collector = ExprCollector::new(db, module, current_file_id);
 
-    let skip_body = AttrFlags::query(
-        db,
-        match owner {
-            DefWithBodyId::FunctionId(it) => it.into(),
-            DefWithBodyId::StaticId(it) => it.into(),
-            DefWithBodyId::ConstId(it) => it.into(),
-            DefWithBodyId::VariantId(it) => it.into(),
-        },
-    )
-    .contains(AttrFlags::RUST_ANALYZER_SKIP);
+    let skip_body = match owner {
+        DefWithBodyId::FunctionId(it) => db.attrs(it.into()),
+        DefWithBodyId::StaticId(it) => db.attrs(it.into()),
+        DefWithBodyId::ConstId(it) => db.attrs(it.into()),
+        DefWithBodyId::VariantId(it) => db.attrs(it.into()),
+    }
+    .rust_analyzer_tool()
+    .any(|attr| *attr.path() == tool_path![skip]);
     // If #[rust_analyzer::skip] annotated, only construct enough information for the signature
     // and skip the body.
     if skip_body {
@@ -2487,7 +2485,7 @@ impl ExprCollector<'_> {
     /// Returns `None` (and emits diagnostics) when `owner` if `#[cfg]`d out, and `Some(())` when
     /// not.
     fn check_cfg(&mut self, owner: &dyn ast::HasAttrs) -> bool {
-        let enabled = self.expander.is_cfg_enabled(owner, self.cfg_options);
+        let enabled = self.expander.is_cfg_enabled(self.db, owner, self.cfg_options);
         match enabled {
             Ok(()) => true,
             Err(cfg) => {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs
index e386e8d0c596c..5b9da3c5e6680 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs
@@ -12,8 +12,7 @@ use span::Edition;
 use syntax::ast::HasName;
 
 use crate::{
-    AdtId, DefWithBodyId, FunctionId, GenericDefId, StructId, TypeParamId, VariantId,
-    attrs::AttrFlags,
+    AdtId, DefWithBodyId, GenericDefId, TypeParamId, VariantId,
     expr_store::path::{GenericArg, GenericArgs},
     hir::{
         Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, Movability, Statement,
@@ -168,7 +167,7 @@ pub fn print_signature(db: &dyn DefDatabase, owner: GenericDefId, edition: Editi
         GenericDefId::AdtId(id) => match id {
             AdtId::StructId(id) => {
                 let signature = db.struct_signature(id);
-                print_struct(db, id, &signature, edition)
+                print_struct(db, &signature, edition)
             }
             AdtId::UnionId(id) => {
                 format!("unimplemented {id:?}")
@@ -180,7 +179,7 @@ pub fn print_signature(db: &dyn DefDatabase, owner: GenericDefId, edition: Editi
         GenericDefId::ConstId(id) => format!("unimplemented {id:?}"),
         GenericDefId::FunctionId(id) => {
             let signature = db.function_signature(id);
-            print_function(db, id, &signature, edition)
+            print_function(db, &signature, edition)
         }
         GenericDefId::ImplId(id) => format!("unimplemented {id:?}"),
         GenericDefId::StaticId(id) => format!("unimplemented {id:?}"),
@@ -209,8 +208,7 @@ pub fn print_path(
 
 pub fn print_struct(
     db: &dyn DefDatabase,
-    id: StructId,
-    StructSignature { name, generic_params, store, flags, shape }: &StructSignature,
+    StructSignature { name, generic_params, store, flags, shape, repr }: &StructSignature,
     edition: Edition,
 ) -> String {
     let mut p = Printer {
@@ -221,7 +219,7 @@ pub fn print_struct(
         line_format: LineFormat::Newline,
         edition,
     };
-    if let Some(repr) = AttrFlags::repr(db, id.into()) {
+    if let Some(repr) = repr {
         if repr.c() {
             wln!(p, "#[repr(C)]");
         }
@@ -257,8 +255,7 @@ pub fn print_struct(
 
 pub fn print_function(
     db: &dyn DefDatabase,
-    id: FunctionId,
-    signature @ FunctionSignature {
+    FunctionSignature {
         name,
         generic_params,
         store,
@@ -266,10 +263,10 @@ pub fn print_function(
         ret_type,
         abi,
         flags,
+        legacy_const_generics_indices,
     }: &FunctionSignature,
     edition: Edition,
 ) -> String {
-    let legacy_const_generics_indices = signature.legacy_const_generics_indices(db, id);
     let mut p = Printer {
         db,
         store,
@@ -301,7 +298,7 @@ pub fn print_function(
         if i != 0 {
             w!(p, ", ");
         }
-        if legacy_const_generics_indices.is_some_and(|idx| idx.contains(&(i as u32))) {
+        if legacy_const_generics_indices.as_ref().is_some_and(|idx| idx.contains(&(i as u32))) {
             w!(p, "const: ");
         }
         p.print_type_ref(*param);
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body/block.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body/block.rs
index 0cb9325b502e2..c7707378a5b31 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body/block.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body/block.rs
@@ -189,8 +189,8 @@ fn f() {
 }
     "#,
         expect![[r#"
-            BlockId(3801) in BlockRelativeModuleId { block: Some(BlockId(3800)), local_id: Idx::<ModuleData>(1) }
-            BlockId(3800) in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
+            BlockId(3c01) in BlockRelativeModuleId { block: Some(BlockId(3c00)), local_id: Idx::<ModuleData>(1) }
+            BlockId(3c00) in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
             crate scope
         "#]],
     );
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/signatures.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/signatures.rs
index 2dac4e7fc84b6..b68674c7a74f4 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/signatures.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/signatures.rs
@@ -38,24 +38,14 @@ fn lower_and_print(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expe
         match def {
             GenericDefId::AdtId(adt_id) => match adt_id {
                 crate::AdtId::StructId(struct_id) => {
-                    out += &print_struct(
-                        &db,
-                        struct_id,
-                        &db.struct_signature(struct_id),
-                        Edition::CURRENT,
-                    );
+                    out += &print_struct(&db, &db.struct_signature(struct_id), Edition::CURRENT);
                 }
                 crate::AdtId::UnionId(_id) => (),
                 crate::AdtId::EnumId(_id) => (),
             },
             GenericDefId::ConstId(_id) => (),
             GenericDefId::FunctionId(function_id) => {
-                out += &print_function(
-                    &db,
-                    function_id,
-                    &db.function_signature(function_id),
-                    Edition::CURRENT,
-                )
+                out += &print_function(&db, &db.function_signature(function_id), Edition::CURRENT)
             }
 
             GenericDefId::ImplId(_id) => (),
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
index 67cf466276c5d..f31f355cfa5d7 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
@@ -13,8 +13,7 @@ use stdx::format_to;
 use triomphe::Arc;
 
 use crate::{
-    AssocItemId, AttrDefId, Complete, FxIndexMap, InternedModuleId, ModuleDefId, ModuleId, TraitId,
-    attrs::AttrFlags,
+    AssocItemId, AttrDefId, Complete, FxIndexMap, ModuleDefId, ModuleId, TraitId,
     db::DefDatabase,
     item_scope::{ImportOrExternCrate, ItemInNs},
     nameres::{DefMap, assoc::TraitItems, crate_def_map},
@@ -166,34 +165,17 @@ impl ImportMap {
                         }
                     } else {
                         match item {
-                            ItemInNs::Types(id) | ItemInNs::Values(id) => match id {
-                                ModuleDefId::ModuleId(it) => {
-                                    Some(AttrDefId::ModuleId(InternedModuleId::new(db, it)))
-                                }
-                                ModuleDefId::FunctionId(it) => Some(it.into()),
-                                ModuleDefId::AdtId(it) => Some(it.into()),
-                                ModuleDefId::EnumVariantId(it) => Some(it.into()),
-                                ModuleDefId::ConstId(it) => Some(it.into()),
-                                ModuleDefId::StaticId(it) => Some(it.into()),
-                                ModuleDefId::TraitId(it) => Some(it.into()),
-                                ModuleDefId::TypeAliasId(it) => Some(it.into()),
-                                ModuleDefId::MacroId(it) => Some(it.into()),
-                                ModuleDefId::BuiltinType(_) => None,
-                            },
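+                            // `try_into` fails only for `ModuleDefId::BuiltinType`, which carries
+                            // no attributes, so `.ok()` turns it into `None`.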
+                            ItemInNs::Types(id) | ItemInNs::Values(id) => id.try_into().ok(),
                             ItemInNs::Macros(id) => Some(id.into()),
                         }
                     };
                     let (is_doc_hidden, is_unstable, do_not_complete) = match attr_id {
                         None => (false, false, Complete::Yes),
                         Some(attr_id) => {
-                            let attrs = AttrFlags::query(db, attr_id);
+                            let attrs = db.attrs(attr_id);
                             let do_not_complete =
-                                Complete::extract(matches!(attr_id, AttrDefId::TraitId(_)), attrs);
-                            (
-                                attrs.contains(AttrFlags::IS_DOC_HIDDEN),
-                                attrs.contains(AttrFlags::IS_UNSTABLE),
-                                do_not_complete,
-                            )
+                                Complete::extract(matches!(attr_id, AttrDefId::TraitId(_)), &attrs);
+                            (attrs.has_doc_hidden(), attrs.is_unstable(), do_not_complete)
                         }
                     };
 
@@ -257,15 +239,15 @@ impl ImportMap {
             };
 
             let attr_id = item.into();
-            let attrs = AttrFlags::query(db, attr_id);
+            let attrs = &db.attrs(attr_id);
             let item_do_not_complete = Complete::extract(false, attrs);
             let do_not_complete =
                 Complete::for_trait_item(trait_import_info.complete, item_do_not_complete);
             let assoc_item_info = ImportInfo {
                 container: trait_import_info.container,
                 name: assoc_item_name.clone(),
-                is_doc_hidden: attrs.contains(AttrFlags::IS_DOC_HIDDEN),
-                is_unstable: attrs.contains(AttrFlags::IS_UNSTABLE),
+                is_doc_hidden: attrs.has_doc_hidden(),
+                is_unstable: attrs.is_unstable(),
                 complete: do_not_complete,
             };
 
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
index 2a104fff2b92c..f35df8d3a7e11 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
@@ -30,7 +30,6 @@
 //! In general, any item in the `ItemTree` stores its `AstId`, which allows mapping it back to its
 //! surface syntax.
 
-mod attrs;
 mod lower;
 mod pretty;
 #[cfg(test)]
@@ -44,8 +43,10 @@ use std::{
 };
 
 use ast::{AstNode, StructKind};
+use base_db::Crate;
 use hir_expand::{
     ExpandTo, HirFileId,
+    attrs::RawAttrs,
     mod_path::{ModPath, PathKind},
     name::Name,
 };
@@ -58,12 +59,9 @@ use syntax::{SyntaxKind, ast, match_ast};
 use thin_vec::ThinVec;
 use triomphe::Arc;
 
-use crate::{BlockId, Lookup, db::DefDatabase};
+use crate::{BlockId, Lookup, attr::Attrs, db::DefDatabase};
 
-pub(crate) use crate::item_tree::{
-    attrs::*,
-    lower::{lower_use_tree, visibility_from_ast},
-};
+pub(crate) use crate::item_tree::lower::{lower_use_tree, visibility_from_ast};
 
 #[derive(Copy, Clone, Eq, PartialEq)]
 pub(crate) struct RawVisibilityId(u32);
@@ -98,7 +96,7 @@ pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) ->
     let mut item_tree = match_ast! {
         match syntax {
             ast::SourceFile(file) => {
-                let top_attrs = ctx.lower_attrs(&file);
+                let top_attrs = RawAttrs::new(db, &file, ctx.span_map());
                 let mut item_tree = ctx.lower_module_items(&file);
                 item_tree.top_attrs = top_attrs;
                 item_tree
@@ -134,7 +132,7 @@ pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) ->
                     attrs: FxHashMap::default(),
                     small_data: FxHashMap::default(),
                     big_data: FxHashMap::default(),
-                    top_attrs: AttrsOrCfg::empty(),
+                    top_attrs: RawAttrs::EMPTY,
                     vis: ItemVisibilities { arena: ThinVec::new() },
                 })
             })
@@ -170,7 +168,7 @@ pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc
                     attrs: FxHashMap::default(),
                     small_data: FxHashMap::default(),
                     big_data: FxHashMap::default(),
-                    top_attrs: AttrsOrCfg::empty(),
+                    top_attrs: RawAttrs::EMPTY,
                     vis: ItemVisibilities { arena: ThinVec::new() },
                 })
             })
@@ -184,8 +182,8 @@ pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc
 #[derive(Debug, Default, Eq, PartialEq)]
 pub struct ItemTree {
     top_level: Box<[ModItemId]>,
-    top_attrs: AttrsOrCfg,
-    attrs: FxHashMap<FileAstId<ast::Item>, AttrsOrCfg>,
+    top_attrs: RawAttrs,
+    attrs: FxHashMap<FileAstId<ast::Item>, RawAttrs>,
     vis: ItemVisibilities,
     big_data: FxHashMap<FileAstId<ast::Item>, BigModItem>,
     small_data: FxHashMap<FileAstId<ast::Item>, SmallModItem>,
@@ -199,12 +197,26 @@ impl ItemTree {
     }
 
     /// Returns the inner attributes of the source file.
-    pub(crate) fn top_level_attrs(&self) -> &AttrsOrCfg {
+    pub(crate) fn top_level_raw_attrs(&self) -> &RawAttrs {
         &self.top_attrs
     }
 
-    pub(crate) fn attrs(&self, of: FileAstId<ast::Item>) -> Option<&AttrsOrCfg> {
-        self.attrs.get(&of)
+    /// Returns the inner attributes of the source file.
+    pub(crate) fn top_level_attrs(&self, db: &dyn DefDatabase, krate: Crate) -> Attrs {
+        Attrs::expand_cfg_attr(db, krate, self.top_attrs.clone())
+    }
+
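+    /// Returns the attributes of `of`, with `cfg_attr`s left unexpanded.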
+    pub(crate) fn raw_attrs(&self, of: FileAstId<ast::Item>) -> &RawAttrs {
+        self.attrs.get(&of).unwrap_or(&RawAttrs::EMPTY)
+    }
+
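+    /// Returns the attributes of `of`, with `cfg_attr`s expanded using `krate`'s cfg options.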
+    pub(crate) fn attrs(
+        &self,
+        db: &dyn DefDatabase,
+        krate: Crate,
+        of: FileAstId<ast::Item>,
+    ) -> Attrs {
+        Attrs::expand_cfg_attr(db, krate, self.raw_attrs(of).clone())
     }
 
     /// Returns a count of a few, expensive items.
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/attrs.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/attrs.rs
deleted file mode 100644
index 5c635a4b3831c..0000000000000
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/attrs.rs
+++ /dev/null
@@ -1,220 +0,0 @@
-//! Defines attribute helpers for name resolution.
-//!
-//! Notice we don't preserve all attributes for name resolution, to save space:
-//! for example, we skip doc comments (desugared to `#[doc = "..."]` attributes)
-//! and `#[inline]`. The filtered attributes are listed in [`hir_expand::attrs`].
-
-use std::{
-    borrow::Cow,
-    convert::Infallible,
-    ops::{self, ControlFlow},
-};
-
-use cfg::{CfgExpr, CfgOptions};
-use either::Either;
-use hir_expand::{
-    attrs::{Attr, AttrId, AttrInput, Meta, collect_item_tree_attrs},
-    mod_path::ModPath,
-    name::Name,
-    span_map::SpanMapRef,
-};
-use intern::{Interned, Symbol, sym};
-use syntax::{AstNode, T, ast};
-use syntax_bridge::DocCommentDesugarMode;
-use tt::token_to_literal;
-
-use crate::{db::DefDatabase, item_tree::lower::Ctx};
-
-#[derive(Debug, PartialEq, Eq)]
-pub(crate) enum AttrsOrCfg {
-    Enabled {
-        attrs: AttrsOwned,
-    },
-    /// This only collects the attributes up to the disabled `cfg` (this is what needed for crate-level attributes.)
-    CfgDisabled(Box<(CfgExpr, AttrsOwned)>),
-}
-
-impl Default for AttrsOrCfg {
-    #[inline]
-    fn default() -> Self {
-        AttrsOrCfg::Enabled { attrs: AttrsOwned(Box::new([])) }
-    }
-}
-
-impl AttrsOrCfg {
-    pub(crate) fn lower<'a>(
-        db: &dyn DefDatabase,
-        owner: &dyn ast::HasAttrs,
-        cfg_options: &dyn Fn() -> &'a CfgOptions,
-        span_map: SpanMapRef<'_>,
-    ) -> AttrsOrCfg {
-        let mut attrs = Vec::new();
-        let result =
-            collect_item_tree_attrs::(owner, cfg_options, |meta, container, _, _| {
-                // NOTE: We cannot early return from this function, *every* attribute must be pushed, otherwise we'll mess the `AttrId`
-                // tracking.
-                let (span, path_range, input) = match meta {
-                    Meta::NamedKeyValue { path_range, name: _, value } => {
-                        let span = span_map.span_for_range(path_range);
-                        let input = value.map(|value| {
-                            Box::new(AttrInput::Literal(token_to_literal(
-                                value.text(),
-                                span_map.span_for_range(value.text_range()),
-                            )))
-                        });
-                        (span, path_range, input)
-                    }
-                    Meta::TokenTree { path, tt } => {
-                        let span = span_map.span_for_range(path.range);
-                        let tt = syntax_bridge::syntax_node_to_token_tree(
-                            tt.syntax(),
-                            span_map,
-                            span,
-                            DocCommentDesugarMode::ProcMacro,
-                        );
-                        let input = Some(Box::new(AttrInput::TokenTree(tt)));
-                        (span, path.range, input)
-                    }
-                    Meta::Path { path } => {
-                        let span = span_map.span_for_range(path.range);
-                        (span, path.range, None)
-                    }
-                };
-
-                let path = container.token_at_offset(path_range.start()).right_biased().and_then(
-                    |first_path_token| {
-                        let is_abs = matches!(first_path_token.kind(), T![:] | T![::]);
-                        let segments =
-                            std::iter::successors(Some(first_path_token), |it| it.next_token())
-                                .take_while(|it| it.text_range().end() <= path_range.end())
-                                .filter(|it| it.kind().is_any_identifier());
-                        ModPath::from_tokens(
-                            db,
-                            &mut |range| span_map.span_for_range(range).ctx,
-                            is_abs,
-                            segments,
-                        )
-                    },
-                );
-                let path = path.unwrap_or_else(|| Name::missing().into());
-
-                attrs.push(Attr { path: Interned::new(path), input, ctxt: span.ctx });
-                ControlFlow::Continue(())
-            });
-        let attrs = AttrsOwned(attrs.into_boxed_slice());
-        match result {
-            Some(Either::Right(cfg)) => AttrsOrCfg::CfgDisabled(Box::new((cfg, attrs))),
-            None => AttrsOrCfg::Enabled { attrs },
-        }
-    }
-}
-
-#[derive(Debug, PartialEq, Eq)]
-pub(crate) struct AttrsOwned(Box<[Attr]>);
-
-#[derive(Debug, Clone, Copy)]
-pub(crate) struct Attrs<'a>(&'a [Attr]);
-
-impl ops::Deref for Attrs<'_> {
-    type Target = [Attr];
-
-    #[inline]
-    fn deref(&self) -> &Self::Target {
-        self.0
-    }
-}
-
-impl Ctx<'_> {
-    #[inline]
-    pub(super) fn lower_attrs(&self, owner: &dyn ast::HasAttrs) -> AttrsOrCfg {
-        AttrsOrCfg::lower(self.db, owner, &|| self.cfg_options(), self.span_map())
-    }
-}
-
-impl AttrsOwned {
-    #[inline]
-    pub(crate) fn as_ref(&self) -> Attrs<'_> {
-        Attrs(&self.0)
-    }
-}
-
-impl<'a> Attrs<'a> {
-    pub(crate) const EMPTY: Self = Attrs(&[]);
-
-    #[inline]
-    pub(crate) fn by_key(self, key: Symbol) -> AttrQuery<'a> {
-        AttrQuery { attrs: self, key }
-    }
-
-    #[inline]
-    pub(crate) fn iter(self) -> impl Iterator<Item = (AttrId, &'a Attr)> {
-        self.0.iter().enumerate().map(|(id, attr)| (AttrId::from_item_tree_index(id as u32), attr))
-    }
-
-    #[inline]
-    pub(crate) fn iter_after(
-        self,
-        after: Option<AttrId>,
-    ) -> impl Iterator<Item = (AttrId, &'a Attr)> {
-        let skip = after.map_or(0, |after| after.item_tree_index() + 1);
-        self.0[skip as usize..]
-            .iter()
-            .enumerate()
-            .map(move |(id, attr)| (AttrId::from_item_tree_index(id as u32 + skip), attr))
-    }
-
-    #[inline]
-    pub(crate) fn is_proc_macro(&self) -> bool {
-        self.by_key(sym::proc_macro).exists()
-    }
-
-    #[inline]
-    pub(crate) fn is_proc_macro_attribute(&self) -> bool {
-        self.by_key(sym::proc_macro_attribute).exists()
-    }
-}
-#[derive(Debug, Clone)]
-pub(crate) struct AttrQuery<'attr> {
-    attrs: Attrs<'attr>,
-    key: Symbol,
-}
-
-impl<'attr> AttrQuery<'attr> {
-    #[inline]
-    pub(crate) fn tt_values(self) -> impl Iterator {
-        self.attrs().filter_map(|attr| attr.token_tree_value())
-    }
-
-    #[inline]
-    pub(crate) fn string_value_with_span(self) -> Option<(&'attr Symbol, span::Span)> {
-        self.attrs().find_map(|attr| attr.string_value_with_span())
-    }
-
-    #[inline]
-    pub(crate) fn string_value_unescape(self) -> Option<Cow<'attr, str>> {
-        self.attrs().find_map(|attr| attr.string_value_unescape())
-    }
-
-    #[inline]
-    pub(crate) fn exists(self) -> bool {
-        self.attrs().next().is_some()
-    }
-
-    #[inline]
-    pub(crate) fn attrs(self) -> impl Iterator<Item = &'attr Attr> + Clone {
-        let key = self.key;
-        self.attrs.0.iter().filter(move |attr| attr.path.as_ident().is_some_and(|s| *s == key))
-    }
-}
-
-impl AttrsOrCfg {
-    #[inline]
-    pub(super) fn empty() -> Self {
-        AttrsOrCfg::Enabled { attrs: AttrsOwned(Box::new([])) }
-    }
-
-    #[inline]
-    pub(super) fn is_empty(&self) -> bool {
-        matches!(self, AttrsOrCfg::Enabled { attrs } if attrs.as_ref().is_empty())
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
index b50a75169158d..454e06399583c 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
@@ -1,9 +1,8 @@
 //! AST -> `ItemTree` lowering code.
 
-use std::cell::OnceCell;
+use std::{cell::OnceCell, collections::hash_map::Entry};
 
 use base_db::FxIndexSet;
-use cfg::CfgOptions;
 use hir_expand::{
     HirFileId,
     mod_path::PathKind,
@@ -23,19 +22,18 @@ use crate::{
     item_tree::{
         BigModItem, Const, Enum, ExternBlock, ExternCrate, FieldsShape, Function, Impl,
         ImportAlias, Interned, ItemTree, ItemTreeAstId, Macro2, MacroCall, MacroRules, Mod,
-        ModItemId, ModKind, ModPath, RawVisibility, RawVisibilityId, SmallModItem, Static, Struct,
-        StructKind, Trait, TypeAlias, Union, Use, UseTree, UseTreeKind, VisibilityExplicitness,
-        attrs::AttrsOrCfg,
+        ModItemId, ModKind, ModPath, RawAttrs, RawVisibility, RawVisibilityId, SmallModItem,
+        Static, Struct, StructKind, Trait, TypeAlias, Union, Use, UseTree, UseTreeKind,
+        VisibilityExplicitness,
     },
 };
 
 pub(super) struct Ctx<'a> {
-    pub(super) db: &'a dyn DefDatabase,
+    db: &'a dyn DefDatabase,
     tree: ItemTree,
     source_ast_id_map: Arc<AstIdMap>,
     span_map: OnceCell<SpanMap>,
     file: HirFileId,
-    cfg_options: OnceCell<&'a CfgOptions>,
     top_level: Vec<ModItemId>,
     visibilities: FxIndexSet<RawVisibility>,
 }
@@ -47,18 +45,12 @@ impl<'a> Ctx<'a> {
             tree: ItemTree::default(),
             source_ast_id_map: db.ast_id_map(file),
             file,
-            cfg_options: OnceCell::new(),
             span_map: OnceCell::new(),
             visibilities: FxIndexSet::default(),
             top_level: Vec::new(),
         }
     }
 
-    #[inline]
-    pub(super) fn cfg_options(&self) -> &'a CfgOptions {
-        self.cfg_options.get_or_init(|| self.file.krate(self.db).cfg_options(self.db))
-    }
-
     pub(super) fn span_map(&self) -> SpanMapRef<'_> {
         self.span_map.get_or_init(|| self.db.span_map(self.file)).as_ref()
     }
@@ -106,7 +98,7 @@ impl<'a> Ctx<'a> {
     }
 
     pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree {
-        self.tree.top_attrs = self.lower_attrs(block);
+        self.tree.top_attrs = RawAttrs::new(self.db, block, self.span_map());
         self.top_level = block
             .statements()
             .filter_map(|stmt| match stmt {
@@ -152,15 +144,22 @@ impl<'a> Ctx<'a> {
             // FIXME: Handle `global_asm!()`.
             ast::Item::AsmExpr(_) => return None,
         };
-        let attrs = self.lower_attrs(item);
+        let attrs = RawAttrs::new(self.db, item, self.span_map());
         self.add_attrs(mod_item.ast_id(), attrs);
 
         Some(mod_item)
     }
 
-    fn add_attrs(&mut self, item: FileAstId<ast::Item>, attrs: AttrsOrCfg) {
+    fn add_attrs(&mut self, item: FileAstId<ast::Item>, attrs: RawAttrs) {
         if !attrs.is_empty() {
-            self.tree.attrs.insert(item, attrs);
+            match self.tree.attrs.entry(item) {
+                Entry::Occupied(mut entry) => {
+                    *entry.get_mut() = entry.get().merge(attrs);
+                }
+                Entry::Vacant(entry) => {
+                    entry.insert(attrs);
+                }
+            }
         }
     }
 
@@ -353,7 +352,7 @@ impl<'a> Ctx<'a> {
                         ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(ty)?.into(),
                         ast::ExternItem::MacroCall(call) => self.lower_macro_call(call)?.into(),
                     };
-                    let attrs = self.lower_attrs(&item);
+                    let attrs = RawAttrs::new(self.db, &item, self.span_map());
                     self.add_attrs(mod_item.ast_id(), attrs);
                     Some(mod_item)
                 })
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
index 66a2d14a734fe..94a6cce3ce33a 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
@@ -7,8 +7,8 @@ use span::{Edition, ErasedFileAstId};
 use crate::{
     item_tree::{
         Const, DefDatabase, Enum, ExternBlock, ExternCrate, FieldsShape, Function, Impl, ItemTree,
-        Macro2, MacroCall, MacroRules, Mod, ModItemId, ModKind, RawVisibilityId, Static, Struct,
-        Trait, TypeAlias, Union, Use, UseTree, UseTreeKind, attrs::AttrsOrCfg,
+        Macro2, MacroCall, MacroRules, Mod, ModItemId, ModKind, RawAttrs, RawVisibilityId, Static,
+        Struct, Trait, TypeAlias, Union, Use, UseTree, UseTreeKind,
     },
     visibility::RawVisibility,
 };
@@ -85,13 +85,9 @@ impl Printer<'_> {
         }
     }
 
-    fn print_attrs(&mut self, attrs: &AttrsOrCfg, inner: bool, separated_by: &str) {
-        let AttrsOrCfg::Enabled { attrs } = attrs else {
-            w!(self, "#[cfg(false)]{separated_by}");
-            return;
-        };
+    fn print_attrs(&mut self, attrs: &RawAttrs, inner: bool, separated_by: &str) {
         let inner = if inner { "!" } else { "" };
-        for attr in &*attrs.as_ref() {
+        for attr in &**attrs {
             w!(
                 self,
                 "#{}[{}{}]{}",
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs
index a57432f33c3dc..91b42bef8f79f 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs
@@ -30,8 +30,10 @@ use crate::{A, B};
 
 use a::{c, d::{e}};
         "#,
-        expect![[r#"
+        expect![[r##"
+            #![doc = " file comment"]
             #![no_std]
+            #![doc = " another file comment"]
 
             // AstId: ExternCrate[070B, 0]
             pub(self) extern crate self as renamed;
@@ -45,12 +47,13 @@ use a::{c, d::{e}};
             // AstId: Use[0000, 1]
             pub(self) use globs::*;
 
+            #[doc = " docs on import"]
             // AstId: Use[0000, 2]
             pub(self) use crate::{A, B};
 
             // AstId: Use[0000, 3]
             pub(self) use a::{c, d::{e}};
-        "#]],
+        "##]],
     );
 }
 
@@ -192,6 +195,8 @@ mod inline {
 mod outline;
         "#,
         expect![[r##"
+            #[doc = " outer"]
+            #[doc = " inner"]
             // AstId: Module[03AE, 0]
             pub(self) mod inline {
                 // AstId: Use[0000, 0]
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs
index 4f97baadd1834..df0705bf90cbc 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs
@@ -10,7 +10,6 @@ use triomphe::Arc;
 use crate::{
     AdtId, AssocItemId, AttrDefId, Crate, EnumId, EnumVariantId, FunctionId, ImplId, ModuleDefId,
     StaticId, StructId, TraitId, TypeAliasId, UnionId,
-    attrs::AttrFlags,
     db::DefDatabase,
     expr_store::path::Path,
     nameres::{assoc::TraitItems, crate_def_map, crate_local_def_map},
@@ -214,14 +213,14 @@ impl LangItems {
         T: Into<AttrDefId> + Copy,
     {
         let _p = tracing::info_span!("collect_lang_item").entered();
-        if let Some(lang_item) = AttrFlags::lang_item(db, item.into()) {
+        if let Some(lang_item) = lang_attr(db, item.into()) {
             self.items.entry(lang_item).or_insert_with(|| constructor(item));
         }
     }
 }
 
 pub(crate) fn lang_attr(db: &dyn DefDatabase, item: AttrDefId) -> Option<LangItem> {
-    AttrFlags::lang_item(db, item)
+    db.attrs(item).lang_item()
 }
 
 pub(crate) fn notable_traits_in_deps(db: &dyn DefDatabase, krate: Crate) -> Arc<[Arc<[TraitId]>]> {
@@ -241,7 +240,7 @@ pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: Crate) -> Option
     for (_, module_data) in crate_def_map.modules() {
         for def in module_data.scope.declarations() {
             if let ModuleDefId::TraitId(trait_) = def
-                && AttrFlags::query(db, trait_.into()).contains(AttrFlags::IS_DOC_NOTABLE_TRAIT)
+                && db.attrs(trait_.into()).has_doc_notable_trait()
             {
                 traits.push(trait_);
             }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
index c3c9fc75252d1..e5c213ca937c8 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
@@ -19,7 +19,7 @@ extern crate ra_ap_rustc_abi as rustc_abi;
 
 pub mod db;
 
-pub mod attrs;
+pub mod attr;
 pub mod builtin_type;
 pub mod item_scope;
 pub mod per_ns;
@@ -45,7 +45,7 @@ pub mod find_path;
 pub mod import_map;
 pub mod visibility;
 
-use intern::{Interned, Symbol};
+use intern::{Interned, Symbol, sym};
 pub use rustc_abi as layout;
 use thin_vec::ThinVec;
 use triomphe::Arc;
@@ -80,7 +80,7 @@ use syntax::{AstNode, ast};
 pub use hir_expand::{Intern, Lookup, tt};
 
 use crate::{
-    attrs::AttrFlags,
+    attr::Attrs,
     builtin_type::BuiltinType,
     db::DefDatabase,
     expr_store::ExpressionStoreSourceMap,
@@ -956,16 +956,10 @@ impl CallableDefId {
     }
 }
 
-// FIXME: We probably should use this in more places.
-/// This is used to avoid interning the whole `AttrDefId`, so we intern just modules and not everything.
-#[salsa_macros::interned(debug, no_lifetime)]
-pub struct InternedModuleId {
-    pub loc: ModuleId,
-}
-
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, salsa_macros::Supertype)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
 pub enum AttrDefId {
-    ModuleId(InternedModuleId),
+    ModuleId(ModuleId),
+    FieldId(FieldId),
     AdtId(AdtId),
     FunctionId(FunctionId),
     EnumVariantId(EnumVariantId),
@@ -975,12 +969,15 @@ pub enum AttrDefId {
     TypeAliasId(TypeAliasId),
     MacroId(MacroId),
     ImplId(ImplId),
+    GenericParamId(GenericParamId),
     ExternBlockId(ExternBlockId),
     ExternCrateId(ExternCrateId),
     UseId(UseId),
 }
 
 impl_from!(
+    ModuleId,
+    FieldId,
     AdtId(StructId, EnumId, UnionId),
     EnumVariantId,
     StaticId,
@@ -990,11 +987,41 @@ impl_from!(
     TypeAliasId,
     MacroId(Macro2Id, MacroRulesId, ProcMacroId),
     ImplId,
+    GenericParamId,
     ExternCrateId,
     UseId
     for AttrDefId
 );
 
+impl TryFrom<ModuleDefId> for AttrDefId {
+    type Error = ();
+
+    fn try_from(value: ModuleDefId) -> Result<Self, Self::Error> {
+        match value {
+            ModuleDefId::ModuleId(it) => Ok(it.into()),
+            ModuleDefId::FunctionId(it) => Ok(it.into()),
+            ModuleDefId::AdtId(it) => Ok(it.into()),
+            ModuleDefId::EnumVariantId(it) => Ok(it.into()),
+            ModuleDefId::ConstId(it) => Ok(it.into()),
+            ModuleDefId::StaticId(it) => Ok(it.into()),
+            ModuleDefId::TraitId(it) => Ok(it.into()),
+            ModuleDefId::TypeAliasId(it) => Ok(it.into()),
+            ModuleDefId::MacroId(id) => Ok(id.into()),
+            ModuleDefId::BuiltinType(_) => Err(()),
+        }
+    }
+}
+
+impl From<ItemContainerId> for AttrDefId {
+    fn from(acid: ItemContainerId) -> Self {
+        match acid {
+            ItemContainerId::ModuleId(mid) => AttrDefId::ModuleId(mid),
+            ItemContainerId::ImplId(iid) => AttrDefId::ImplId(iid),
+            ItemContainerId::TraitId(tid) => AttrDefId::TraitId(tid),
+            ItemContainerId::ExternBlockId(id) => AttrDefId::ExternBlockId(id),
+        }
+    }
+}
 impl From<AssocItemId> for AttrDefId {
     fn from(assoc: AssocItemId) -> Self {
         match assoc {
@@ -1235,7 +1262,8 @@ impl HasModule for GenericDefId {
 impl HasModule for AttrDefId {
     fn module(&self, db: &dyn DefDatabase) -> ModuleId {
         match self {
-            AttrDefId::ModuleId(it) => it.loc(db),
+            AttrDefId::ModuleId(it) => *it,
+            AttrDefId::FieldId(it) => it.parent.module(db),
             AttrDefId::AdtId(it) => it.module(db),
             AttrDefId::FunctionId(it) => it.module(db),
             AttrDefId::EnumVariantId(it) => it.module(db),
@@ -1245,6 +1273,12 @@ impl HasModule for AttrDefId {
             AttrDefId::TypeAliasId(it) => it.module(db),
             AttrDefId::ImplId(it) => it.module(db),
             AttrDefId::ExternBlockId(it) => it.module(db),
+            AttrDefId::GenericParamId(it) => match it {
+                GenericParamId::TypeParamId(it) => it.parent(),
+                GenericParamId::ConstParamId(it) => it.parent(),
+                GenericParamId::LifetimeParamId(it) => it.parent,
+            }
+            .module(db),
             AttrDefId::MacroId(it) => it.module(db),
             AttrDefId::ExternCrateId(it) => it.module(db),
             AttrDefId::UseId(it) => it.module(db),
@@ -1368,18 +1402,32 @@ pub enum Complete {
 }
 
 impl Complete {
-    #[inline]
-    pub fn extract(is_trait: bool, attrs: AttrFlags) -> Complete {
-        if attrs.contains(AttrFlags::COMPLETE_IGNORE_FLYIMPORT) {
-            return Complete::IgnoreFlyimport;
-        } else if is_trait {
-            if attrs.contains(AttrFlags::COMPLETE_IGNORE_METHODS) {
-                return Complete::IgnoreMethods;
-            } else if attrs.contains(AttrFlags::COMPLETE_IGNORE_FLYIMPORT_METHODS) {
-                return Complete::IgnoreFlyimportMethods;
+    pub fn extract(is_trait: bool, attrs: &Attrs) -> Complete {
+        let mut do_not_complete = Complete::Yes;
+        for ra_attr in attrs.rust_analyzer_tool() {
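+            // Looks for attributes of the form `#[rust_analyzer::completions(ignore_flyimport)]`
+            // (or `ignore_methods` / `ignore_flyimport_methods`, which only apply to traits).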
+            let segments = ra_attr.path.segments();
+            if segments.len() != 2 {
+                continue;
+            }
+            let action = segments[1].symbol();
+            if *action == sym::completions {
+                match ra_attr.token_tree_value().map(|tt| tt.token_trees().flat_tokens()) {
+                    Some([tt::TokenTree::Leaf(tt::Leaf::Ident(ident))]) => {
+                        if ident.sym == sym::ignore_flyimport {
+                            do_not_complete = Complete::IgnoreFlyimport;
+                        } else if is_trait {
+                            if ident.sym == sym::ignore_methods {
+                                do_not_complete = Complete::IgnoreMethods;
+                            } else if ident.sym == sym::ignore_flyimport_methods {
+                                do_not_complete = Complete::IgnoreFlyimportMethods;
+                            }
+                        }
+                    }
+                    _ => {}
+                }
             }
         }
-        Complete::Yes
+        do_not_complete
     }
 
     #[inline]
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
index 115b487b7ac80..c489c1f7c1dad 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -300,21 +300,21 @@ fn match_by_first_token_literally() {
     check(
         r#"
 macro_rules! m {
-    ($i:ident) => ( enum $i {} );
+    ($i:ident) => ( mod $i {} );
     (= $i:ident) => ( fn $i() {} );
     (+ $i:ident) => ( struct $i; )
 }
-m! { Foo }
+m! { foo }
 m! { = bar }
 m! { + Baz }
 "#,
         expect![[r#"
 macro_rules! m {
-    ($i:ident) => ( enum $i {} );
+    ($i:ident) => ( mod $i {} );
     (= $i:ident) => ( fn $i() {} );
     (+ $i:ident) => ( struct $i; )
 }
-enum Foo {}
+mod foo {}
 fn bar() {}
 struct Baz;
 "#]],
@@ -326,21 +326,21 @@ fn match_by_last_token_literally() {
     check(
         r#"
 macro_rules! m {
-    ($i:ident) => ( enum $i {} );
+    ($i:ident) => ( mod $i {} );
     ($i:ident =) => ( fn $i() {} );
     ($i:ident +) => ( struct $i; )
 }
-m! { Foo }
+m! { foo }
 m! { bar = }
 m! { Baz + }
 "#,
         expect![[r#"
 macro_rules! m {
-    ($i:ident) => ( enum $i {} );
+    ($i:ident) => ( mod $i {} );
     ($i:ident =) => ( fn $i() {} );
     ($i:ident +) => ( struct $i; )
 }
-enum Foo {}
+mod foo {}
 fn bar() {}
 struct Baz;
 "#]],
@@ -352,21 +352,21 @@ fn match_by_ident() {
     check(
         r#"
 macro_rules! m {
-    ($i:ident) => ( enum $i {} );
+    ($i:ident) => ( mod $i {} );
     (spam $i:ident) => ( fn $i() {} );
     (eggs $i:ident) => ( struct $i; )
 }
-m! { Foo }
+m! { foo }
 m! { spam bar }
 m! { eggs Baz }
 "#,
         expect![[r#"
 macro_rules! m {
-    ($i:ident) => ( enum $i {} );
+    ($i:ident) => ( mod $i {} );
     (spam $i:ident) => ( fn $i() {} );
     (eggs $i:ident) => ( struct $i; )
 }
-enum Foo {}
+mod foo {}
 fn bar() {}
 struct Baz;
 "#]],
@@ -378,12 +378,12 @@ fn match_by_separator_token() {
     check(
         r#"
 macro_rules! m {
-    ($($i:ident),*) => ($(enum $i {} )*);
+    ($($i:ident),*) => ($(mod $i {} )*);
     ($($i:ident)#*) => ($(fn $i() {} )*);
     ($i:ident ,# $ j:ident) => ( struct $i; struct $ j; )
 }
 
-m! { Baz, Qux }
+m! { foo, bar }
 
 m! { foo# bar }
 
@@ -391,13 +391,13 @@ m! { Foo,# Bar }
 "#,
         expect![[r#"
 macro_rules! m {
-    ($($i:ident),*) => ($(enum $i {} )*);
+    ($($i:ident),*) => ($(mod $i {} )*);
     ($($i:ident)#*) => ($(fn $i() {} )*);
     ($i:ident ,# $ j:ident) => ( struct $i; struct $ j; )
 }
 
-enum Baz {}
-enum Qux {}
+mod foo {}
+mod bar {}
 
 fn foo() {}
 fn bar() {}
@@ -1114,11 +1114,11 @@ fn test_single_item() {
     check(
         r#"
 macro_rules! m { ($i:item) => ( $i ) }
-m! { struct C {} }
+m! { mod c {} }
 "#,
         expect![[r#"
 macro_rules! m { ($i:item) => ( $i ) }
-struct C {}
+mod c {}
 "#]],
     )
 }
@@ -1144,7 +1144,6 @@ m! {
     type T = u8;
 }
 "#,
-        // The modules are counted twice, once because of the module and once because of the macro call.
         expect![[r#"
 macro_rules! m { ($($i:item)*) => ($($i )*) }
 extern crate a;
@@ -1162,9 +1161,7 @@ trait J {}
 fn h() {}
 extern {}
 type T = u8;
-
-mod b;
-mod c {}"#]],
+"#]],
     );
 }
 
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
index 74393411054e9..e8ae499d27b26 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -245,21 +245,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
         }
     }
 
-    for (_, module) in def_map.modules() {
-        let Some(src) = module.declaration_source(&db) else {
-            continue;
-        };
-        if let Some(macro_file) = src.file_id.macro_file() {
-            let pp = pretty_print_macro_expansion(
-                src.value.syntax().clone(),
-                db.span_map(macro_file.into()).as_ref(),
-                false,
-                false,
-            );
-            format_to!(expanded_text, "\n{}", pp)
-        }
-    }
-
     for impl_id in def_map[local_id].scope.impls() {
         let src = impl_id.lookup(&db).source(&db);
         if let Some(macro_file) = src.file_id.macro_file()
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
index 3f0afe61e0b85..6952a9da10139 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
@@ -9,93 +9,37 @@ use crate::macro_expansion_tests::{check, check_errors};
 
 #[test]
 fn attribute_macro_attr_censoring() {
+    cov_mark::check!(attribute_macro_attr_censoring);
     check(
         r#"
 //- proc_macros: identity
-//- minicore: derive
-#[attr1] #[derive()] #[proc_macros::identity] #[attr2]
+#[attr1] #[proc_macros::identity] #[attr2]
 struct S;
-
-/// Foo
-#[cfg_attr(false, doc = "abc...", attr1)]
-mod foo {
-    #![cfg_attr(true, cfg_attr(true, foo, cfg_attr(false, bar), proc_macros::identity))]
-    #![cfg_attr(true, doc = "123...", attr2)]
-    #![attr3]
-
-    #[cfg_attr(true, cfg(false))]
-    fn foo() {}
-
-    #[cfg(true)]
-    fn bar() {}
-}
 "#,
-        expect![[r##"
-#[attr1] #[derive()] #[proc_macros::identity] #[attr2]
+        expect![[r#"
+#[attr1] #[proc_macros::identity] #[attr2]
 struct S;
 
-/// Foo
-#[cfg_attr(false, doc = "abc...", attr1)]
-mod foo {
-    #![cfg_attr(true, cfg_attr(true, foo, cfg_attr(false, bar), proc_macros::identity))]
-    #![cfg_attr(true, doc = "123...", attr2)]
-    #![attr3]
-
-    #[cfg_attr(true, cfg(false))]
-    fn foo() {}
-
-    #[cfg(true)]
-    fn bar() {}
-}
-
 #[attr1]
-#[attr2] struct S;
-#[doc = " Foo"] mod foo {
-    # ![foo]
-    # ![doc = "123..."]
-    # ![attr2]
-    # ![attr3]
-    #[cfg_attr(true , cfg(false ))] fn foo() {}
-    #[cfg(true )] fn bar() {}
-}"##]],
+#[attr2] struct S;"#]],
     );
 }
 
 #[test]
 fn derive_censoring() {
+    cov_mark::check!(derive_censoring);
     check(
         r#"
 //- proc_macros: derive_identity
 //- minicore:derive
-use derive as my_cool_derive;
 #[attr1]
 #[derive(Foo)]
 #[derive(proc_macros::DeriveIdentity)]
 #[derive(Bar)]
 #[attr2]
 struct S;
-
-#[my_cool_derive()]
-#[cfg_attr(true, derive(), attr1, derive(proc_macros::DeriveIdentity))]
-#[my_cool_derive()]
-struct Foo {
-    #[cfg_attr(false, cfg(false), attr2)]
-    v1: i32,
-    #[cfg_attr(true, cfg(false), attr2)]
-    v1: i32,
-    #[cfg_attr(true, attr3)]
-    v2: fn(#[cfg(false)] param: i32, #[cfg_attr(true, attr4)] param2: u32),
-    v3: Foo<{
-        #[cfg(false)]
-        let foo = 123;
-        456
-    }>,
-    #[cfg(false)]
-    v4: bool // No comma here
-}
 "#,
         expect![[r#"
-use derive as my_cool_derive;
 #[attr1]
 #[derive(Foo)]
 #[derive(proc_macros::DeriveIdentity)]
@@ -103,32 +47,6 @@ use derive as my_cool_derive;
 #[attr2]
 struct S;
 
-#[my_cool_derive()]
-#[cfg_attr(true, derive(), attr1, derive(proc_macros::DeriveIdentity))]
-#[my_cool_derive()]
-struct Foo {
-    #[cfg_attr(false, cfg(false), attr2)]
-    v1: i32,
-    #[cfg_attr(true, cfg(false), attr2)]
-    v1: i32,
-    #[cfg_attr(true, attr3)]
-    v2: fn(#[cfg(false)] param: i32, #[cfg_attr(true, attr4)] param2: u32),
-    v3: Foo<{
-        #[cfg(false)]
-        let foo = 123;
-        456
-    }>,
-    #[cfg(false)]
-    v4: bool // No comma here
-}
-
-#[attr1]
-#[my_cool_derive()] struct Foo {
-    v1: i32, #[attr3]v2: fn(#[attr4]param2: u32), v3: Foo< {
-        456
-    }
-    >,
-}
 #[attr1]
 #[derive(Bar)]
 #[attr2] struct S;"#]],
@@ -169,7 +87,7 @@ fn foo() { bar.; blub }
 fn foo() { bar.; blub }
 
 fn foo() {
-    bar.;
+    bar. ;
     blub
 }"#]],
     );
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs
index e4b95a5a77a55..7d5e627964eb1 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs
@@ -391,14 +391,19 @@ pub(crate) fn crate_local_def_map(db: &dyn DefDatabase, crate_id: Crate) -> DefM
     )
     .entered();
 
-    let root_file_id = crate_id.root_file_id(db);
-    let module_data =
-        ModuleData::new(ModuleOrigin::CrateRoot { definition: root_file_id }, Visibility::Public);
+    let module_data = ModuleData::new(
+        ModuleOrigin::CrateRoot { definition: krate.root_file_id(db) },
+        Visibility::Public,
+    );
 
     let def_map =
         DefMap::empty(crate_id, Arc::new(DefMapCrateData::new(krate.edition)), module_data, None);
-    let (def_map, local_def_map) =
-        collector::collect_defs(db, def_map, TreeId::new(root_file_id.into(), None), None);
+    let (def_map, local_def_map) = collector::collect_defs(
+        db,
+        def_map,
+        TreeId::new(krate.root_file_id(db).into(), None),
+        None,
+    );
 
     DefMapPair::new(db, def_map, local_def_map)
 }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/assoc.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/assoc.rs
index b67853347bdef..8d2a386de8ecc 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/assoc.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/assoc.rs
@@ -4,8 +4,7 @@ use std::mem;
 
 use cfg::CfgOptions;
 use hir_expand::{
-    AstId, AttrMacroAttrIds, ExpandTo, HirFileId, InFile, Intern, Lookup, MacroCallKind,
-    MacroDefKind,
+    AstId, ExpandTo, HirFileId, InFile, Intern, Lookup, MacroCallKind, MacroDefKind,
     mod_path::ModPath,
     name::{AsName, Name},
     span_map::SpanMap,
@@ -22,8 +21,8 @@ use triomphe::Arc;
 use crate::{
     AssocItemId, AstIdWithPath, ConstLoc, FunctionId, FunctionLoc, ImplId, ItemContainerId,
     ItemLoc, MacroCallId, ModuleId, TraitId, TypeAliasId, TypeAliasLoc,
+    attr::Attrs,
     db::DefDatabase,
-    item_tree::AttrsOrCfg,
     macro_call_as_call_id,
     nameres::{
         DefMap, LocalDefMap, MacroSubNs,
@@ -192,22 +191,19 @@ impl<'a> AssocItemCollector<'a> {
 
     fn collect_item(&mut self, item: ast::AssocItem) {
         let ast_id = self.ast_id_map.ast_id(&item);
-        let attrs =
-            match AttrsOrCfg::lower(self.db, &item, &|| self.cfg_options, self.span_map.as_ref()) {
-                AttrsOrCfg::Enabled { attrs } => attrs,
-                AttrsOrCfg::CfgDisabled(cfg) => {
-                    self.diagnostics.push(DefDiagnostic::unconfigured_code(
-                        self.module_id.local_id,
-                        InFile::new(self.file_id, ast_id.erase()),
-                        cfg.0,
-                        self.cfg_options.clone(),
-                    ));
-                    return;
-                }
-            };
+        let attrs = Attrs::new(self.db, &item, self.span_map.as_ref(), self.cfg_options);
+        if let Err(cfg) = attrs.is_cfg_enabled(self.cfg_options) {
+            self.diagnostics.push(DefDiagnostic::unconfigured_code(
+                self.module_id.local_id,
+                InFile::new(self.file_id, ast_id.erase()),
+                cfg,
+                self.cfg_options.clone(),
+            ));
+            return;
+        }
         let ast_id = InFile::new(self.file_id, ast_id.upcast());
 
-        'attrs: for (attr_id, attr) in attrs.as_ref().iter() {
+        'attrs: for attr in &*attrs {
             let ast_id_with_path = AstIdWithPath { path: attr.path.clone(), ast_id };
 
             match self.def_map.resolve_attr_macro(
@@ -216,7 +212,6 @@ impl<'a> AssocItemCollector<'a> {
                 self.module_id.local_id,
                 ast_id_with_path,
                 attr,
-                attr_id,
             ) {
                 Ok(ResolvedAttr::Macro(call_id)) => {
                     let loc = self.db.lookup_intern_macro_call(call_id);
@@ -245,12 +240,8 @@ impl<'a> AssocItemCollector<'a> {
                 Err(_) => {
                     self.diagnostics.push(DefDiagnostic::unresolved_macro_call(
                         self.module_id.local_id,
-                        MacroCallKind::Attr {
-                            ast_id,
-                            attr_args: None,
-                            censored_attr_ids: AttrMacroAttrIds::from_one(attr_id),
-                        },
-                        (*attr.path).clone(),
+                        MacroCallKind::Attr { ast_id, attr_args: None, invoc_attr_index: attr.id },
+                        attr.path().clone(),
                     ));
                 }
             }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs
index fb755026c3e08..2f56d608fcbf4 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs
@@ -2,7 +2,7 @@
 
 use base_db::Crate;
 use hir_expand::{
-    AttrMacroAttrIds, MacroCallId, MacroCallKind, MacroDefId,
+    MacroCallId, MacroCallKind, MacroDefId,
     attrs::{Attr, AttrId, AttrInput},
     inert_attr_macro::find_builtin_attr_idx,
     mod_path::{ModPath, PathKind},
@@ -28,7 +28,6 @@ pub enum ResolvedAttr {
 }
 
 impl DefMap {
-    /// This cannot be used to resolve items that allow derives.
     pub(crate) fn resolve_attr_macro(
         &self,
         local_def_map: &LocalDefMap,
@@ -36,7 +35,6 @@ impl DefMap {
         original_module: LocalModuleId,
         ast_id: AstIdWithPath,
         attr: &Attr,
-        attr_id: AttrId,
         ) -> Result<ResolvedAttr, UnresolvedMacro> {
         // NB: does not currently work for derive helpers as they aren't recorded in the `DefMap`
 
@@ -70,9 +68,6 @@ impl DefMap {
             db,
             &ast_id,
             attr,
-            // There aren't any active attributes before this one, because attribute macros
-            // replace their input, and derive macros are not allowed in this function.
-            AttrMacroAttrIds::from_one(attr_id),
             self.krate,
             db.macro_def(def),
         )))
@@ -107,7 +102,6 @@ pub(super) fn attr_macro_as_call_id(
     db: &dyn DefDatabase,
     item_attr: &AstIdWithPath,
     macro_attr: &Attr,
-    censored_attr_ids: AttrMacroAttrIds,
     krate: Crate,
     def: MacroDefId,
 ) -> MacroCallId {
@@ -127,7 +121,7 @@ pub(super) fn attr_macro_as_call_id(
         MacroCallKind::Attr {
             ast_id: item_attr.ast_id,
             attr_args: arg.map(Arc::new),
-            censored_attr_ids,
+            invoc_attr_index: macro_attr.id,
         },
         macro_attr.ctxt,
     )
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
index c3b272b403bb9..a2ce538356515 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
@@ -3,14 +3,14 @@
 //! `DefCollector::collect` contains the fixed-point iteration loop which
 //! resolves imports and expands macros.
 
-use std::{cmp::Ordering, iter, mem};
+use std::{cmp::Ordering, iter, mem, ops::Not};
 
 use base_db::{BuiltDependency, Crate, CrateOrigin, LangCrateOrigin};
 use cfg::{CfgAtom, CfgExpr, CfgOptions};
 use either::Either;
 use hir_expand::{
-    AttrMacroAttrIds, EditionedFileId, ErasedAstId, ExpandTo, HirFileId, InFile, MacroCallId,
-    MacroCallKind, MacroDefId, MacroDefKind,
+    EditionedFileId, ErasedAstId, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind,
+    MacroDefId, MacroDefKind,
     attrs::{Attr, AttrId},
     builtin::{find_builtin_attr, find_builtin_derive, find_builtin_macro},
     mod_path::{ModPath, PathKind},
@@ -18,10 +18,9 @@ use hir_expand::{
     proc_macro::CustomProcMacroExpander,
 };
 use intern::{Interned, sym};
-use itertools::izip;
+use itertools::{Itertools, izip};
 use la_arena::Idx;
 use rustc_hash::{FxHashMap, FxHashSet};
-use smallvec::SmallVec;
 use span::{Edition, FileAstId, SyntaxContext};
 use syntax::ast;
 use triomphe::Arc;
@@ -33,11 +32,12 @@ use crate::{
     MacroRulesId, MacroRulesLoc, MacroRulesLocFlags, ModuleDefId, ModuleId, ProcMacroId,
     ProcMacroLoc, StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro, UseId,
     UseLoc,
+    attr::Attrs,
     db::DefDatabase,
     item_scope::{GlobId, ImportId, ImportOrExternCrate, PerNsGlobImports},
     item_tree::{
-        self, Attrs, AttrsOrCfg, FieldsShape, ImportAlias, ImportKind, ItemTree, ItemTreeAstId,
-        Macro2, MacroCall, MacroRules, Mod, ModItemId, ModKind, TreeId,
+        self, FieldsShape, ImportAlias, ImportKind, ItemTree, ItemTreeAstId, Macro2, MacroCall,
+        MacroRules, Mod, ModItemId, ModKind, TreeId,
     },
     macro_call_as_call_id,
     nameres::{
@@ -102,7 +102,6 @@ pub(super) fn collect_defs(
         proc_macros,
         from_glob_import: Default::default(),
         skip_attrs: Default::default(),
-        prev_active_attrs: Default::default(),
         unresolved_extern_crates: Default::default(),
         is_proc_macro: krate.is_proc_macro,
     };
@@ -207,7 +206,6 @@ enum MacroDirectiveKind<'db> {
     },
     Attr {
         ast_id: AstIdWithPath,
-        attr_id: AttrId,
         attr: Attr,
         mod_item: ModItemId,
         /* is this needed? */ tree: TreeId,
@@ -248,27 +246,28 @@ struct DefCollector<'db> {
     /// This also stores the attributes to skip when we resolve derive helpers and non-macro
     /// non-builtin attributes in general.
     // FIXME: There has to be a better way to do this
-    skip_attrs: FxHashMap<AstId<ast::Item>, AttrId>,
-    /// When we expand attributes, we need to censor all previous active attributes
-    /// on the same item. Therefore, this holds all active attributes that we already
-    /// expanded.
-    prev_active_attrs: FxHashMap<AstId<ast::Item>, SmallVec<[AttrId; 1]>>,
+    skip_attrs: FxHashMap<InFile<FileAstId<ast::Item>>, AttrId>,
 }
 
 impl<'db> DefCollector<'db> {
     fn seed_with_top_level(&mut self) {
         let _p = tracing::info_span!("seed_with_top_level").entered();
 
-        let file_id = self.def_map.krate.root_file_id(self.db);
+        let file_id = self.def_map.krate.data(self.db).root_file_id(self.db);
         let item_tree = self.db.file_item_tree(file_id.into());
-        let attrs = match item_tree.top_level_attrs() {
-            AttrsOrCfg::Enabled { attrs } => attrs.as_ref(),
-            AttrsOrCfg::CfgDisabled(it) => it.1.as_ref(),
-        };
+        let attrs = item_tree.top_level_attrs(self.db, self.def_map.krate);
         let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap();
 
+        let mut process = true;
+
         // Process other crate-level attributes.
         for attr in &*attrs {
+            if let Some(cfg) = attr.cfg()
+                && self.cfg_options.check(&cfg) == Some(false)
+            {
+                process = false;
+                break;
+            }
             let Some(attr_name) = attr.path.as_ident() else { continue };
 
             match () {
@@ -292,7 +291,7 @@ impl<'db> DefCollector<'db> {
                 () if *attr_name == sym::feature => {
                     let features =
                         attr.parse_path_comma_token_tree(self.db).into_iter().flatten().filter_map(
-                            |(feat, _, _)| match feat.segments() {
+                            |(feat, _)| match feat.segments() {
                                 [name] => Some(name.symbol().clone()),
                                 _ => None,
                             },
@@ -345,7 +344,7 @@ impl<'db> DefCollector<'db> {
 
         self.inject_prelude();
 
-        if matches!(item_tree.top_level_attrs(), AttrsOrCfg::CfgDisabled(_)) {
+        if !process {
             return;
         }
 
@@ -363,7 +362,10 @@ impl<'db> DefCollector<'db> {
 
     fn seed_with_inner(&mut self, tree_id: TreeId) {
         let item_tree = tree_id.item_tree(self.db);
-        let is_cfg_enabled = matches!(item_tree.top_level_attrs(), AttrsOrCfg::Enabled { .. });
+        let is_cfg_enabled = item_tree
+            .top_level_attrs(self.db, self.def_map.krate)
+            .cfg()
+            .is_none_or(|cfg| self.cfg_options.check(&cfg) != Some(false));
         if is_cfg_enabled {
             self.inject_prelude();
 
@@ -454,18 +456,18 @@ impl<'db> DefCollector<'db> {
             self.unresolved_macros.iter().enumerate().find_map(|(idx, directive)| match &directive
                 .kind
             {
-                MacroDirectiveKind::Attr { ast_id, mod_item, attr_id, attr, tree, item_tree } => {
+                MacroDirectiveKind::Attr { ast_id, mod_item, attr, tree, item_tree } => {
                     self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
                         directive.module_id,
                         MacroCallKind::Attr {
                             ast_id: ast_id.ast_id,
                             attr_args: None,
-                            censored_attr_ids: AttrMacroAttrIds::from_one(*attr_id),
+                            invoc_attr_index: attr.id,
                         },
-                        (*attr.path).clone(),
+                        attr.path().clone(),
                     ));
 
-                    self.skip_attrs.insert(ast_id.ast_id.with_value(mod_item.ast_id()), *attr_id);
+                    self.skip_attrs.insert(ast_id.ast_id.with_value(mod_item.ast_id()), attr.id);
 
                     Some((idx, directive, *mod_item, *tree, *item_tree))
                 }
@@ -1348,7 +1350,6 @@ impl<'db> DefCollector<'db> {
                 MacroDirectiveKind::Attr {
                     ast_id: file_ast_id,
                     mod_item,
-                    attr_id,
                     attr,
                     tree,
                     item_tree,
@@ -1361,7 +1362,7 @@ impl<'db> DefCollector<'db> {
                         let mod_dir = collector.mod_dirs[&directive.module_id].clone();
                         collector
                             .skip_attrs
-                            .insert(InFile::new(file_id, mod_item.ast_id()), *attr_id);
+                            .insert(InFile::new(file_id, mod_item.ast_id()), attr.id);
 
                         ModCollector {
                             def_collector: collector,
@@ -1397,6 +1398,7 @@ impl<'db> DefCollector<'db> {
                     // being cfg'ed out).
                     // Ideally we will just expand them to nothing here. But we are only collecting macro calls,
                     // not expanding them, so we have no way to do that.
+                    // If you add an ignored attribute here, also add it to `Semantics::might_be_inside_macro_call()`.
                     if matches!(
                         def.kind,
                         MacroDefKind::BuiltInAttr(_, expander)
@@ -1408,18 +1410,8 @@ impl<'db> DefCollector<'db> {
                         }
                     }
 
-                    let mut call_id = || {
-                        let active_attrs = self.prev_active_attrs.entry(ast_id).or_default();
-                        active_attrs.push(*attr_id);
-
-                        attr_macro_as_call_id(
-                            self.db,
-                            file_ast_id,
-                            attr,
-                            AttrMacroAttrIds::from_many(active_attrs),
-                            self.def_map.krate,
-                            def,
-                        )
+                    let call_id = || {
+                        attr_macro_as_call_id(self.db, file_ast_id, attr, self.def_map.krate, def)
                     };
                     if matches!(def,
                         MacroDefId { kind: MacroDefKind::BuiltInAttr(_, exp), .. }
@@ -1437,7 +1429,7 @@ impl<'db> DefCollector<'db> {
                                 let diag = DefDiagnostic::invalid_derive_target(
                                     directive.module_id,
                                     ast_id,
-                                    *attr_id,
+                                    attr.id,
                                 );
                                 self.def_map.diagnostics.push(diag);
                                 return recollect_without(self);
@@ -1450,7 +1442,7 @@ impl<'db> DefCollector<'db> {
                             Some(derive_macros) => {
                                 let call_id = call_id();
                                 let mut len = 0;
-                                for (idx, (path, call_site, _)) in derive_macros.enumerate() {
+                                for (idx, (path, call_site)) in derive_macros.enumerate() {
                                     let ast_id = AstIdWithPath::new(
                                         file_id,
                                         ast_id.value,
@@ -1461,7 +1453,7 @@ impl<'db> DefCollector<'db> {
                                         depth: directive.depth + 1,
                                         kind: MacroDirectiveKind::Derive {
                                             ast_id,
-                                            derive_attr: *attr_id,
+                                            derive_attr: attr.id,
                                             derive_pos: idx,
                                             ctxt: call_site.ctx,
                                             derive_macro_id: call_id,
@@ -1477,13 +1469,13 @@ impl<'db> DefCollector<'db> {
                                 // Check the comment in [`builtin_attr_macro`].
                                 self.def_map.modules[directive.module_id]
                                     .scope
-                                    .init_derive_attribute(ast_id, *attr_id, call_id, len + 1);
+                                    .init_derive_attribute(ast_id, attr.id, call_id, len + 1);
                             }
                             None => {
                                 let diag = DefDiagnostic::malformed_derive(
                                     directive.module_id,
                                     ast_id,
-                                    *attr_id,
+                                    attr.id,
                                 );
                                 self.def_map.diagnostics.push(diag);
                             }
@@ -1720,17 +1712,16 @@ impl ModCollector<'_, '_> {
         };
 
         let mut process_mod_item = |item: ModItemId| {
-            let attrs = match self.item_tree.attrs(item.ast_id()) {
-                Some(AttrsOrCfg::Enabled { attrs }) => attrs.as_ref(),
-                None => Attrs::EMPTY,
-                Some(AttrsOrCfg::CfgDisabled(cfg)) => {
-                    let ast_id = item.ast_id().erase();
-                    self.emit_unconfigured_diagnostic(InFile::new(self.file_id(), ast_id), &cfg.0);
-                    return;
-                }
-            };
+            let attrs = self.item_tree.attrs(db, krate, item.ast_id());
+            if let Some(cfg) = attrs.cfg()
+                && !self.is_cfg_enabled(&cfg)
+            {
+                let ast_id = item.ast_id().erase();
+                self.emit_unconfigured_diagnostic(InFile::new(self.file_id(), ast_id), &cfg);
+                return;
+            }
 
-            if let Err(()) = self.resolve_attributes(attrs, item, container) {
+            if let Err(()) = self.resolve_attributes(&attrs, item, container) {
                 // Do not process the item. It has at least one non-builtin attribute, so the
                 // fixed-point algorithm is required to resolve the rest of them.
                 return;
@@ -1742,7 +1733,7 @@ impl ModCollector<'_, '_> {
                 self.def_collector.crate_local_def_map.unwrap_or(&self.def_collector.local_def_map);
 
             match item {
-                ModItemId::Mod(m) => self.collect_module(m, attrs),
+                ModItemId::Mod(m) => self.collect_module(m, &attrs),
                 ModItemId::Use(item_tree_id) => {
                     let id =
                         UseLoc { container: module, id: InFile::new(self.file_id(), item_tree_id) }
@@ -2015,7 +2006,7 @@ impl ModCollector<'_, '_> {
                 );
                 return;
             };
-            for (path, _, _) in paths {
+            for (path, _) in paths {
                 if let Some(name) = path.as_ident() {
                     single_imports.push(name.clone());
                 }
@@ -2029,7 +2020,7 @@ impl ModCollector<'_, '_> {
         );
     }
 
-    fn collect_module(&mut self, module_ast_id: ItemTreeAstId, attrs: Attrs<'_>) {
+    fn collect_module(&mut self, module_ast_id: ItemTreeAstId, attrs: &Attrs) {
         let path_attr = attrs.by_key(sym::path).string_value_unescape();
         let is_macro_use = attrs.by_key(sym::macro_use).exists();
         let module = &self.item_tree[module_ast_id];
@@ -2070,18 +2061,23 @@ impl ModCollector<'_, '_> {
                     self.file_id(),
                     &module.name,
                     path_attr.as_deref(),
-                    self.def_collector.def_map.krate,
                 ) {
                     Ok((file_id, is_mod_rs, mod_dir)) => {
                         let item_tree = db.file_item_tree(file_id.into());
-                        match item_tree.top_level_attrs() {
-                            AttrsOrCfg::CfgDisabled(cfg) => {
+                        let krate = self.def_collector.def_map.krate;
+                        let is_enabled = item_tree
+                            .top_level_attrs(db, krate)
+                            .cfg()
+                            .and_then(|cfg| self.is_cfg_enabled(&cfg).not().then_some(cfg))
+                            .map_or(Ok(()), Err);
+                        match is_enabled {
+                            Err(cfg) => {
                                 self.emit_unconfigured_diagnostic(
                                     InFile::new(self.file_id(), module_ast_id.erase()),
-                                    &cfg.0,
+                                    &cfg,
                                 );
                             }
-                            AttrsOrCfg::Enabled { attrs } => {
+                            Ok(()) => {
                                 let module_id = self.push_child_module(
                                     module.name.clone(),
                                     ast_id.value,
@@ -2097,8 +2093,11 @@ impl ModCollector<'_, '_> {
                                     mod_dir,
                                 }
                                 .collect_in_top_module(item_tree.top_level_items());
-                                let is_macro_use =
-                                    is_macro_use || attrs.as_ref().by_key(sym::macro_use).exists();
+                                let is_macro_use = is_macro_use
+                                    || item_tree
+                                        .top_level_attrs(db, krate)
+                                        .by_key(sym::macro_use)
+                                        .exists();
                                 if is_macro_use {
                                     self.import_all_legacy_macros(module_id);
                                 }
@@ -2186,16 +2185,36 @@ impl ModCollector<'_, '_> {
     /// assumed to be resolved already.
     fn resolve_attributes(
         &mut self,
-        attrs: Attrs<'_>,
+        attrs: &Attrs,
         mod_item: ModItemId,
         container: ItemContainerId,
     ) -> Result<(), ()> {
-        let ignore_up_to = self
+        let mut ignore_up_to = self
             .def_collector
             .skip_attrs
             .get(&InFile::new(self.file_id(), mod_item.ast_id()))
             .copied();
-        for (attr_id, attr) in attrs.iter_after(ignore_up_to) {
+        let iter = attrs
+            .iter()
+            .dedup_by(|a, b| {
+                // FIXME: this should not be required, all attributes on an item should have a
+                // unique ID!
+                // Still, this occurs because `#[cfg_attr]` can "expand" to multiple attributes:
+                //     #[cfg_attr(not(off), unresolved, unresolved)]
+                //     struct S;
+                // We should come up with a different way to ID attributes.
+                a.id == b.id
+            })
+            .skip_while(|attr| match ignore_up_to {
+                Some(id) if attr.id == id => {
+                    ignore_up_to = None;
+                    true
+                }
+                Some(_) => true,
+                None => false,
+            });
+
+        for attr in iter {
             if self.def_collector.def_map.is_builtin_or_registered_attr(&attr.path) {
                 continue;
             }
@@ -2210,7 +2229,6 @@ impl ModCollector<'_, '_> {
                 depth: self.macro_depth + 1,
                 kind: MacroDirectiveKind::Attr {
                     ast_id,
-                    attr_id,
                     attr: attr.clone(),
                     mod_item,
                     tree: self.tree_id,
@@ -2226,14 +2244,9 @@ impl ModCollector<'_, '_> {
     }
 
     fn collect_macro_rules(&mut self, ast_id: ItemTreeAstId, module: ModuleId) {
+        let krate = self.def_collector.def_map.krate;
         let mac = &self.item_tree[ast_id];
-        let attrs = match self.item_tree.attrs(ast_id.upcast()) {
-            Some(AttrsOrCfg::Enabled { attrs }) => attrs.as_ref(),
-            None => Attrs::EMPTY,
-            Some(AttrsOrCfg::CfgDisabled(_)) => {
-                unreachable!("we only get here if the macro is not cfg'ed out")
-            }
-        };
+        let attrs = self.item_tree.attrs(self.def_collector.db, krate, ast_id.upcast());
         let f_ast_id = InFile::new(self.file_id(), ast_id.upcast());
 
         let export_attr = || attrs.by_key(sym::macro_export);
@@ -2313,14 +2326,9 @@ impl ModCollector<'_, '_> {
     }
 
     fn collect_macro_def(&mut self, ast_id: ItemTreeAstId, module: ModuleId) {
+        let krate = self.def_collector.def_map.krate;
         let mac = &self.item_tree[ast_id];
-        let attrs = match self.item_tree.attrs(ast_id.upcast()) {
-            Some(AttrsOrCfg::Enabled { attrs }) => attrs.as_ref(),
-            None => Attrs::EMPTY,
-            Some(AttrsOrCfg::CfgDisabled(_)) => {
-                unreachable!("we only get here if the macro is not cfg'ed out")
-            }
-        };
+        let attrs = self.item_tree.attrs(self.def_collector.db, krate, ast_id.upcast());
         let f_ast_id = InFile::new(self.file_id(), ast_id.upcast());
 
         // Case 1: builtin macros
@@ -2506,6 +2514,10 @@ impl ModCollector<'_, '_> {
         Some((a, b))
     }
 
+    fn is_cfg_enabled(&self, cfg: &CfgExpr) -> bool {
+        self.def_collector.cfg_options.check(cfg) != Some(false)
+    }
+
     fn emit_unconfigured_diagnostic(&mut self, ast_id: ErasedAstId, cfg: &CfgExpr) {
         self.def_collector.def_map.diagnostics.push(DefDiagnostic::unconfigured_code(
             self.module_id,
@@ -2545,7 +2557,6 @@ mod tests {
             proc_macros: Default::default(),
             from_glob_import: Default::default(),
             skip_attrs: Default::default(),
-            prev_active_attrs: Default::default(),
             is_proc_macro: false,
             unresolved_extern_crates: Default::default(),
         };
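
Two details drive the `resolve_attributes` rewrite above: a single `#[cfg_attr(pred, a, b)]` can expand into several attributes that share one `AttrId`, and attributes already expanded in a previous fixed-point round (recorded in `skip_attrs`) must be skipped up to and including that id. A small self-contained sketch of that dedup-then-skip filtering, using plain structs and iterator adaptors instead of the real `Attrs` type:

```rust
// Hypothetical miniature of the attribute list after `cfg_attr` expansion:
// `#[cfg_attr(not(off), unresolved, unresolved)]` yields two attributes that
// share the same id, which is why the collector dedups by id before resolving.
#[derive(Clone, Debug, PartialEq)]
struct MiniAttr {
    id: u32,
    path: &'static str,
}

// Sketch of the `dedup_by` + `skip_while(ignore_up_to)` filtering shown above,
// written against plain Vec/Iterator APIs rather than the real `Attrs` type.
fn attrs_to_resolve(attrs: &[MiniAttr], mut ignore_up_to: Option<u32>) -> Vec<MiniAttr> {
    // Keep only the first of any run of attributes with the same id.
    let mut deduped: Vec<MiniAttr> = Vec::new();
    for attr in attrs {
        if deduped.last().map(|prev| prev.id) != Some(attr.id) {
            deduped.push(attr.clone());
        }
    }
    deduped
        .into_iter()
        .skip_while(|attr| match ignore_up_to {
            Some(id) if attr.id == id => {
                ignore_up_to = None; // skip the already-expanded attribute itself ...
                true
            }
            Some(_) => true, // ... and everything before it
            None => false,
        })
        .collect()
}

fn main() {
    let attrs = [
        MiniAttr { id: 0, path: "derive" },
        MiniAttr { id: 1, path: "unresolved" },
        MiniAttr { id: 1, path: "unresolved" }, // same id: came from one cfg_attr
        MiniAttr { id: 2, path: "inline" },
    ];
    let rest = attrs_to_resolve(&attrs, Some(1));
    assert_eq!(rest, vec![MiniAttr { id: 2, path: "inline" }]);
}
```
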
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs
index 6a07c56aeebef..c495a07449196 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs
@@ -17,8 +17,8 @@ pub enum DefDiagnosticKind {
     UnconfiguredCode { ast_id: ErasedAstId, cfg: CfgExpr, opts: CfgOptions },
     UnresolvedMacroCall { ast: MacroCallKind, path: ModPath },
     UnimplementedBuiltinMacro { ast: AstId },
-    InvalidDeriveTarget { ast: AstId, id: AttrId },
-    MalformedDerive { ast: AstId, id: AttrId },
+    InvalidDeriveTarget { ast: AstId, id: usize },
+    MalformedDerive { ast: AstId, id: usize },
     MacroDefError { ast: AstId, message: String },
     MacroError { ast: AstId, path: ModPath, err: ExpandErrorKind },
 }
@@ -119,7 +119,10 @@ impl DefDiagnostic {
         ast: AstId,
         id: AttrId,
     ) -> Self {
-        Self { in_module: container, kind: DefDiagnosticKind::InvalidDeriveTarget { ast, id } }
+        Self {
+            in_module: container,
+            kind: DefDiagnosticKind::InvalidDeriveTarget { ast, id: id.ast_index() },
+        }
     }
 
     pub(super) fn malformed_derive(
@@ -127,6 +130,9 @@ impl DefDiagnostic {
         ast: AstId,
         id: AttrId,
     ) -> Self {
-        Self { in_module: container, kind: DefDiagnosticKind::MalformedDerive { ast, id } }
+        Self {
+            in_module: container,
+            kind: DefDiagnosticKind::MalformedDerive { ast, id: id.ast_index() },
+        }
     }
 }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
index 140b77ac002f9..0c50f13edfb6c 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
@@ -1,6 +1,6 @@
 //! This module resolves `mod foo;` declaration to file.
 use arrayvec::ArrayVec;
-use base_db::{AnchoredPath, Crate};
+use base_db::AnchoredPath;
 use hir_expand::{EditionedFileId, name::Name};
 
 use crate::{HirFileId, db::DefDatabase};
@@ -62,7 +62,6 @@ impl ModDir {
         file_id: HirFileId,
         name: &Name,
         attr_path: Option<&str>,
-        krate: Crate,
     ) -> Result<(EditionedFileId, bool, ModDir), Box<[String]>> {
         let name = name.as_str();
 
@@ -92,7 +91,7 @@ impl ModDir {
                 if let Some(mod_dir) = self.child(dir_path, !root_dir_owner) {
                     return Ok((
                         // FIXME: Edition, is this right?
-                        EditionedFileId::new(db, file_id, orig_file_id.edition(db), krate),
+                        EditionedFileId::new(db, file_id, orig_file_id.edition(db)),
                         is_mod_rs,
                         mod_dir,
                     ));
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs
index cd45afe57d7cd..cd8882183bb4d 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs
@@ -3,10 +3,8 @@
 use hir_expand::name::{AsName, Name};
 use intern::sym;
 
-use crate::{
-    item_tree::Attrs,
-    tt::{Leaf, TokenTree, TopSubtree, TtElement},
-};
+use crate::attr::Attrs;
+use crate::tt::{Leaf, TokenTree, TopSubtree, TtElement};
 
 #[derive(Debug, PartialEq, Eq)]
 pub struct ProcMacroDef {
@@ -31,8 +29,8 @@ impl ProcMacroKind {
     }
 }
 
-impl Attrs<'_> {
-    pub(crate) fn parse_proc_macro_decl(&self, func_name: &Name) -> Option<ProcMacroDef> {
+impl Attrs {
+    pub fn parse_proc_macro_decl(&self, func_name: &Name) -> Option<ProcMacroDef> {
         if self.is_proc_macro() {
             Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::Bang })
         } else if self.is_proc_macro_attribute() {
@@ -53,10 +51,15 @@ impl Attrs<'_> {
         }
     }
 
-    pub(crate) fn parse_proc_macro_derive(&self) -> Option<(Name, Box<[Name]>)> {
+    pub fn parse_proc_macro_derive(&self) -> Option<(Name, Box<[Name]>)> {
         let derive = self.by_key(sym::proc_macro_derive).tt_values().next()?;
         parse_macro_name_and_helper_attrs(derive)
     }
+
+    pub fn parse_rustc_builtin_macro(&self) -> Option<(Name, Box<[Name]>)> {
+        let derive = self.by_key(sym::rustc_builtin_macro).tt_values().next()?;
+        parse_macro_name_and_helper_attrs(derive)
+    }
 }
 
 // This fn is intended for `#[proc_macro_derive(..)]` and `#[rustc_builtin_macro(..)]`, which have
@@ -81,11 +84,14 @@ pub(crate) fn parse_macro_name_and_helper_attrs(tt: &TopSubtree) -> Option<(Name
             let helpers = tt::TokenTreesView::new(&tt.token_trees().flat_tokens()[3..]).try_into_subtree()?;
             let helpers = helpers
                 .iter()
-                .filter_map(|tt| match tt {
+                .filter(
+                    |tt| !matches!(tt, TtElement::Leaf(Leaf::Punct(comma)) if comma.char == ','),
+                )
+                .map(|tt| match tt {
                     TtElement::Leaf(Leaf::Ident(helper)) => Some(helper.as_name()),
                     _ => None,
                 })
-                .collect::<Box<[_]>>();
+                .collect::<Option<Box<[_]>>>()?;
 
             Some((trait_name.as_name(), helpers))
         }
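
The `parse_macro_name_and_helper_attrs` hunk above makes helper-attribute parsing strict: comma separators are filtered out, and any remaining non-identifier token now fails the whole parse instead of being silently dropped by `filter_map`. A simplified sketch with placeholder token types (not the real `tt` leaves):

```rust
// Placeholder token type standing in for `tt::TtElement` leaves.
#[derive(Debug, PartialEq, Clone, Copy)]
enum Tok {
    Ident(&'static str),
    Punct(char),
}

// Commas are separators; anything else must be an identifier, or the whole
// parse fails (collecting `Option`s short-circuits on the first `None`).
fn parse_helpers(tokens: &[Tok]) -> Option<Vec<&'static str>> {
    tokens
        .iter()
        .copied()
        .filter(|tok| !matches!(tok, Tok::Punct(',')))
        .map(|tok| match tok {
            Tok::Ident(name) => Some(name),
            _ => None,
        })
        .collect()
}

fn main() {
    use Tok::*;
    assert_eq!(parse_helpers(&[Ident("a"), Punct(','), Ident("b")]), Some(vec!["a", "b"]));
    assert_eq!(parse_helpers(&[Ident("a"), Punct('!')]), None); // stray token: parse fails
}
```
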
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs b/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs
index c9e8955ad68c6..ebbf87cad668b 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs
@@ -21,7 +21,7 @@ use triomphe::Arc;
 use crate::{
     ConstId, EnumId, EnumVariantId, EnumVariantLoc, ExternBlockId, FunctionId, HasModule, ImplId,
     ItemContainerId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, UnionId, VariantId,
-    attrs::AttrFlags,
+    attr::Attrs,
     db::DefDatabase,
     expr_store::{
         ExpressionStore, ExpressionStoreSourceMap,
@@ -48,13 +48,12 @@ pub struct StructSignature {
     pub store: Arc<ExpressionStore>,
     pub flags: StructFlags,
     pub shape: FieldsShape,
+    pub repr: Option<ReprOptions>,
 }
 
 bitflags! {
     #[derive(Debug, Copy, Clone, PartialEq, Eq)]
     pub struct StructFlags: u8 {
-        /// Indicates whether this struct has `#[repr]`.
-        const HAS_REPR = 1 << 0;
         /// Indicates whether the struct has a `#[rustc_has_incoherent_inherent_impls]` attribute.
         const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS = 1 << 1;
         /// Indicates whether the struct has a `#[fundamental]` attribute.
@@ -76,19 +75,16 @@ impl StructSignature {
     pub fn query(db: &dyn DefDatabase, id: StructId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
         let loc = id.lookup(db);
         let InFile { file_id, value: source } = loc.source(db);
-        let attrs = AttrFlags::query(db, id.into());
+        let attrs = db.attrs(id.into());
 
         let mut flags = StructFlags::empty();
-        if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
+        if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
             flags |= StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
         }
-        if attrs.contains(AttrFlags::FUNDAMENTAL) {
+        if attrs.by_key(sym::fundamental).exists() {
             flags |= StructFlags::FUNDAMENTAL;
         }
-        if attrs.contains(AttrFlags::HAS_REPR) {
-            flags |= StructFlags::HAS_REPR;
-        }
-        if let Some(lang) = attrs.lang_item_with_attrs(db, id.into()) {
+        if let Some(lang) = attrs.lang_item() {
             match lang {
                 LangItem::PhantomData => flags |= StructFlags::IS_PHANTOM_DATA,
                 LangItem::OwnedBox => flags |= StructFlags::IS_BOX,
@@ -98,6 +94,7 @@ impl StructSignature {
                 _ => (),
             }
         }
+        let repr = attrs.repr();
         let shape = adt_shape(source.kind());
 
         let (store, generic_params, source_map) = lower_generic_params(
@@ -115,19 +112,11 @@ impl StructSignature {
                 flags,
                 shape,
                 name: as_name_opt(source.name()),
+                repr,
             }),
             Arc::new(source_map),
         )
     }
-
-    #[inline]
-    pub fn repr(&self, db: &dyn DefDatabase, id: StructId) -> Option<ReprOptions> {
-        if self.flags.contains(StructFlags::HAS_REPR) {
-            AttrFlags::repr(db, id.into())
-        } else {
-            None
-        }
-    }
 }
 
 #[inline]
@@ -145,22 +134,22 @@ pub struct UnionSignature {
     pub generic_params: Arc<GenericParams>,
     pub store: Arc<ExpressionStore>,
     pub flags: StructFlags,
+    pub repr: Option<ReprOptions>,
 }
 
 impl UnionSignature {
     pub fn query(db: &dyn DefDatabase, id: UnionId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
         let loc = id.lookup(db);
-        let attrs = AttrFlags::query(db, id.into());
+        let attrs = db.attrs(id.into());
         let mut flags = StructFlags::empty();
-        if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
+        if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
             flags |= StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
         }
-        if attrs.contains(AttrFlags::FUNDAMENTAL) {
+        if attrs.by_key(sym::fundamental).exists() {
             flags |= StructFlags::FUNDAMENTAL;
         }
-        if attrs.contains(AttrFlags::HAS_REPR) {
-            flags |= StructFlags::HAS_REPR;
-        }
+
+        let repr = attrs.repr();
 
         let InFile { file_id, value: source } = loc.source(db);
         let (store, generic_params, source_map) = lower_generic_params(
@@ -176,6 +165,7 @@ impl UnionSignature {
                 generic_params,
                 store,
                 flags,
+                repr,
                 name: as_name_opt(source.name()),
             }),
             Arc::new(source_map),
@@ -196,17 +186,20 @@ pub struct EnumSignature {
     pub generic_params: Arc<GenericParams>,
     pub store: Arc<ExpressionStore>,
     pub flags: EnumFlags,
+    pub repr: Option<ReprOptions>,
 }
 
 impl EnumSignature {
     pub fn query(db: &dyn DefDatabase, id: EnumId) -> (Arc<Self>, Arc<ExpressionStoreSourceMap>) {
         let loc = id.lookup(db);
-        let attrs = AttrFlags::query(db, id.into());
+        let attrs = db.attrs(id.into());
         let mut flags = EnumFlags::empty();
-        if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
+        if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
             flags |= EnumFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
         }
 
+        let repr = attrs.repr();
+
         let InFile { file_id, value: source } = loc.source(db);
         let (store, generic_params, source_map) = lower_generic_params(
             db,
@@ -222,14 +215,15 @@ impl EnumSignature {
                 generic_params,
                 store,
                 flags,
+                repr,
                 name: as_name_opt(source.name()),
             }),
             Arc::new(source_map),
         )
     }
 
-    pub fn variant_body_type(db: &dyn DefDatabase, id: EnumId) -> IntegerType {
-        match AttrFlags::repr(db, id.into()) {
+    pub fn variant_body_type(&self) -> IntegerType {
+        match self.repr {
             Some(ReprOptions { int: Some(builtin), .. }) => builtin,
             _ => IntegerType::Pointer(true),
         }
@@ -257,9 +251,9 @@ impl ConstSignature {
         let loc = id.lookup(db);
 
         let module = loc.container.module(db);
-        let attrs = AttrFlags::query(db, id.into());
+        let attrs = db.attrs(id.into());
         let mut flags = ConstFlags::empty();
-        if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) {
+        if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
             flags |= ConstFlags::RUSTC_ALLOW_INCOHERENT_IMPL;
         }
         let source = loc.source(db);
@@ -312,9 +306,9 @@ impl StaticSignature {
         let loc = id.lookup(db);
 
         let module = loc.container.module(db);
-        let attrs = AttrFlags::query(db, id.into());
+        let attrs = db.attrs(id.into());
         let mut flags = StaticFlags::empty();
-        if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) {
+        if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
             flags |= StaticFlags::RUSTC_ALLOW_INCOHERENT_IMPL;
         }
 
@@ -439,7 +433,7 @@ impl TraitSignature {
         let loc = id.lookup(db);
 
         let mut flags = TraitFlags::empty();
-        let attrs = AttrFlags::query(db, id.into());
+        let attrs = db.attrs(id.into());
         let source = loc.source(db);
         if source.value.auto_token().is_some() {
             flags.insert(TraitFlags::AUTO);
@@ -450,23 +444,34 @@ impl TraitSignature {
         if source.value.eq_token().is_some() {
             flags.insert(TraitFlags::ALIAS);
         }
-        if attrs.contains(AttrFlags::FUNDAMENTAL) {
+        if attrs.by_key(sym::fundamental).exists() {
             flags |= TraitFlags::FUNDAMENTAL;
         }
-        if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
+        if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
             flags |= TraitFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
         }
-        if attrs.contains(AttrFlags::RUSTC_PAREN_SUGAR) {
+        if attrs.by_key(sym::rustc_paren_sugar).exists() {
             flags |= TraitFlags::RUSTC_PAREN_SUGAR;
         }
-        if attrs.contains(AttrFlags::RUSTC_COINDUCTIVE) {
+        if attrs.by_key(sym::rustc_coinductive).exists() {
             flags |= TraitFlags::COINDUCTIVE;
         }
+        let mut skip_array_during_method_dispatch =
+            attrs.by_key(sym::rustc_skip_array_during_method_dispatch).exists();
+        let mut skip_boxed_slice_during_method_dispatch = false;
+        for tt in attrs.by_key(sym::rustc_skip_during_method_dispatch).tt_values() {
+            for tt in tt.iter() {
+                if let tt::iter::TtElement::Leaf(tt::Leaf::Ident(ident)) = tt {
+                    skip_array_during_method_dispatch |= ident.sym == sym::array;
+                    skip_boxed_slice_during_method_dispatch |= ident.sym == sym::boxed_slice;
+                }
+            }
+        }
 
-        if attrs.contains(AttrFlags::RUSTC_SKIP_ARRAY_DURING_METHOD_DISPATCH) {
+        if skip_array_during_method_dispatch {
             flags |= TraitFlags::SKIP_ARRAY_DURING_METHOD_DISPATCH;
         }
-        if attrs.contains(AttrFlags::RUSTC_SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH) {
+        if skip_boxed_slice_during_method_dispatch {
             flags |= TraitFlags::SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH;
         }
 
@@ -498,8 +503,7 @@ bitflags! {
         const HAS_TARGET_FEATURE = 1 << 9;
         const DEPRECATED_SAFE_2024 = 1 << 10;
         const EXPLICIT_SAFE = 1 << 11;
-        const HAS_LEGACY_CONST_GENERICS = 1 << 12;
-        const RUSTC_INTRINSIC = 1 << 13;
+        const RUSTC_INTRINSIC = 1 << 12;
     }
 }
 
@@ -512,6 +516,8 @@ pub struct FunctionSignature {
     pub ret_type: Option<TypeRefId>,
     pub abi: Option<Symbol>,
     pub flags: FnFlags,
+    // FIXME: we should put this behind a fn flags + query to avoid bloating the struct
+    pub legacy_const_generics_indices: Option<Box<Box<[u32]>>>,
 }
 
 impl FunctionSignature {
@@ -523,26 +529,23 @@ impl FunctionSignature {
         let module = loc.container.module(db);
 
         let mut flags = FnFlags::empty();
-        let attrs = AttrFlags::query(db, id.into());
-        if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) {
+        let attrs = db.attrs(id.into());
+        if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
             flags.insert(FnFlags::RUSTC_ALLOW_INCOHERENT_IMPL);
         }
 
-        if attrs.contains(AttrFlags::HAS_TARGET_FEATURE) {
+        if attrs.by_key(sym::target_feature).exists() {
             flags.insert(FnFlags::HAS_TARGET_FEATURE);
         }
-
-        if attrs.contains(AttrFlags::RUSTC_INTRINSIC) {
+        if attrs.by_key(sym::rustc_intrinsic).exists() {
             flags.insert(FnFlags::RUSTC_INTRINSIC);
         }
-        if attrs.contains(AttrFlags::HAS_LEGACY_CONST_GENERICS) {
-            flags.insert(FnFlags::HAS_LEGACY_CONST_GENERICS);
-        }
+        let legacy_const_generics_indices = attrs.rustc_legacy_const_generics();
 
         let source = loc.source(db);
 
         if source.value.unsafe_token().is_some() {
-            if attrs.contains(AttrFlags::RUSTC_DEPRECATED_SAFE_2024) {
+            if attrs.by_key(sym::rustc_deprecated_safe_2024).exists() {
                 flags.insert(FnFlags::DEPRECATED_SAFE_2024);
             } else {
                 flags.insert(FnFlags::UNSAFE);
@@ -584,6 +587,7 @@ impl FunctionSignature {
                 ret_type,
                 abi,
                 flags,
+                legacy_const_generics_indices,
                 name,
             }),
             Arc::new(source_map),
@@ -632,19 +636,6 @@ impl FunctionSignature {
         self.flags.contains(FnFlags::HAS_TARGET_FEATURE)
     }
 
-    #[inline]
-    pub fn legacy_const_generics_indices<'db>(
-        &self,
-        db: &'db dyn DefDatabase,
-        id: FunctionId,
-    ) -> Option<&'db [u32]> {
-        if !self.flags.contains(FnFlags::HAS_LEGACY_CONST_GENERICS) {
-            return None;
-        }
-
-        AttrFlags::legacy_const_generic_indices(db, id).as_deref()
-    }
-
     pub fn is_intrinsic(db: &dyn DefDatabase, id: FunctionId) -> bool {
         let data = db.function_signature(id);
         data.flags.contains(FnFlags::RUSTC_INTRINSIC)
@@ -688,11 +679,11 @@ impl TypeAliasSignature {
         let loc = id.lookup(db);
 
         let mut flags = TypeAliasFlags::empty();
-        let attrs = AttrFlags::query(db, id.into());
-        if attrs.contains(AttrFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS) {
+        let attrs = db.attrs(id.into());
+        if attrs.by_key(sym::rustc_has_incoherent_inherent_impls).exists() {
             flags.insert(TypeAliasFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPL);
         }
-        if attrs.contains(AttrFlags::RUSTC_ALLOW_INCOHERENT_IMPL) {
+        if attrs.by_key(sym::rustc_allow_incoherent_impl).exists() {
             flags.insert(TypeAliasFlags::RUSTC_ALLOW_INCOHERENT_IMPL);
         }
         if matches!(loc.container, ItemContainerId::ExternBlockId(_)) {
@@ -875,7 +866,7 @@ fn lower_fields(
     let mut has_fields = false;
     for (ty, field) in fields.value {
         has_fields = true;
-        match AttrFlags::is_cfg_enabled_for(&field, cfg_options) {
+        match Attrs::is_cfg_enabled_for(db, &field, col.span_map(), cfg_options) {
             Ok(()) => {
                 let type_ref =
                     col.lower_type_ref_opt(ty, &mut ExprCollector::impl_trait_error_allocator);
@@ -937,6 +928,7 @@ impl EnumVariants {
         let loc = e.lookup(db);
         let source = loc.source(db);
         let ast_id_map = db.ast_id_map(source.file_id);
+        let span_map = db.span_map(source.file_id);
 
         let mut diagnostics = ThinVec::new();
         let cfg_options = loc.container.krate.cfg_options(db);
@@ -948,7 +940,7 @@ impl EnumVariants {
             .variants()
             .filter_map(|variant| {
                 let ast_id = ast_id_map.ast_id(&variant);
-                match AttrFlags::is_cfg_enabled_for(&variant, cfg_options) {
+                match Attrs::is_cfg_enabled_for(db, &variant, span_map.as_ref(), cfg_options) {
                     Ok(()) => {
                         let enum_variant =
                             EnumVariantLoc { id: source.with_value(ast_id), parent: e, index }
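
In the `TraitSignature` hunk above, the dispatch-skip flags are now derived by scanning the identifier leaves of `#[rustc_skip_during_method_dispatch(...)]`, on top of the legacy `#[rustc_skip_array_during_method_dispatch]`. A flattened sketch of that scan, with the token tree reduced to a slice of identifier strings:

```rust
// Sketch: fold the identifiers found in `#[rustc_skip_during_method_dispatch(...)]`
// into the two dispatch-skip flags; `legacy_array_attr` models the presence of
// the older `#[rustc_skip_array_during_method_dispatch]` attribute.
fn skip_flags(legacy_array_attr: bool, idents: &[&str]) -> (bool, bool) {
    let mut skip_array = legacy_array_attr;
    let mut skip_boxed_slice = false;
    for ident in idents {
        skip_array |= *ident == "array";
        skip_boxed_slice |= *ident == "boxed_slice";
    }
    (skip_array, skip_boxed_slice)
}

fn main() {
    // `#[rustc_skip_array_during_method_dispatch]` alone:
    assert_eq!(skip_flags(true, &[]), (true, false));
    // `#[rustc_skip_during_method_dispatch(array, boxed_slice)]`:
    assert_eq!(skip_flags(false, &["array", "boxed_slice"]), (true, true));
}
```
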
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/src.rs b/src/tools/rust-analyzer/crates/hir-def/src/src.rs
index 153fd195f0ad8..367b543cf9080 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/src.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/src.rs
@@ -7,7 +7,7 @@ use syntax::{AstNode, AstPtr, ast};
 
 use crate::{
     AstIdLoc, GenericDefId, LocalFieldId, LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup,
-    UseId, VariantId, attrs::AttrFlags, db::DefDatabase,
+    UseId, VariantId, attr::Attrs, db::DefDatabase,
 };
 
 pub trait HasSource {
@@ -145,13 +145,15 @@ impl HasChildSource for VariantId {
                 (lookup.source(db).map(|it| it.kind()), lookup.container)
             }
         };
+        let span_map = db.span_map(src.file_id);
         let mut map = ArenaMap::new();
         match &src.value {
             ast::StructKind::Tuple(fl) => {
                 let cfg_options = container.krate.cfg_options(db);
                 let mut idx = 0;
                 for fd in fl.fields() {
-                    let enabled = AttrFlags::is_cfg_enabled_for(&fd, cfg_options).is_ok();
+                    let enabled =
+                        Attrs::is_cfg_enabled_for(db, &fd, span_map.as_ref(), cfg_options).is_ok();
                     if !enabled {
                         continue;
                     }
@@ -166,7 +168,8 @@ impl HasChildSource for VariantId {
                 let cfg_options = container.krate.cfg_options(db);
                 let mut idx = 0;
                 for fd in fl.fields() {
-                    let enabled = AttrFlags::is_cfg_enabled_for(&fd, cfg_options).is_ok();
+                    let enabled =
+                        Attrs::is_cfg_enabled_for(db, &fd, span_map.as_ref(), cfg_options).is_ok();
                     if !enabled {
                         continue;
                     }
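
The `HasChildSource for VariantId` change above threads a span map into the per-field cfg check; conceptually, only cfg-enabled fields receive an index in the child-source map. A stand-alone sketch of that skip-disabled-then-index pattern, with made-up `Field` data in place of the real AST and cfg machinery:

```rust
// Hypothetical field description: a name plus whether its cfg predicate holds.
struct Field {
    name: &'static str,
    cfg_enabled: bool,
}

// Sketch: assign consecutive indices only to cfg-enabled fields, mirroring the
// `if !enabled { continue; }` loops in the hunk above.
fn index_enabled(fields: &[Field]) -> Vec<(usize, &'static str)> {
    let mut idx = 0;
    let mut map = Vec::new();
    for fd in fields {
        if !fd.cfg_enabled {
            continue;
        }
        map.push((idx, fd.name));
        idx += 1;
    }
    map
}

fn main() {
    let fields = [
        Field { name: "a", cfg_enabled: true },
        Field { name: "b", cfg_enabled: false }, // e.g. #[cfg(test)] in a non-test build
        Field { name: "c", cfg_enabled: true },
    ];
    assert_eq!(index_enabled(&fields), vec![(0, "a"), (1, "c")]);
}
```
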
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
index 3bb9c361b3c80..12a1c1554cc12 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
@@ -190,15 +190,7 @@ impl TestDB {
         let mut res = DefMap::ROOT;
         for (module, data) in def_map.modules() {
             let src = data.definition_source(self);
-            // We're not comparing the `base_db::EditionedFileId`, but rather the VFS `FileId`, because
-            // `position.file_id` is created before the def map, causing it to have to wrong crate
-            // attached often, which means it won't compare equal. This should not be a problem in real
-            // r-a session, only in tests, because in real r-a we only guess the crate on syntactic-only
-            // (e.g. on-enter) handlers. The rest pick the `EditionedFileId` from the def map.
-            let Some(file_id) = src.file_id.file_id() else {
-                continue;
-            };
-            if file_id.file_id(self) != position.file_id.file_id(self) {
+            if src.file_id != position.file_id {
                 continue;
             }
 
@@ -238,15 +230,7 @@ impl TestDB {
         let mut fn_def = None;
         for (_, module) in def_map.modules() {
             let file_id = module.definition_source(self).file_id;
-            // We're not comparing the `base_db::EditionedFileId`, but rather the VFS `FileId`, because
-            // `position.file_id` is created before the def map, causing it to have to wrong crate
-            // attached often, which means it won't compare equal. This should not be a problem in real
-            // r-a session, only in tests, because in real r-a we only guess the crate on syntactic-only
-            // (e.g. on-enter) handlers. The rest pick the `EditionedFileId` from the def map.
-            let Some(file_id) = file_id.file_id() else {
-                continue;
-            };
-            if file_id.file_id(self) != position.file_id.file_id(self) {
+            if file_id != position.file_id {
                 continue;
             }
             for decl in module.scope.declarations() {
@@ -269,25 +253,26 @@ impl TestDB {
                     };
                     if size != Some(new_size) {
                         size = Some(new_size);
-                        fn_def = Some((it, file_id));
+                        fn_def = Some(it);
                     }
                 }
             }
         }
 
         // Find the innermost block expression that has a `DefMap`.
-        let (def_with_body, file_id) = fn_def?;
-        let def_with_body = def_with_body.into();
+        let def_with_body = fn_def?.into();
         let source_map = self.body_with_source_map(def_with_body).1;
         let scopes = self.expr_scopes(def_with_body);
 
-        let root_syntax_node = self.parse(file_id).syntax_node();
+        let root_syntax_node = self.parse(position.file_id).syntax_node();
         let scope_iter =
             algo::ancestors_at_offset(&root_syntax_node, position.offset).filter_map(|node| {
                 let block = ast::BlockExpr::cast(node)?;
                 let expr = ast::Expr::from(block);
-                let expr_id =
-                    source_map.node_expr(InFile::new(file_id.into(), &expr))?.as_expr().unwrap();
+                let expr_id = source_map
+                    .node_expr(InFile::new(position.file_id.into(), &expr))?
+                    .as_expr()
+                    .unwrap();
                 let scope = scopes.scope_for(expr_id).unwrap();
                 Some(scope)
             });
diff --git a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
index 4fa476afb64a3..80a3c08486531 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
@@ -23,8 +23,6 @@ triomphe.workspace = true
 query-group.workspace = true
 salsa.workspace = true
 salsa-macros.workspace = true
-arrayvec.workspace = true
-thin-vec.workspace = true
 
 # local deps
 stdx.workspace = true
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
index e1807cd2e1e9d..986f8764f5c9e 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
@@ -1,397 +1,200 @@
-//! Defines the basics of attributes lowering.
-//!
-//! The heart and soul of this module is [`expand_cfg_attr()`], alongside its sibling
-//! [`expand_cfg_attr_with_doc_comments()`]. It is used to implement all attribute lowering
-//! in r-a. Its basic job is to list attributes; however, attributes do not necessarily map
-//! into [`ast::Attr`], because `cfg_attr` can map to zero, one, or more attributes
-//! (`#[cfg_attr(predicate, attr1, attr2, ...)]`). To bridge this gap, this module defines
-//! [`Meta`], which represents a desugared attribute. Various bits of r-a need different
-//! things from [`Meta`], therefore it contains many parts. The basic idea is:
-//!
-//!  - There are three kinds of attributes, `path = value`, `path`, and `path(token_tree)`.
-//!  - Most bits of rust-analyzer only need to deal with some paths. Therefore, we keep
-//!    the path only if it has up to 2 segments, or one segment for `path = value`.
-//!    We also only keep the value in `path = value` if it is a literal. However, we always
-//!    save the all relevant ranges of attributes (the path range, and the full attribute range)
-//!    for parts of r-a (e.g. name resolution) that need a faithful representation of the
-//!    attribute.
-//!
-//! [`expand_cfg_attr()`] expands `cfg_attr`s as it goes (as its name implies), to list
-//! all attributes.
-//!
-//! Another thing to note is that we need to be able to map an attribute back to a range
-//! (for diagnostic purposes etc.). This is only ever needed for attributes that participate
-//! in name resolution. An attribute is mapped back by its [`AttrId`], which is just an
-//! index into the item tree attributes list. To minimize the risk of bugs, we have one
-//! place (here) and one function ([`is_item_tree_filtered_attr()`]) that decides whether
-//! an attribute participates in name resolution.
-
-use std::{
-    borrow::Cow, cell::OnceCell, convert::Infallible, fmt, iter::Peekable, ops::ControlFlow,
-};
+//! A higher level attributes based on TokenTree, with also some shortcuts.
+use std::iter;
+use std::{borrow::Cow, fmt, ops};
 
-use ::tt::{TextRange, TextSize};
-use arrayvec::ArrayVec;
 use base_db::Crate;
 use cfg::{CfgExpr, CfgOptions};
 use either::Either;
-use intern::{Interned, Symbol};
+use intern::{Interned, Symbol, sym};
+
 use mbe::{DelimiterKind, Punct};
-use parser::T;
-use smallvec::SmallVec;
-use span::{RealSpanMap, Span, SyntaxContext};
-use syntax::{
-    AstNode, NodeOrToken, SyntaxNode, SyntaxToken,
-    ast::{self, TokenTreeChildren},
-    unescape,
-};
-use syntax_bridge::DocCommentDesugarMode;
+use smallvec::{SmallVec, smallvec};
+use span::{Span, SyntaxContext};
+use syntax::unescape;
+use syntax::{AstNode, AstToken, SyntaxNode, ast, match_ast};
+use syntax_bridge::{DocCommentDesugarMode, desugar_doc_comment_text, syntax_node_to_token_tree};
+use triomphe::ThinArc;
 
 use crate::{
-    AstId,
     db::ExpandDatabase,
     mod_path::ModPath,
+    name::Name,
     span_map::SpanMapRef,
-    tt::{self, TopSubtree},
+    tt::{self, TopSubtree, token_to_literal},
 };
 
-#[derive(Debug)]
-pub struct AttrPath {
-    /// This can be empty if the path is not of 1 or 2 segments exactly.
-    pub segments: ArrayVec<SyntaxToken, 2>,
-    pub range: TextRange,
-    // FIXME: This shouldn't be textual, `#[test]` needs name resolution.
-    // And if textual, it shouldn't be here, it should be in hir-def/src/attrs.rs. But some macros
-    // fully qualify `test` as `core::prelude::vX::test`, and this is more than 2 segments, so hir-def
-    // attrs can't find it. But this will mean we have to push every up-to-4-segments path, which
-    // may impact perf. So it was easier to just hack it here.
-    pub is_test: bool,
+/// Syntactical attributes, without filtering of `cfg_attr`s.
+#[derive(Default, Debug, Clone, PartialEq, Eq)]
+pub struct RawAttrs {
+    // FIXME: This can become `Box<[Attr]>` if https://internals.rust-lang.org/t/layout-of-dst-box/21728?u=chrefr is accepted.
+    entries: Option<ThinArc<(), Attr>>,
 }
 
-impl AttrPath {
-    #[inline]
-    fn extract(path: &ast::Path) -> Self {
-        let mut is_test = false;
-        let segments = (|| {
-            let mut segments = ArrayVec::new();
-            let segment2 = path.segment()?.name_ref()?.syntax().first_token()?;
-            if segment2.text() == "test" {
-                // `#[test]` or `#[core::prelude::vX::test]`.
-                is_test = true;
-            }
-            let segment1 = path.qualifier();
-            if let Some(segment1) = segment1 {
-                if segment1.qualifier().is_some() {
-                    None
-                } else {
-                    let segment1 = segment1.segment()?.name_ref()?.syntax().first_token()?;
-                    segments.push(segment1);
-                    segments.push(segment2);
-                    Some(segments)
-                }
-            } else {
-                segments.push(segment2);
-                Some(segments)
-            }
-        })();
-        AttrPath {
-            segments: segments.unwrap_or(ArrayVec::new()),
-            range: path.syntax().text_range(),
-            is_test,
+impl ops::Deref for RawAttrs {
+    type Target = [Attr];
+
+    fn deref(&self) -> &[Attr] {
+        match &self.entries {
+            Some(it) => &it.slice,
+            None => &[],
         }
     }
+}
 
-    #[inline]
-    pub fn is1(&self, segment: &str) -> bool {
-        self.segments.len() == 1 && self.segments[0].text() == segment
+impl RawAttrs {
+    pub const EMPTY: Self = Self { entries: None };
+
+    pub fn new(
+        db: &dyn ExpandDatabase,
+        owner: &dyn ast::HasAttrs,
+        span_map: SpanMapRef<'_>,
+    ) -> Self {
+        let entries: Vec<_> = Self::attrs_iter::<true>(db, owner, span_map).collect();
+
+        let entries = if entries.is_empty() {
+            None
+        } else {
+            Some(ThinArc::from_header_and_iter((), entries.into_iter()))
+        };
+
+        RawAttrs { entries }
     }
-}
 
-#[derive(Debug)]
-pub enum Meta {
-    /// `name` is `None` if not a single token. `value` is a literal or `None`.
-    NamedKeyValue {
-        path_range: TextRange,
-        name: Option,
-        value: Option,
-    },
-    TokenTree {
-        path: AttrPath,
-        tt: ast::TokenTree,
-    },
-    Path {
-        path: AttrPath,
-    },
-}
+    /// A [`RawAttrs`] that has its `#[cfg_attr(...)]` attributes expanded.
+    pub fn new_expanded(
+        db: &dyn ExpandDatabase,
+        owner: &dyn ast::HasAttrs,
+        span_map: SpanMapRef<'_>,
+        cfg_options: &CfgOptions,
+    ) -> Self {
+        let entries: Vec<_> =
+            Self::attrs_iter_expanded::<true>(db, owner, span_map, cfg_options).collect();
+
+        let entries = if entries.is_empty() {
+            None
+        } else {
+            Some(ThinArc::from_header_and_iter((), entries.into_iter()))
+        };
 
-impl Meta {
-    #[inline]
-    pub fn path_range(&self) -> TextRange {
-        match self {
-            Meta::NamedKeyValue { path_range, .. } => *path_range,
-            Meta::TokenTree { path, .. } | Meta::Path { path } => path.range,
-        }
+        RawAttrs { entries }
+    }
+
+    pub fn attrs_iter<const DESUGAR_COMMENTS: bool>(
+        db: &dyn ExpandDatabase,
+        owner: &dyn ast::HasAttrs,
+        span_map: SpanMapRef<'_>,
+    ) -> impl Iterator<Item = Attr> {
+        collect_attrs(owner).filter_map(move |(id, attr)| match attr {
+            Either::Left(attr) => {
+                attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id))
+            }
+            Either::Right(comment) if DESUGAR_COMMENTS => comment.doc_comment().map(|doc| {
+                let span = span_map.span_for_range(comment.syntax().text_range());
+                let (text, kind) = desugar_doc_comment_text(doc, DocCommentDesugarMode::ProcMacro);
+                Attr {
+                    id,
+                    input: Some(Box::new(AttrInput::Literal(tt::Literal {
+                        symbol: text,
+                        span,
+                        kind,
+                        suffix: None,
+                    }))),
+                    path: Interned::new(ModPath::from(Name::new_symbol(sym::doc, span.ctx))),
+                    ctxt: span.ctx,
+                }
+            }),
+            Either::Right(_) => None,
+        })
     }
 
-    fn extract(iter: &mut Peekable<TokenTreeChildren>) -> Option<(Self, TextSize)> {
-        let mut start_offset = None;
-        if let Some(NodeOrToken::Token(colon1)) = iter.peek()
-            && colon1.kind() == T![:]
-        {
-            start_offset = Some(colon1.text_range().start());
-            iter.next();
-            iter.next_if(|it| it.as_token().is_some_and(|it| it.kind() == T![:]));
+    pub fn attrs_iter_expanded<const DESUGAR_COMMENTS: bool>(
+        db: &dyn ExpandDatabase,
+        owner: &dyn ast::HasAttrs,
+        span_map: SpanMapRef<'_>,
+        cfg_options: &CfgOptions,
+    ) -> impl Iterator<Item = Attr> {
+        Self::attrs_iter::<DESUGAR_COMMENTS>(db, owner, span_map)
+            .flat_map(|attr| attr.expand_cfg_attr(db, cfg_options))
+    }
+
+    pub fn merge(&self, other: Self) -> Self {
+        match (&self.entries, other.entries) {
+            (None, None) => Self::EMPTY,
+            (None, entries @ Some(_)) => Self { entries },
+            (Some(entries), None) => Self { entries: Some(entries.clone()) },
+            (Some(a), Some(b)) => {
+                let last_ast_index = a.slice.last().map_or(0, |it| it.id.ast_index() + 1);
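+                // Shift the ids of `other`'s attrs past `self`'s last AST index so they stay unique.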
+                let items = a
+                    .slice
+                    .iter()
+                    .cloned()
+                    .chain(b.slice.iter().map(|it| {
+                        let mut it = it.clone();
+                        let id = it.id.ast_index() + last_ast_index;
+                        it.id = AttrId::new(id, it.id.is_inner_attr());
+                        it
+                    }))
+                    .collect::<Vec<_>>();
+                Self { entries: Some(ThinArc::from_header_and_iter((), items.into_iter())) }
+            }
         }
-        let first_segment = iter
-            .next_if(|it| it.as_token().is_some_and(|it| it.kind().is_any_identifier()))?
-            .into_token()?;
-        let mut is_test = first_segment.text() == "test";
-        let start_offset = start_offset.unwrap_or_else(|| first_segment.text_range().start());
-
-        let mut segments_len = 1;
-        let mut second_segment = None;
-        let mut path_range = first_segment.text_range();
-        while iter.peek().and_then(NodeOrToken::as_token).is_some_and(|it| it.kind() == T![:])
-            && let _ = iter.next()
-            && iter.peek().and_then(NodeOrToken::as_token).is_some_and(|it| it.kind() == T![:])
-            && let _ = iter.next()
-            && let Some(NodeOrToken::Token(segment)) = iter.peek()
-            && segment.kind().is_any_identifier()
-        {
-            segments_len += 1;
-            is_test = segment.text() == "test";
-            second_segment = Some(segment.clone());
-            path_range = TextRange::new(path_range.start(), segment.text_range().end());
-            iter.next();
+    }
+
+    /// Processes `cfg_attr`s
+    pub fn expand_cfg_attr(self, db: &dyn ExpandDatabase, krate: Crate) -> RawAttrs {
+        let has_cfg_attrs =
+            self.iter().any(|attr| attr.path.as_ident().is_some_and(|name| *name == sym::cfg_attr));
+        if !has_cfg_attrs {
+            return self;
         }
 
-        let segments = |first, second| {
-            let mut segments = ArrayVec::new();
-            if segments_len <= 2 {
-                segments.push(first);
-                if let Some(second) = second {
-                    segments.push(second);
-                }
-            }
-            segments
-        };
-        let meta = match iter.peek() {
-            Some(NodeOrToken::Token(eq)) if eq.kind() == T![=] => {
-                iter.next();
-                let value = match iter.peek() {
-                    Some(NodeOrToken::Token(token)) if token.kind().is_literal() => {
-                        // No need to consume it, it will be consumed by `extract_and_eat_comma()`.
-                        Some(token.clone())
-                    }
-                    _ => None,
-                };
-                let name = if second_segment.is_none() { Some(first_segment) } else { None };
-                Meta::NamedKeyValue { path_range, name, value }
-            }
-            Some(NodeOrToken::Node(tt)) => Meta::TokenTree {
-                path: AttrPath {
-                    segments: segments(first_segment, second_segment),
-                    range: path_range,
-                    is_test,
-                },
-                tt: tt.clone(),
-            },
-            _ => Meta::Path {
-                path: AttrPath {
-                    segments: segments(first_segment, second_segment),
-                    range: path_range,
-                    is_test,
-                },
-            },
+        let cfg_options = krate.cfg_options(db);
+        let new_attrs = self
+            .iter()
+            .cloned()
+            .flat_map(|attr| attr.expand_cfg_attr(db, cfg_options))
+            .collect::<Vec<_>>();
+        let entries = if new_attrs.is_empty() {
+            None
+        } else {
+            Some(ThinArc::from_header_and_iter((), new_attrs.into_iter()))
         };
-        Some((meta, start_offset))
+        RawAttrs { entries }
     }
 
-    fn extract_possibly_unsafe(
-        iter: &mut Peekable<TokenTreeChildren>,
-        container: &ast::TokenTree,
-    ) -> Option<(Self, TextRange)> {
-        if iter.peek().is_some_and(|it| it.as_token().is_some_and(|it| it.kind() == T![unsafe])) {
-            iter.next();
-            let tt = iter.next()?.into_node()?;
-            let result = Self::extract(&mut TokenTreeChildren::new(&tt).peekable()).map(
-                |(meta, start_offset)| (meta, TextRange::new(start_offset, tt_end_offset(&tt))),
-            );
-            while iter.next().is_some_and(|it| it.as_token().is_none_or(|it| it.kind() != T![,])) {}
-            result
-        } else {
-            Self::extract(iter).map(|(meta, start_offset)| {
-                let end_offset = 'find_end_offset: {
-                    for it in iter {
-                        if let NodeOrToken::Token(it) = it
-                            && it.kind() == T![,]
-                        {
-                            break 'find_end_offset it.text_range().start();
-                        }
-                    }
-                    tt_end_offset(container)
-                };
-                (meta, TextRange::new(start_offset, end_offset))
-            })
-        }
+    pub fn is_empty(&self) -> bool {
+        self.entries.is_none()
     }
 }
 
-fn tt_end_offset(tt: &ast::TokenTree) -> TextSize {
-    tt.syntax().last_token().unwrap().text_range().start()
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct AttrId {
+    id: u32,
 }
 
-/// The callback is passed a desugared form of the attribute ([`Meta`]), a [`SyntaxNode`] fully containing it
-/// (note: it may not be the direct parent), the range within the [`SyntaxNode`] bounding the attribute,
-/// and the outermost `ast::Attr`. Note that one node may map to multiple [`Meta`]s due to `cfg_attr`.
-#[inline]
-pub fn expand_cfg_attr<'a, BreakValue>(
-    attrs: impl Iterator<Item = ast::Attr>,
-    cfg_options: impl FnMut() -> &'a CfgOptions,
-    mut callback: impl FnMut(Meta, &SyntaxNode, TextRange, &ast::Attr) -> ControlFlow<BreakValue>,
-) -> Option<BreakValue> {
-    expand_cfg_attr_with_doc_comments::(
-        attrs.map(Either::Left),
-        cfg_options,
-        move |Either::Left((meta, container, range, top_attr))| {
-            callback(meta, container, range, top_attr)
-        },
-    )
-}
+// FIXME: This only handles a single level of cfg_attr nesting
+// that is `#[cfg_attr(all(), cfg_attr(all(), cfg(any())))]` breaks again
+impl AttrId {
+    const INNER_ATTR_SET_BIT: u32 = 1 << 31;
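+    // The top bit marks inner attributes; the remaining 31 bits store the attribute's AST index.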
 
-#[inline]
-pub fn expand_cfg_attr_with_doc_comments<'a, DocComment, BreakValue>(
-    mut attrs: impl Iterator<Item = Either<ast::Attr, DocComment>>,
-    mut cfg_options: impl FnMut() -> &'a CfgOptions,
-    mut callback: impl FnMut(
-        Either<(Meta, &SyntaxNode, TextRange, &ast::Attr), DocComment>,
-    ) -> ControlFlow<BreakValue>,
-) -> Option {
-    let mut stack = SmallVec::<[_; 1]>::new();
-    let result = attrs.try_for_each(|top_attr| {
-        let top_attr = match top_attr {
-            Either::Left(it) => it,
-            Either::Right(comment) => return callback(Either::Right(comment)),
-        };
-        if let Some((attr_name, tt)) = top_attr.as_simple_call()
-            && attr_name == "cfg_attr"
-        {
-            let mut tt_iter = TokenTreeChildren::new(&tt).peekable();
-            let cfg = cfg::CfgExpr::parse_from_ast(&mut tt_iter);
-            if cfg_options().check(&cfg) != Some(false) {
-                stack.push((tt_iter, tt));
-                while let Some((tt_iter, tt)) = stack.last_mut() {
-                    let Some((attr, range)) = Meta::extract_possibly_unsafe(tt_iter, tt) else {
-                        stack.pop();
-                        continue;
-                    };
-                    if let Meta::TokenTree { path, tt: nested_tt } = &attr
-                        && path.is1("cfg_attr")
-                    {
-                        let mut nested_tt_iter = TokenTreeChildren::new(nested_tt).peekable();
-                        let cfg = cfg::CfgExpr::parse_from_ast(&mut nested_tt_iter);
-                        if cfg_options().check(&cfg) != Some(false) {
-                            stack.push((nested_tt_iter, nested_tt.clone()));
-                        }
-                    } else {
-                        callback(Either::Left((attr, tt.syntax(), range, &top_attr)))?;
-                    }
-                }
-            }
-        } else if let Some(ast_meta) = top_attr.meta()
-            && let Some(path) = ast_meta.path()
-        {
-            let path = AttrPath::extract(&path);
-            let meta = if let Some(tt) = ast_meta.token_tree() {
-                Meta::TokenTree { path, tt }
-            } else if let Some(value) = ast_meta.expr() {
-                let value =
-                    if let ast::Expr::Literal(value) = value { Some(value.token()) } else { None };
-                let name =
-                    if path.segments.len() == 1 { Some(path.segments[0].clone()) } else { None };
-                Meta::NamedKeyValue { name, value, path_range: path.range }
-            } else {
-                Meta::Path { path }
-            };
-            callback(Either::Left((
-                meta,
-                ast_meta.syntax(),
-                ast_meta.syntax().text_range(),
-                &top_attr,
-            )))?;
-        }
-        ControlFlow::Continue(())
-    });
-    result.break_value()
-}
+    pub fn new(id: usize, is_inner: bool) -> Self {
+        assert!(id <= !Self::INNER_ATTR_SET_BIT as usize);
+        let id = id as u32;
+        Self { id: if is_inner { id | Self::INNER_ATTR_SET_BIT } else { id } }
+    }
 
-#[inline]
-pub(crate) fn is_item_tree_filtered_attr(name: &str) -> bool {
-    matches!(
-        name,
-        "doc"
-            | "stable"
-            | "unstable"
-            | "target_feature"
-            | "allow"
-            | "expect"
-            | "warn"
-            | "deny"
-            | "forbid"
-            | "repr"
-            | "inline"
-            | "track_caller"
-            | "must_use"
-    )
-}
+    pub fn ast_index(&self) -> usize {
+        (self.id & !Self::INNER_ATTR_SET_BIT) as usize
+    }
 
-/// This collects attributes exactly as the item tree needs them. This is used for the item tree,
-/// as well as for resolving [`AttrId`]s.
-pub fn collect_item_tree_attrs<'a, BreakValue>(
-    owner: &dyn ast::HasAttrs,
-    cfg_options: impl Fn() -> &'a CfgOptions,
-    mut on_attr: impl FnMut(Meta, &SyntaxNode, &ast::Attr, TextRange) -> ControlFlow<BreakValue>,
-) -> Option<Either<BreakValue, CfgExpr>> {
-    let attrs = ast::attrs_including_inner(owner);
-    expand_cfg_attr(
-        attrs,
-        || cfg_options(),
-        |attr, container, range, top_attr| {
-            // We filter builtin attributes that we don't need for nameres, because this saves memory.
-            // I only put the most common attributes, but if some attribute becomes common feel free to add it.
-            // Notice, however: for an attribute to be filtered out, it *must* not be shadowable with a macro!
-            let filter = match &attr {
-                Meta::NamedKeyValue { name: Some(name), .. } => {
-                    is_item_tree_filtered_attr(name.text())
-                }
-                Meta::TokenTree { path, tt } if path.segments.len() == 1 => {
-                    let name = path.segments[0].text();
-                    if name == "cfg" {
-                        let cfg =
-                            CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(tt).peekable());
-                        if cfg_options().check(&cfg) == Some(false) {
-                            return ControlFlow::Break(Either::Right(cfg));
-                        }
-                        true
-                    } else {
-                        is_item_tree_filtered_attr(name)
-                    }
-                }
-                Meta::Path { path } => {
-                    path.segments.len() == 1 && is_item_tree_filtered_attr(path.segments[0].text())
-                }
-                _ => false,
-            };
-            if !filter && let ControlFlow::Break(v) = on_attr(attr, container, top_attr, range) {
-                return ControlFlow::Break(Either::Left(v));
-            }
-            ControlFlow::Continue(())
-        },
-    )
+    pub fn is_inner_attr(&self) -> bool {
+        self.id & Self::INNER_ATTR_SET_BIT != 0
+    }
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct Attr {
+    pub id: AttrId,
     pub path: Interned<ModPath>,
     pub input: Option<Box<AttrInput>>,
     pub ctxt: SyntaxContext,
@@ -414,6 +217,131 @@ impl fmt::Display for AttrInput {
     }
 }
 
+impl Attr {
+    fn from_src(
+        db: &dyn ExpandDatabase,
+        ast: ast::Meta,
+        span_map: SpanMapRef<'_>,
+        id: AttrId,
+    ) -> Option<Attr> {
+        let path = ast.path()?;
+        let range = path.syntax().text_range();
+        let path = Interned::new(ModPath::from_src(db, path, &mut |range| {
+            span_map.span_for_range(range).ctx
+        })?);
+        let span = span_map.span_for_range(range);
+        let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
+            let token = lit.token();
+            Some(Box::new(AttrInput::Literal(token_to_literal(token.text(), span))))
+        } else if let Some(tt) = ast.token_tree() {
+            let tree = syntax_node_to_token_tree(
+                tt.syntax(),
+                span_map,
+                span,
+                DocCommentDesugarMode::ProcMacro,
+            );
+            Some(Box::new(AttrInput::TokenTree(tree)))
+        } else {
+            None
+        };
+        Some(Attr { id, path, input, ctxt: span.ctx })
+    }
+
+    fn from_tt(
+        db: &dyn ExpandDatabase,
+        mut tt: tt::TokenTreesView<'_>,
+        id: AttrId,
+    ) -> Option<Attr> {
+        if matches!(tt.flat_tokens(),
+            [tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { sym, .. })), ..]
+            if *sym == sym::unsafe_
+        ) {
+            match tt.iter().nth(1) {
+                Some(tt::TtElement::Subtree(_, iter)) => tt = iter.remaining(),
+                _ => return None,
+            }
+        }
+        let first = tt.flat_tokens().first()?;
+        let ctxt = first.first_span().ctx;
+        let (path, input) = {
+            let mut iter = tt.iter();
+            let start = iter.savepoint();
+            let mut input = tt::TokenTreesView::new(&[]);
+            let mut path = iter.from_savepoint(start);
+            let mut path_split_savepoint = iter.savepoint();
+            while let Some(tt) = iter.next() {
+                path = iter.from_savepoint(start);
+                if !matches!(
+                    tt,
+                    tt::TtElement::Leaf(
+                        tt::Leaf::Punct(tt::Punct { char: ':' | '$', .. }) | tt::Leaf::Ident(_),
+                    )
+                ) {
+                    input = path_split_savepoint.remaining();
+                    break;
+                }
+                path_split_savepoint = iter.savepoint();
+            }
+            (path, input)
+        };
+
+        let path = Interned::new(ModPath::from_tt(db, path)?);
+
+        let input = match (input.flat_tokens().first(), input.try_into_subtree()) {
+            (_, Some(tree)) => {
+                Some(Box::new(AttrInput::TokenTree(tt::TopSubtree::from_subtree(tree))))
+            }
+            (Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. }))), _) => {
+                match input.flat_tokens().get(1) {
+                    Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) => {
+                        Some(Box::new(AttrInput::Literal(lit.clone())))
+                    }
+                    _ => None,
+                }
+            }
+            _ => None,
+        };
+        Some(Attr { id, path, input, ctxt })
+    }
+
+    pub fn path(&self) -> &ModPath {
+        &self.path
+    }
+
+    pub fn expand_cfg_attr(
+        self,
+        db: &dyn ExpandDatabase,
+        cfg_options: &CfgOptions,
+    ) -> impl IntoIterator<Item = Self> {
+        let is_cfg_attr = self.path.as_ident().is_some_and(|name| *name == sym::cfg_attr);
+        if !is_cfg_attr {
+            return smallvec![self];
+        }
+
+        let subtree = match self.token_tree_value() {
+            Some(it) => it,
+            _ => return smallvec![self.clone()],
+        };
+
+        let (cfg, parts) = match parse_cfg_attr_input(subtree) {
+            Some(it) => it,
+            None => return smallvec![self.clone()],
+        };
+        let index = self.id;
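+        // Attributes expanded out of this `cfg_attr` keep the `cfg_attr`'s own id.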
+        let attrs = parts.filter_map(|attr| Attr::from_tt(db, attr, index));
+
+        let cfg = TopSubtree::from_token_trees(subtree.top_subtree().delimiter, cfg);
+        let cfg = CfgExpr::parse(&cfg);
+        if cfg_options.check(&cfg) == Some(false) {
+            smallvec![]
+        } else {
+            cov_mark::hit!(cfg_attr_active);
+
+            attrs.collect::<SmallVec<[_; 1]>>()
+        }
+    }
+}
+
 impl Attr {
     /// #[path = "string"]
     pub fn string_value(&self) -> Option<&Symbol> {
@@ -475,26 +403,30 @@ impl Attr {
     pub fn parse_path_comma_token_tree<'a>(
         &'a self,
         db: &'a dyn ExpandDatabase,
-    ) -> Option<impl Iterator<Item = (ModPath, Span, tt::TokenTreesView<'a>)> + 'a> {
+    ) -> Option<impl Iterator<Item = (ModPath, Span)> + 'a> {
         let args = self.token_tree_value()?;
 
         if args.top_subtree().delimiter.kind != DelimiterKind::Parenthesis {
             return None;
         }
-        Some(parse_path_comma_token_tree(db, args))
+        let paths = args
+            .token_trees()
+            .split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))))
+            .filter_map(move |tts| {
+                let span = tts.flat_tokens().first()?.first_span();
+                Some((ModPath::from_tt(db, tts)?, span))
+            });
+
+        Some(paths)
     }
-}
 
-fn parse_path_comma_token_tree<'a>(
-    db: &'a dyn ExpandDatabase,
-    args: &'a tt::TopSubtree,
-) -> impl Iterator<Item = (ModPath, Span, tt::TokenTreesView<'a>)> {
-    args.token_trees()
-        .split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))))
-        .filter_map(move |tts| {
-            let span = tts.flat_tokens().first()?.first_span();
-            Some((ModPath::from_tt(db, tts)?, span, tts))
-        })
+    pub fn cfg(&self) -> Option<CfgExpr> {
+        if *self.path.as_ident()? == sym::cfg {
+            self.token_tree_value().map(CfgExpr::parse)
+        } else {
+            None
+        }
+    }
 }
 
 fn unescape(s: &str) -> Option<Cow<'_, str>> {
@@ -523,104 +455,58 @@ fn unescape(s: &str) -> Option<Cow<'_, str>> {
     }
 }
 
-/// This is an index of an attribute *that always points to the item tree attributes*.
-///
-/// Outer attributes are counted first, then inner attributes. This does not support
-/// out-of-line modules, which may have attributes spread across 2 files!
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct AttrId {
-    id: u32,
+pub fn collect_attrs(
+    owner: &dyn ast::HasAttrs,
+) -> impl Iterator<Item = (AttrId, Either<ast::Attr, ast::Comment>)> {
+    let inner_attrs =
+        inner_attributes(owner.syntax()).into_iter().flatten().zip(iter::repeat(true));
+    let outer_attrs = ast::AttrDocCommentIter::from_syntax_node(owner.syntax())
+        .filter(|el| match el {
+            Either::Left(attr) => attr.kind().is_outer(),
+            Either::Right(comment) => comment.is_outer(),
+        })
+        .zip(iter::repeat(false));
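+    // Outer attributes and doc comments come first, then inner ones; the enumeration index becomes the `AttrId`.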
+    outer_attrs
+        .chain(inner_attrs)
+        .enumerate()
+        .map(|(id, (attr, is_inner))| (AttrId::new(id, is_inner), attr))
 }
 
-impl AttrId {
-    #[inline]
-    pub fn from_item_tree_index(id: u32) -> Self {
-        Self { id }
-    }
-
-    #[inline]
-    pub fn item_tree_index(self) -> u32 {
-        self.id
-    }
-
-    /// Returns the containing `ast::Attr` (note that it may contain other attributes as well due
-    /// to `cfg_attr`), a `SyntaxNode` guaranteed to contain the attribute, the full range of the
-    /// attribute, and its desugared [`Meta`].
-    pub fn find_attr_range(
-        self,
-        db: &dyn ExpandDatabase,
-        krate: Crate,
-        owner: AstId,
-    ) -> (ast::Attr, SyntaxNode, TextRange, Meta) {
-        self.find_attr_range_with_source(db, krate, &owner.to_node(db))
-    }
-
-    /// Returns the containing `ast::Attr` (note that it may contain other attributes as well due
-    /// to `cfg_attr`), a `SyntaxNode` guaranteed to contain the attribute, the full range of the
-    /// attribute, and its desugared [`Meta`].
-    pub fn find_attr_range_with_source(
-        self,
-        db: &dyn ExpandDatabase,
-        krate: Crate,
-        owner: &dyn ast::HasAttrs,
-    ) -> (ast::Attr, SyntaxNode, TextRange, Meta) {
-        let cfg_options = OnceCell::new();
-        let mut index = 0;
-        let result = collect_item_tree_attrs(
-            owner,
-            || cfg_options.get_or_init(|| krate.cfg_options(db)),
-            |meta, container, top_attr, range| {
-                if index == self.id {
-                    return ControlFlow::Break((top_attr.clone(), container.clone(), range, meta));
+fn inner_attributes(
+    syntax: &SyntaxNode,
+) -> Option<impl Iterator<Item = Either<ast::Attr, ast::Comment>>> {
+    let node = match_ast! {
+        match syntax {
+            ast::SourceFile(_) => syntax.clone(),
+            ast::ExternBlock(it) => it.extern_item_list()?.syntax().clone(),
+            ast::Fn(it) => it.body()?.stmt_list()?.syntax().clone(),
+            ast::Impl(it) => it.assoc_item_list()?.syntax().clone(),
+            ast::Module(it) => it.item_list()?.syntax().clone(),
+            ast::BlockExpr(it) => {
+                if !it.may_carry_attributes() {
+                    return None
                 }
-                index += 1;
-                ControlFlow::Continue(())
+                syntax.clone()
             },
-        );
-        match result {
-            Some(Either::Left(it)) => it,
-            _ => {
-                panic!("used an incorrect `AttrId`; crate={krate:?}, attr_id={self:?}");
-            }
+            _ => return None,
         }
-    }
+    };
 
-    pub fn find_derive_range(
-        self,
-        db: &dyn ExpandDatabase,
-        krate: Crate,
-        owner: AstId,
-        derive_index: u32,
-    ) -> TextRange {
-        let (_, _, derive_attr_range, derive_attr) = self.find_attr_range(db, krate, owner);
-        let Meta::TokenTree { tt, .. } = derive_attr else {
-            return derive_attr_range;
-        };
-        // Fake the span map, as we don't really need spans here, just the offsets of the node in the file.
-        let span_map = RealSpanMap::absolute(span::EditionedFileId::current_edition(
-            span::FileId::from_raw(0),
-        ));
-        let tt = syntax_bridge::syntax_node_to_token_tree(
-            tt.syntax(),
-            SpanMapRef::RealSpanMap(&span_map),
-            span_map.span_for_range(tt.syntax().text_range()),
-            DocCommentDesugarMode::ProcMacro,
-        );
-        let Some((_, _, derive_tts)) =
-            parse_path_comma_token_tree(db, &tt).nth(derive_index as usize)
-        else {
-            return derive_attr_range;
-        };
-        let (Some(first_tt), Some(last_tt)) =
-            (derive_tts.flat_tokens().first(), derive_tts.flat_tokens().last())
-        else {
-            return derive_attr_range;
-        };
-        let start = first_tt.first_span().range.start();
-        let end = match last_tt {
-            tt::TokenTree::Leaf(it) => it.span().range.end(),
-            tt::TokenTree::Subtree(it) => it.delimiter.close.range.end(),
-        };
-        TextRange::new(start, end)
-    }
+    let attrs = ast::AttrDocCommentIter::from_syntax_node(&node).filter(|el| match el {
+        Either::Left(attr) => attr.kind().is_inner(),
+        Either::Right(comment) => comment.is_inner(),
+    });
+    Some(attrs)
+}
+
+// Input subtree is: `(cfg, $(attr),+)`
+// Split it up into a `cfg` subtree and the `attr` subtrees.
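+// For example, `(feature = "std", derive(Debug), inline)` yields the `feature = "std"` tokens as
+// the `cfg` part and `derive(Debug)` / `inline` as the attribute parts.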
+fn parse_cfg_attr_input(
+    subtree: &TopSubtree,
+) -> Option<(tt::TokenTreesView<'_>, impl Iterator<Item = tt::TokenTreesView<'_>>)> {
+    let mut parts = subtree
+        .token_trees()
+        .split(|tt| matches!(tt, tt::TtElement::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))));
+    let cfg = parts.next()?;
+    Some((cfg, parts.filter(|it| !it.is_empty())))
 }
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs
index 92bcd378149ed..6fe63f249cd4a 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs
@@ -772,7 +772,7 @@ fn relative_file(
     if res == call_site && !allow_recursion {
         Err(ExpandError::other(err_span, format!("recursive inclusion of `{path_str}`")))
     } else {
-        Ok(EditionedFileId::new(db, res, lookup.krate.data(db).edition, lookup.krate))
+        Ok(EditionedFileId::new(db, res, lookup.krate.data(db).edition))
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs b/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs
index 8b82671ed4a08..d5ebd6ee19f5c 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/cfg_process.rs
@@ -1,343 +1,373 @@
 //! Processes out #[cfg] and #[cfg_attr] attributes from the input for the derive macro
-use std::{cell::OnceCell, ops::ControlFlow};
+use std::iter::Peekable;
 
-use ::tt::TextRange;
 use base_db::Crate;
-use cfg::CfgExpr;
-use parser::T;
-use smallvec::SmallVec;
+use cfg::{CfgAtom, CfgExpr};
+use intern::{Symbol, sym};
+use rustc_hash::FxHashSet;
 use syntax::{
-    AstNode, PreorderWithTokens, SyntaxElement, SyntaxNode, SyntaxToken, WalkEvent,
-    ast::{self, HasAttrs, TokenTreeChildren},
+    AstNode, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, T,
+    ast::{self, Attr, HasAttrs, Meta, TokenTree, VariantList},
 };
-use syntax_bridge::DocCommentDesugarMode;
+use tracing::{debug, warn};
 
-use crate::{
-    attrs::{AttrId, Meta, expand_cfg_attr, is_item_tree_filtered_attr},
-    db::ExpandDatabase,
-    fixup::{self, SyntaxFixupUndoInfo},
-    span_map::SpanMapRef,
-    tt::{self, DelimSpan, Span},
-};
-
-struct ItemIsCfgedOut;
+use crate::{MacroCallLoc, MacroDefKind, db::ExpandDatabase, proc_macro::ProcMacroKind};
 
-#[derive(Debug)]
-struct ExpandedAttrToProcess {
-    range: TextRange,
+fn check_cfg(db: &dyn ExpandDatabase, attr: &Attr, krate: Crate) -> Option<bool> {
+    if !attr.simple_name().as_deref().map(|v| v == "cfg")? {
+        return None;
+    }
+    let cfg = parse_from_attr_token_tree(&attr.meta()?.token_tree()?)?;
+    let enabled = krate.cfg_options(db).check(&cfg) != Some(false);
+    Some(enabled)
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-enum NextExpandedAttrState {
-    NotStarted,
-    InTheMiddle,
+fn check_cfg_attr(db: &dyn ExpandDatabase, attr: &Attr, krate: Crate) -> Option<bool> {
+    if !attr.simple_name().as_deref().map(|v| v == "cfg_attr")? {
+        return None;
+    }
+    check_cfg_attr_value(db, &attr.token_tree()?, krate)
 }
 
-#[derive(Debug)]
-struct AstAttrToProcess {
-    range: TextRange,
-    expanded_attrs: SmallVec<[ExpandedAttrToProcess; 1]>,
-    expanded_attrs_idx: usize,
-    next_expanded_attr: NextExpandedAttrState,
-    pound_span: Span,
-    brackets_span: DelimSpan,
-    /// If `Some`, this is an inner attribute.
-    excl_span: Option,
+pub fn check_cfg_attr_value(
+    db: &dyn ExpandDatabase,
+    attr: &TokenTree,
+    krate: Crate,
+) -> Option<bool> {
+    let cfg_expr = parse_from_attr_token_tree(attr)?;
+    let enabled = krate.cfg_options(db).check(&cfg_expr) != Some(false);
+    Some(enabled)
 }
 
-fn macro_input_callback(
+fn process_has_attrs_with_possible_comma(
     db: &dyn ExpandDatabase,
-    is_derive: bool,
-    censor_item_tree_attr_ids: &[AttrId],
+    items: impl Iterator<Item = impl HasAttrs>,
     krate: Crate,
-    default_span: Span,
-    span_map: SpanMapRef<'_>,
-) -> impl FnMut(&mut PreorderWithTokens, &WalkEvent<SyntaxElement>) -> (bool, Vec<tt::Leaf>) {
-    let cfg_options = OnceCell::new();
-    let cfg_options = move || *cfg_options.get_or_init(|| krate.cfg_options(db));
-
-    let mut should_strip_attr = {
-        let mut item_tree_attr_id = 0;
-        let mut censor_item_tree_attr_ids_index = 0;
-        move || {
-            let mut result = false;
-            if let Some(&next_censor_attr_id) =
-                censor_item_tree_attr_ids.get(censor_item_tree_attr_ids_index)
-                && next_censor_attr_id.item_tree_index() == item_tree_attr_id
-            {
-                censor_item_tree_attr_ids_index += 1;
-                result = true;
+    remove: &mut FxHashSet<SyntaxElement>,
+) -> Option<()> {
+    for item in items {
+        let field_attrs = item.attrs();
+        'attrs: for attr in field_attrs {
+            if let Some(enabled) = check_cfg(db, &attr, krate) {
+                if enabled {
+                    debug!("censoring {:?}", attr.syntax());
+                    remove.insert(attr.syntax().clone().into());
+                } else {
+                    debug!("censoring {:?}", item.syntax());
+                    remove.insert(item.syntax().clone().into());
+                    // We need to remove the , as well
+                    remove_possible_comma(&item, remove);
+                    break 'attrs;
+                }
             }
-            item_tree_attr_id += 1;
-            result
-        }
-    };
 
-    let mut attrs = Vec::new();
-    let mut attrs_idx = 0;
-    let mut has_inner_attrs_owner = false;
-    let mut in_attr = false;
-    let mut done_with_attrs = false;
-    let mut did_top_attrs = false;
-    move |preorder, event| {
-        match event {
-            WalkEvent::Enter(SyntaxElement::Node(node)) => {
-                if done_with_attrs {
-                    return (true, Vec::new());
+            if let Some(enabled) = check_cfg_attr(db, &attr, krate) {
+                if enabled {
+                    debug!("Removing cfg_attr tokens {:?}", attr);
+                    let meta = attr.meta()?;
+                    let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?;
+                    remove.extend(removes_from_cfg_attr);
+                } else {
+                    debug!("censoring type cfg_attr {:?}", item.syntax());
+                    remove.insert(attr.syntax().clone().into());
                 }
+            }
+        }
+    }
+    Some(())
+}
 
-                if ast::Attr::can_cast(node.kind()) {
-                    in_attr = true;
-                    let node_range = node.text_range();
-                    while attrs
-                        .get(attrs_idx)
-                        .is_some_and(|it: &AstAttrToProcess| it.range != node_range)
-                    {
-                        attrs_idx += 1;
-                    }
-                } else if let Some(has_attrs) = ast::AnyHasAttrs::cast(node.clone()) {
-                    if has_inner_attrs_owner {
-                        has_inner_attrs_owner = false;
-                        return (true, Vec::new());
-                    }
-
-                    if did_top_attrs && !is_derive {
-                        // Derives need all attributes handled, but attribute macros need only the top attributes handled.
-                        done_with_attrs = true;
-                        return (true, Vec::new());
-                    }
-                    did_top_attrs = true;
-
-                    if let Some(inner_attrs_node) = has_attrs.inner_attributes_node()
-                        && inner_attrs_node != *node
-                    {
-                        has_inner_attrs_owner = true;
-                    }
-
-                    let node_attrs = ast::attrs_including_inner(&has_attrs);
-
-                    attrs.clear();
-                    node_attrs.clone().for_each(|attr| {
-                        let span_for = |token: Option<SyntaxToken>| {
-                            token
-                                .map(|token| span_map.span_for_range(token.text_range()))
-                                .unwrap_or(default_span)
-                        };
-                        attrs.push(AstAttrToProcess {
-                            range: attr.syntax().text_range(),
-                            pound_span: span_for(attr.pound_token()),
-                            brackets_span: DelimSpan {
-                                open: span_for(attr.l_brack_token()),
-                                close: span_for(attr.r_brack_token()),
-                            },
-                            excl_span: attr
-                                .excl_token()
-                                .map(|token| span_map.span_for_range(token.text_range())),
-                            expanded_attrs: SmallVec::new(),
-                            expanded_attrs_idx: 0,
-                            next_expanded_attr: NextExpandedAttrState::NotStarted,
-                        });
-                    });
-
-                    attrs_idx = 0;
-                    let strip_current_item = expand_cfg_attr(
-                        node_attrs,
-                        &cfg_options,
-                        |attr, _container, range, top_attr| {
-                            // Find the attr.
-                            while attrs[attrs_idx].range != top_attr.syntax().text_range() {
-                                attrs_idx += 1;
-                            }
-
-                            let mut strip_current_attr = false;
-                            match attr {
-                                Meta::NamedKeyValue { name, .. } => {
-                                    if name
-                                        .is_none_or(|name| !is_item_tree_filtered_attr(name.text()))
-                                    {
-                                        strip_current_attr = should_strip_attr();
-                                    }
-                                }
-                                Meta::TokenTree { path, tt } => {
-                                    if path.segments.len() != 1
-                                        || !is_item_tree_filtered_attr(path.segments[0].text())
-                                    {
-                                        strip_current_attr = should_strip_attr();
-                                    }
-
-                                    if path.segments.len() == 1 {
-                                        let name = path.segments[0].text();
-
-                                        if name == "cfg" {
-                                            let cfg_expr = CfgExpr::parse_from_ast(
-                                                &mut TokenTreeChildren::new(&tt).peekable(),
-                                            );
-                                            if cfg_options().check(&cfg_expr) == Some(false) {
-                                                return ControlFlow::Break(ItemIsCfgedOut);
-                                            }
-                                            strip_current_attr = true;
-                                        }
-                                    }
-                                }
-                                Meta::Path { path } => {
-                                    if path.segments.len() != 1
-                                        || !is_item_tree_filtered_attr(path.segments[0].text())
-                                    {
-                                        strip_current_attr = should_strip_attr();
-                                    }
-                                }
-                            }
-
-                            if !strip_current_attr {
-                                attrs[attrs_idx]
-                                    .expanded_attrs
-                                    .push(ExpandedAttrToProcess { range });
-                            }
-
-                            ControlFlow::Continue(())
-                        },
-                    );
-                    attrs_idx = 0;
-
-                    if strip_current_item.is_some() {
-                        preorder.skip_subtree();
-                        attrs.clear();
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+enum CfgExprStage {
+    /// Stripping the CFGExpr part of the attribute
+    StrippingCfgExpr,
+    /// Found the comma after the CFGExpr. Will keep all tokens until the next comma or the end of the attribute
+    FoundComma,
+    /// Everything following the attribute. This could be another attribute or the end of the attribute.
+    // FIXME: cfg_attr with multiple attributes will not be handled correctly. We will only keep the first attribute
+    // Related Issue: https://github.com/rust-lang/rust-analyzer/issues/10110
+    EverythingElse,
+}
 
-                        'eat_comma: {
-                            // If there is a comma after this node, eat it too.
-                            let mut events_until_comma = 0;
-                            for event in preorder.clone() {
-                                match event {
-                                    WalkEvent::Enter(SyntaxElement::Node(_))
-                                    | WalkEvent::Leave(_) => {}
-                                    WalkEvent::Enter(SyntaxElement::Token(token)) => {
-                                        let kind = token.kind();
-                                        if kind == T![,] {
-                                            break;
-                                        } else if !kind.is_trivia() {
-                                            break 'eat_comma;
-                                        }
-                                    }
-                                }
-                                events_until_comma += 1;
-                            }
-                            preorder.nth(events_until_comma);
-                        }
+/// This function creates its own set of tokens to remove, to guard against malformed syntax in the input.
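+/// For example, for `#[cfg_attr(feature = "std", derive(Debug))]` it marks the `cfg_attr` path,
+/// the cfg expression with its trailing comma, and the surrounding parentheses for removal,
+/// leaving `derive(Debug)` behind.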
+fn remove_tokens_within_cfg_attr(meta: Meta) -> Option<FxHashSet<SyntaxElement>> {
+    let mut remove: FxHashSet<SyntaxElement> = FxHashSet::default();
+    debug!("Enabling attribute {}", meta);
+    let meta_path = meta.path()?;
+    debug!("Removing {:?}", meta_path.syntax());
+    remove.insert(meta_path.syntax().clone().into());
 
-                        return (false, Vec::new());
-                    }
+    let meta_tt = meta.token_tree()?;
+    debug!("meta_tt {}", meta_tt);
+    let mut stage = CfgExprStage::StrippingCfgExpr;
+    for tt in meta_tt.token_trees_and_tokens() {
+        debug!("Checking {:?}. Stage: {:?}", tt, stage);
+        match (stage, tt) {
+            (CfgExprStage::StrippingCfgExpr, syntax::NodeOrToken::Node(node)) => {
+                remove.insert(node.syntax().clone().into());
+            }
+            (CfgExprStage::StrippingCfgExpr, syntax::NodeOrToken::Token(token)) => {
+                if token.kind() == T![,] {
+                    stage = CfgExprStage::FoundComma;
                 }
+                remove.insert(token.into());
+            }
+            (CfgExprStage::FoundComma, syntax::NodeOrToken::Token(token))
+                if (token.kind() == T![,] || token.kind() == T![')']) =>
+            {
+                // The end of the attribute or separator for the next attribute
+                stage = CfgExprStage::EverythingElse;
+                remove.insert(token.into());
             }
-            WalkEvent::Leave(SyntaxElement::Node(node)) => {
-                if ast::Attr::can_cast(node.kind()) {
-                    in_attr = false;
-                    attrs_idx += 1;
+            (CfgExprStage::EverythingElse, syntax::NodeOrToken::Node(node)) => {
+                remove.insert(node.syntax().clone().into());
+            }
+            (CfgExprStage::EverythingElse, syntax::NodeOrToken::Token(token)) => {
+                remove.insert(token.into());
+            }
+            // This is an actual attribute
+            _ => {}
+        }
+    }
+    if stage != CfgExprStage::EverythingElse {
+        warn!("Invalid cfg_attr attribute. {:?}", meta_tt);
+        return None;
+    }
+    Some(remove)
+}
+/// Removes a possible comma after the [AstNode]
+fn remove_possible_comma(item: &impl AstNode, res: &mut FxHashSet<SyntaxElement>) {
+    if let Some(comma) = item.syntax().next_sibling_or_token().filter(|it| it.kind() == T![,]) {
+        res.insert(comma);
+    }
+}
+fn process_enum(
+    db: &dyn ExpandDatabase,
+    variants: VariantList,
+    krate: Crate,
+    remove: &mut FxHashSet<SyntaxElement>,
+) -> Option<()> {
+    'variant: for variant in variants.variants() {
+        for attr in variant.attrs() {
+            if let Some(enabled) = check_cfg(db, &attr, krate) {
+                if enabled {
+                    debug!("censoring {:?}", attr.syntax());
+                    remove.insert(attr.syntax().clone().into());
+                } else {
+                    // Rustc does not strip the attribute if it is enabled. So we will leave it
+                    debug!("censoring type {:?}", variant.syntax());
+                    remove.insert(variant.syntax().clone().into());
+                    // We need to remove the , as well
+                    remove_possible_comma(&variant, remove);
+                    continue 'variant;
+                }
+            }
+
+            if let Some(enabled) = check_cfg_attr(db, &attr, krate) {
+                if enabled {
+                    debug!("Removing cfg_attr tokens {:?}", attr);
+                    let meta = attr.meta()?;
+                    let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?;
+                    remove.extend(removes_from_cfg_attr);
+                } else {
+                    debug!("censoring type cfg_attr {:?}", variant.syntax());
+                    remove.insert(attr.syntax().clone().into());
                 }
             }
-            WalkEvent::Enter(SyntaxElement::Token(token)) => {
-                if !in_attr {
-                    return (true, Vec::new());
+        }
+        if let Some(fields) = variant.field_list() {
+            match fields {
+                ast::FieldList::RecordFieldList(fields) => {
+                    process_has_attrs_with_possible_comma(db, fields.fields(), krate, remove)?;
+                }
+                ast::FieldList::TupleFieldList(fields) => {
+                    process_has_attrs_with_possible_comma(db, fields.fields(), krate, remove)?;
                 }
+            }
+        }
+    }
+    Some(())
+}
 
-                let Some(ast_attr) = attrs.get_mut(attrs_idx) else {
-                    return (true, Vec::new());
-                };
-                let token_range = token.text_range();
-                let Some(expanded_attr) = ast_attr.expanded_attrs.get(ast_attr.expanded_attrs_idx)
-                else {
-                    // No expanded attributes in this `ast::Attr`, or we finished them all already, either way
-                    // the remaining tokens should be discarded.
-                    return (false, Vec::new());
-                };
-                match ast_attr.next_expanded_attr {
-                    NextExpandedAttrState::NotStarted => {
-                        if token_range.start() >= expanded_attr.range.start() {
-                            // We started the next attribute.
-                            let mut insert_tokens = Vec::with_capacity(3);
-                            insert_tokens.push(tt::Leaf::Punct(tt::Punct {
-                                char: '#',
-                                spacing: tt::Spacing::Alone,
-                                span: ast_attr.pound_span,
-                            }));
-                            if let Some(span) = ast_attr.excl_span {
-                                insert_tokens.push(tt::Leaf::Punct(tt::Punct {
-                                    char: '!',
-                                    spacing: tt::Spacing::Alone,
-                                    span,
-                                }));
-                            }
-                            insert_tokens.push(tt::Leaf::Punct(tt::Punct {
-                                char: '[',
-                                spacing: tt::Spacing::Alone,
-                                span: ast_attr.brackets_span.open,
-                            }));
+pub(crate) fn process_cfg_attrs(
+    db: &dyn ExpandDatabase,
+    node: &SyntaxNode,
+    loc: &MacroCallLoc,
+) -> Option<FxHashSet<SyntaxElement>> {
+    // FIXME: #[cfg_eval] is not implemented. But it is not stable yet
+    let is_derive = match loc.def.kind {
+        MacroDefKind::BuiltInDerive(..)
+        | MacroDefKind::ProcMacro(_, _, ProcMacroKind::CustomDerive) => true,
+        MacroDefKind::BuiltInAttr(_, expander) => expander.is_derive(),
+        _ => false,
+    };
+    let mut remove = FxHashSet::default();
 
-                            ast_attr.next_expanded_attr = NextExpandedAttrState::InTheMiddle;
+    let item = ast::Item::cast(node.clone())?;
+    for attr in item.attrs() {
+        if let Some(enabled) = check_cfg_attr(db, &attr, loc.krate) {
+            if enabled {
+                debug!("Removing cfg_attr tokens {:?}", attr);
+                let meta = attr.meta()?;
+                let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?;
+                remove.extend(removes_from_cfg_attr);
+            } else {
+                debug!("Removing type cfg_attr {:?}", item.syntax());
+                remove.insert(attr.syntax().clone().into());
+            }
+        }
+    }
 
-                            return (true, insert_tokens);
-                        } else {
-                            // Before any attribute or between the attributes.
-                            return (false, Vec::new());
-                        }
-                    }
-                    NextExpandedAttrState::InTheMiddle => {
-                        if token_range.start() >= expanded_attr.range.end() {
-                            // Finished the current attribute.
-                            let insert_tokens = vec![tt::Leaf::Punct(tt::Punct {
-                                char: ']',
-                                spacing: tt::Spacing::Alone,
-                                span: ast_attr.brackets_span.close,
-                            })];
+    if is_derive {
+        // Only derives get their code cfg-clean, normal attribute macros process only the cfg at their level
+        // (cfg_attr is handled above, cfg is handled in the def map).
+        match item {
+            ast::Item::Struct(it) => match it.field_list()? {
+                ast::FieldList::RecordFieldList(fields) => {
+                    process_has_attrs_with_possible_comma(
+                        db,
+                        fields.fields(),
+                        loc.krate,
+                        &mut remove,
+                    )?;
+                }
+                ast::FieldList::TupleFieldList(fields) => {
+                    process_has_attrs_with_possible_comma(
+                        db,
+                        fields.fields(),
+                        loc.krate,
+                        &mut remove,
+                    )?;
+                }
+            },
+            ast::Item::Enum(it) => {
+                process_enum(db, it.variant_list()?, loc.krate, &mut remove)?;
+            }
+            ast::Item::Union(it) => {
+                process_has_attrs_with_possible_comma(
+                    db,
+                    it.record_field_list()?.fields(),
+                    loc.krate,
+                    &mut remove,
+                )?;
+            }
+            // FIXME: Implement for other items if necessary. As we do not support #[cfg_eval] yet, we do not need to implement it for now
+            _ => {}
+        }
+    }
+    Some(remove)
+}
+/// Parses a `cfg` attribute from the meta
+fn parse_from_attr_token_tree(tt: &TokenTree) -> Option<CfgExpr> {
+    let mut iter = tt
+        .token_trees_and_tokens()
+        .filter(is_not_whitespace)
+        .skip(1)
+        .take_while(is_not_closing_paren)
+        .peekable();
+    next_cfg_expr_from_syntax(&mut iter)
+}
 
-                            ast_attr.next_expanded_attr = NextExpandedAttrState::NotStarted;
-                            ast_attr.expanded_attrs_idx += 1;
+fn is_not_closing_paren(element: &NodeOrToken<TokenTree, syntax::SyntaxToken>) -> bool {
+    !matches!(element, NodeOrToken::Token(token) if (token.kind() == syntax::T![')']))
+}
+fn is_not_whitespace(element: &NodeOrToken<TokenTree, syntax::SyntaxToken>) -> bool {
+    !matches!(element, NodeOrToken::Token(token) if (token.kind() == SyntaxKind::WHITESPACE))
+}
 
-                            // It's safe to ignore the current token because between attributes
-                            // there is always at least one token we skip - either the closing bracket
-                            // in `#[]` or the comma in case of multiple attrs in `cfg_attr` expansion.
-                            return (false, insert_tokens);
-                        } else {
-                            // Still in the middle.
-                            return (true, Vec::new());
-                        }
-                    }
+fn next_cfg_expr_from_syntax<I>(iter: &mut Peekable<I>) -> Option<CfgExpr>
+where
+    I: Iterator<Item = NodeOrToken<TokenTree, syntax::SyntaxToken>>,
+{
+    let name = match iter.next() {
+        None => return None,
+        Some(NodeOrToken::Token(element)) => match element.kind() {
+            syntax::T![ident] => Symbol::intern(element.text()),
+            _ => return Some(CfgExpr::Invalid),
+        },
+        Some(_) => return Some(CfgExpr::Invalid),
+    };
+    let result = match &name {
+        s if [&sym::all, &sym::any, &sym::not].contains(&s) => {
+            let mut preds = Vec::new();
+            let Some(NodeOrToken::Node(tree)) = iter.next() else {
+                return Some(CfgExpr::Invalid);
+            };
+            let mut tree_iter = tree
+                .token_trees_and_tokens()
+                .filter(is_not_whitespace)
+                .skip(1)
+                .take_while(is_not_closing_paren)
+                .peekable();
+            while tree_iter.peek().is_some() {
+                let pred = next_cfg_expr_from_syntax(&mut tree_iter);
+                if let Some(pred) = pred {
+                    preds.push(pred);
                 }
             }
-            WalkEvent::Leave(SyntaxElement::Token(_)) => {}
+            let group = match &name {
+                s if *s == sym::all => CfgExpr::All(preds.into_boxed_slice()),
+                s if *s == sym::any => CfgExpr::Any(preds.into_boxed_slice()),
+                s if *s == sym::not => {
+                    CfgExpr::Not(Box::new(preds.pop().unwrap_or(CfgExpr::Invalid)))
+                }
+                _ => unreachable!(),
+            };
+            Some(group)
         }
-        (true, Vec::new())
+        _ => match iter.peek() {
+            Some(NodeOrToken::Token(element)) if (element.kind() == syntax::T![=]) => {
+                iter.next();
+                match iter.next() {
+                    Some(NodeOrToken::Token(value_token))
+                        if (value_token.kind() == syntax::SyntaxKind::STRING) =>
+                    {
+                        let value = value_token.text();
+                        Some(CfgExpr::Atom(CfgAtom::KeyValue {
+                            key: name,
+                            value: Symbol::intern(value.trim_matches('"')),
+                        }))
+                    }
+                    _ => None,
+                }
+            }
+            _ => Some(CfgExpr::Atom(CfgAtom::Flag(name))),
+        },
+    };
+    if let Some(NodeOrToken::Token(element)) = iter.peek()
+        && element.kind() == syntax::T![,]
+    {
+        iter.next();
     }
+    result
 }
+#[cfg(test)]
+mod tests {
+    use cfg::DnfExpr;
+    use expect_test::{Expect, expect};
+    use syntax::{AstNode, SourceFile, ast::Attr};
 
-pub(crate) fn attr_macro_input_to_token_tree(
-    db: &dyn ExpandDatabase,
-    node: &SyntaxNode,
-    span_map: SpanMapRef<'_>,
-    span: Span,
-    is_derive: bool,
-    censor_item_tree_attr_ids: &[AttrId],
-    krate: Crate,
-) -> (tt::TopSubtree, SyntaxFixupUndoInfo) {
-    let fixups = fixup::fixup_syntax(span_map, node, span, DocCommentDesugarMode::ProcMacro);
-    (
-        syntax_bridge::syntax_node_to_token_tree_modified(
-            node,
-            span_map,
-            fixups.append,
-            fixups.remove,
-            span,
-            DocCommentDesugarMode::ProcMacro,
-            macro_input_callback(db, is_derive, censor_item_tree_attr_ids, krate, span, span_map),
-        ),
-        fixups.undo_info,
-    )
-}
+    use crate::cfg_process::parse_from_attr_token_tree;
 
-pub fn check_cfg_attr_value(
-    db: &dyn ExpandDatabase,
-    attr: &ast::TokenTree,
-    krate: Crate,
-) -> Option<bool> {
-    let cfg_expr = CfgExpr::parse_from_ast(&mut TokenTreeChildren::new(attr).peekable());
-    krate.cfg_options(db).check(&cfg_expr)
+    fn check_dnf_from_syntax(input: &str, expect: Expect) {
+        let parse = SourceFile::parse(input, span::Edition::CURRENT);
+        let node = match parse.tree().syntax().descendants().find_map(Attr::cast) {
+            Some(it) => it,
+            None => {
+                let node = std::any::type_name::<Attr>();
+                panic!("Failed to make ast node `{node}` from text {input}")
+            }
+        };
+        let node = node.clone_subtree();
+        assert_eq!(node.syntax().text_range().start(), 0.into());
+
+        let cfg = parse_from_attr_token_tree(&node.meta().unwrap().token_tree().unwrap()).unwrap();
+        let actual = format!("#![cfg({})]", DnfExpr::new(&cfg));
+        expect.assert_eq(&actual);
+    }
+    #[test]
+    fn cfg_from_attr() {
+        check_dnf_from_syntax(r#"#[cfg(test)]"#, expect![[r#"#![cfg(test)]"#]]);
+        check_dnf_from_syntax(r#"#[cfg(not(never))]"#, expect![[r#"#![cfg(not(never))]"#]]);
+    }
 }
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
index 6b5aa39fa6bf1..888c1405a6bb1 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
@@ -1,9 +1,11 @@
 //! Defines database & queries for macro expansion.
 
 use base_db::{Crate, RootQueryDb};
+use either::Either;
 use mbe::MatchedArmIndex;
+use rustc_hash::FxHashSet;
 use span::{AstIdMap, Edition, Span, SyntaxContext};
-use syntax::{AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T, ast};
+use syntax::{AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T, ast};
 use syntax_bridge::{DocCommentDesugarMode, syntax_node_to_token_tree};
 use triomphe::Arc;
 
@@ -11,9 +13,9 @@ use crate::{
     AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
     EagerExpander, EditionedFileId, ExpandError, ExpandResult, ExpandTo, HirFileId, MacroCallId,
     MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
-    attrs::Meta,
+    attrs::{AttrId, AttrInput, RawAttrs, collect_attrs},
     builtin::pseudo_derive_attr_expansion,
-    cfg_process::attr_macro_input_to_token_tree,
+    cfg_process,
     declarative::DeclarativeMacroExpander,
     fixup::{self, SyntaxFixupUndoInfo},
     hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt},
@@ -175,7 +177,7 @@ pub fn expand_speculative(
     let span_map = SpanMapRef::RealSpanMap(&span_map);
 
     // Build the subtree and token mapping for the speculative args
-    let (mut tt, undo_info) = match &loc.kind {
+    let (mut tt, undo_info) = match loc.kind {
         MacroCallKind::FnLike { .. } => (
             syntax_bridge::syntax_node_to_token_tree(
                 speculative_args,
@@ -198,35 +200,48 @@ pub fn expand_speculative(
             ),
             SyntaxFixupUndoInfo::NONE,
         ),
-        MacroCallKind::Derive { derive_macro_id, .. } => {
-            let MacroCallKind::Attr { censored_attr_ids: attr_ids, .. } =
-                &derive_macro_id.loc(db).kind
-            else {
-                unreachable!("`derive_macro_id` should be `MacroCallKind::Attr`");
+        MacroCallKind::Derive { derive_attr_index: index, .. }
+        | MacroCallKind::Attr { invoc_attr_index: index, .. } => {
+            let censor = if let MacroCallKind::Derive { .. } = loc.kind {
+                censor_derive_input(index, &ast::Adt::cast(speculative_args.clone())?)
+            } else {
+                attr_source(index, &ast::Item::cast(speculative_args.clone())?)
+                    .into_iter()
+                    .map(|it| it.syntax().clone().into())
+                    .collect()
             };
-            attr_macro_input_to_token_tree(
-                db,
-                speculative_args,
+
+            let censor_cfg =
+                cfg_process::process_cfg_attrs(db, speculative_args, &loc).unwrap_or_default();
+            let mut fixups = fixup::fixup_syntax(
                 span_map,
+                speculative_args,
                 span,
-                true,
-                attr_ids,
-                loc.krate,
+                DocCommentDesugarMode::ProcMacro,
+            );
+            fixups.append.retain(|it, _| match it {
+                syntax::NodeOrToken::Token(_) => true,
+                it => !censor.contains(it) && !censor_cfg.contains(it),
+            });
+            fixups.remove.extend(censor);
+            fixups.remove.extend(censor_cfg);
+
+            (
+                syntax_bridge::syntax_node_to_token_tree_modified(
+                    speculative_args,
+                    span_map,
+                    fixups.append,
+                    fixups.remove,
+                    span,
+                    DocCommentDesugarMode::ProcMacro,
+                ),
+                fixups.undo_info,
             )
         }
-        MacroCallKind::Attr { censored_attr_ids: attr_ids, .. } => attr_macro_input_to_token_tree(
-            db,
-            speculative_args,
-            span_map,
-            span,
-            false,
-            attr_ids,
-            loc.krate,
-        ),
     };
 
-    let attr_arg = match &loc.kind {
-        MacroCallKind::Attr { censored_attr_ids: attr_ids, .. } => {
+    let attr_arg = match loc.kind {
+        MacroCallKind::Attr { invoc_attr_index, .. } => {
             if loc.def.is_attribute_derive() {
                 // for pseudo-derive expansion we actually pass the attribute itself only
                 ast::Attr::cast(speculative_args.clone()).and_then(|attr| attr.token_tree()).map(
@@ -245,21 +260,18 @@ pub fn expand_speculative(
                 // Attributes may have an input token tree, build the subtree and map for this as well
                 // then try finding a token id for our token if it is inside this input subtree.
                 let item = ast::Item::cast(speculative_args.clone())?;
-                let (_, _, _, meta) =
-                    attr_ids.invoc_attr().find_attr_range_with_source(db, loc.krate, &item);
-                match meta {
-                    Meta::TokenTree { tt, .. } => {
-                        let mut attr_arg = syntax_bridge::syntax_node_to_token_tree(
-                            tt.syntax(),
-                            span_map,
-                            span,
-                            DocCommentDesugarMode::ProcMacro,
-                        );
-                        attr_arg.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Invisible;
-                        Some(attr_arg)
+                let attrs = RawAttrs::new_expanded(db, &item, span_map, loc.krate.cfg_options(db));
+                attrs.iter().find(|attr| attr.id == invoc_attr_index).and_then(|attr| {
+                    match attr.input.as_deref()? {
+                        AttrInput::TokenTree(tt) => {
+                            let mut attr_arg = tt.clone();
+                            attr_arg.top_subtree_delimiter_mut().kind =
+                                tt::DelimiterKind::Invisible;
+                            Some(attr_arg)
+                        }
+                        AttrInput::Literal(_) => None,
                     }
-                    _ => None,
-                }
+                })
             }
         }
         _ => None,
@@ -421,7 +433,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
     let (parse, map) = parse_with_map(db, loc.kind.file_id());
     let root = parse.syntax_node();
 
-    let (is_derive, censor_item_tree_attr_ids, item_node, span) = match &loc.kind {
+    let (censor, item_node, span) = match loc.kind {
         MacroCallKind::FnLike { ast_id, .. } => {
             let node = &ast_id.to_ptr(db).to_node(&root);
             let path_range = node
@@ -489,29 +501,53 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
         MacroCallKind::Derive { .. } => {
             unreachable!("`ExpandDatabase::macro_arg` called with `MacroCallKind::Derive`")
         }
-        MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => {
+        MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
             let node = ast_id.to_ptr(db).to_node(&root);
-            let range = attr_ids
-                .invoc_attr()
-                .find_attr_range_with_source(db, loc.krate, &node)
-                .3
-                .path_range();
-            let span = map.span_for_range(range);
-
-            let is_derive = matches!(loc.def.kind, MacroDefKind::BuiltInAttr(_, expander) if expander.is_derive());
-            (is_derive, &**attr_ids, node, span)
+            let attr_source = attr_source(invoc_attr_index, &node);
+
+            let span = map.span_for_range(
+                attr_source
+                    .as_ref()
+                    .and_then(|it| it.path())
+                    .map_or_else(|| node.syntax().text_range(), |it| it.syntax().text_range()),
+            );
+            // If derive attribute we need to censor the derive input
+            if matches!(loc.def.kind, MacroDefKind::BuiltInAttr(_, expander) if expander.is_derive())
+                && ast::Adt::can_cast(node.syntax().kind())
+            {
+                let adt = ast::Adt::cast(node.syntax().clone()).unwrap();
+                let censor_derive_input = censor_derive_input(invoc_attr_index, &adt);
+                (censor_derive_input, node, span)
+            } else {
+                (attr_source.into_iter().map(|it| it.syntax().clone().into()).collect(), node, span)
+            }
         }
     };
 
-    let (mut tt, undo_info) = attr_macro_input_to_token_tree(
-        db,
-        item_node.syntax(),
-        map.as_ref(),
-        span,
-        is_derive,
-        censor_item_tree_attr_ids,
-        loc.krate,
-    );
+    let (mut tt, undo_info) = {
+        let syntax = item_node.syntax();
+        let censor_cfg = cfg_process::process_cfg_attrs(db, syntax, &loc).unwrap_or_default();
+        let mut fixups =
+            fixup::fixup_syntax(map.as_ref(), syntax, span, DocCommentDesugarMode::ProcMacro);
+        fixups.append.retain(|it, _| match it {
+            syntax::NodeOrToken::Token(_) => true,
+            it => !censor.contains(it) && !censor_cfg.contains(it),
+        });
+        fixups.remove.extend(censor);
+        fixups.remove.extend(censor_cfg);
+
+        (
+            syntax_bridge::syntax_node_to_token_tree_modified(
+                syntax,
+                map,
+                fixups.append,
+                fixups.remove,
+                span,
+                DocCommentDesugarMode::ProcMacro,
+            ),
+            fixups.undo_info,
+        )
+    };
 
     if loc.def.is_proc_macro() {
         // proc macros expect their inputs without parentheses, MBEs expect it with them included
@@ -521,6 +557,31 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult {
     (Arc::new(tt), undo_info, span)
 }
 
+// FIXME: Censoring info should be calculated by the caller! Namely by name resolution
+/// Derives expect all `#[derive(..)]` invocations up to (and including) the currently invoked one to be stripped
+fn censor_derive_input(derive_attr_index: AttrId, node: &ast::Adt) -> FxHashSet<SyntaxElement> {
+    // FIXME: handle `cfg_attr`
+    cov_mark::hit!(derive_censoring);
+    collect_attrs(node)
+        .take(derive_attr_index.ast_index() + 1)
+        .filter_map(|(_, attr)| Either::left(attr))
+        // FIXME, this resolution should not be done syntactically
+        // derive is a proper macro now, no longer builtin
+        // But we do not have resolution at this stage, this means
+        // we need to know about all macro calls for the given ast item here
+        // so we require some kind of mapping...
+        .filter(|attr| attr.simple_name().as_deref() == Some("derive"))
+        .map(|it| it.syntax().clone().into())
+        .collect()
+}
+
+/// Attributes expect the invoking attribute to be stripped
+fn attr_source(invoc_attr_index: AttrId, node: &ast::Item) -> Option<ast::Attr> {
+    // FIXME: handle `cfg_attr`
+    cov_mark::hit!(attribute_macro_attr_censoring);
+    collect_attrs(node).nth(invoc_attr_index.ast_index()).and_then(|(_, attr)| Either::left(attr))
+}
+
 impl TokenExpander {
     fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
         match id.kind {
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs b/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs
index 3fb9aca9649ef..0d100c1364ab1 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs
@@ -1,20 +1,16 @@
 //! Compiled declarative macro expanders (`macro_rules!` and `macro`)
 
-use std::{cell::OnceCell, ops::ControlFlow};
-
 use base_db::Crate;
+use intern::sym;
 use span::{Edition, Span, SyntaxContext};
 use stdx::TupleExt;
-use syntax::{
-    AstNode, AstToken,
-    ast::{self, HasAttrs},
-};
+use syntax::{AstNode, ast};
 use syntax_bridge::DocCommentDesugarMode;
 use triomphe::Arc;
 
 use crate::{
     AstId, ExpandError, ExpandErrorKind, ExpandResult, HirFileId, Lookup, MacroCallId,
-    attrs::{Meta, expand_cfg_attr},
+    attrs::RawAttrs,
     db::ExpandDatabase,
     hygiene::{Transparency, apply_mark},
     tt,
@@ -84,28 +80,29 @@ impl DeclarativeMacroExpander {
         let (root, map) = crate::db::parse_with_map(db, id.file_id);
         let root = root.syntax_node();
 
-        let transparency = |node: ast::AnyHasAttrs| {
-            let cfg_options = OnceCell::new();
-            expand_cfg_attr(
-                node.attrs(),
-                || cfg_options.get_or_init(|| def_crate.cfg_options(db)),
-                |attr, _, _, _| {
-                    if let Meta::NamedKeyValue { name: Some(name), value, .. } = attr
-                        && name.text() == "rustc_macro_transparency"
-                        && let Some(value) = value.and_then(ast::String::cast)
-                        && let Ok(value) = value.value()
-                    {
-                        match &*value {
-                            "transparent" => ControlFlow::Break(Transparency::Transparent),
-                            "semitransparent" => ControlFlow::Break(Transparency::SemiTransparent),
-                            "opaque" => ControlFlow::Break(Transparency::Opaque),
-                            _ => ControlFlow::Continue(()),
-                        }
-                    } else {
-                        ControlFlow::Continue(())
-                    }
+        let transparency = |node| {
+            // ... would be nice to have the item tree here
+            let attrs = RawAttrs::new_expanded(db, node, map.as_ref(), def_crate.cfg_options(db));
+            match attrs
+                .iter()
+                .find(|it| {
+                    it.path
+                        .as_ident()
+                        .map(|it| *it == sym::rustc_macro_transparency)
+                        .unwrap_or(false)
+                })?
+                .token_tree_value()?
+                .token_trees()
+                .flat_tokens()
+            {
+                [tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &i.sym {
+                    s if *s == sym::transparent => Some(Transparency::Transparent),
+                    s if *s == sym::semitransparent => Some(Transparency::SemiTransparent),
+                    s if *s == sym::opaque => Some(Transparency::Opaque),
+                    _ => None,
                 },
-            )
+                _ => None,
+            }
         };
         let ctx_edition = |ctx: SyntaxContext| {
             if ctx.is_root() {
@@ -136,8 +133,7 @@ impl DeclarativeMacroExpander {
                         "expected a token tree".into(),
                     )),
                 },
-                transparency(ast::AnyHasAttrs::from(macro_rules))
-                    .unwrap_or(Transparency::SemiTransparent),
+                transparency(&macro_rules).unwrap_or(Transparency::SemiTransparent),
             ),
             ast::Macro::MacroDef(macro_def) => (
                 match macro_def.body() {
@@ -165,7 +161,7 @@ impl DeclarativeMacroExpander {
                         "expected a token tree".into(),
                     )),
                 },
-                transparency(macro_def.into()).unwrap_or(Transparency::Opaque),
+                transparency(&macro_def).unwrap_or(Transparency::Opaque),
             ),
         };
         let edition = ctx_edition(match id.file_id {
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs
index fe557d68023d8..a7f3e27a45539 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs
@@ -55,6 +55,30 @@ impl From for HirFilePosition {
     }
 }
 
+impl FilePositionWrapper<span::FileId> {
+    pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> FilePosition {
+        FilePositionWrapper {
+            file_id: EditionedFileId::new(db, self.file_id, edition),
+            offset: self.offset,
+        }
+    }
+}
+
+impl FileRangeWrapper<span::FileId> {
+    pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> FileRange {
+        FileRangeWrapper {
+            file_id: EditionedFileId::new(db, self.file_id, edition),
+            range: self.range,
+        }
+    }
+}
+
+impl<T> InFileWrapper<span::FileId, T> {
+    pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> InRealFile<T> {
+        InRealFile { file_id: EditionedFileId::new(db, self.file_id, edition), value: self.value }
+    }
+}
+
 impl HirFileRange {
     pub fn file_range(self) -> Option<FileRange> {
         Some(FileRange { file_id: self.file_id.file_id()?, range: self.range })
@@ -383,7 +407,7 @@ impl InFile {
 
                 // Fall back to whole macro call.
                 let loc = db.lookup_intern_macro_call(mac_file);
-                loc.kind.original_call_range(db, loc.krate)
+                loc.kind.original_call_range(db)
             }
         }
     }
@@ -429,10 +453,7 @@ impl InFile {
                     Some(it) => it,
                     None => {
                         let loc = db.lookup_intern_macro_call(mac_file);
-                        (
-                            loc.kind.original_call_range(db, loc.krate),
-                            SyntaxContext::root(loc.def.edition),
-                        )
+                        (loc.kind.original_call_range(db), SyntaxContext::root(loc.def.edition))
                     }
                 }
             }
@@ -447,7 +468,7 @@ impl InFile {
                     Some(it) => it,
                     _ => {
                         let loc = db.lookup_intern_macro_call(mac_file);
-                        loc.kind.original_call_range(db, loc.krate)
+                        loc.kind.original_call_range(db)
                     }
                 }
             }
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
index cba1c7c1d4b05..fe77e1565987f 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
@@ -523,7 +523,6 @@ mod tests {
             fixups.remove,
             span_map.span_for_range(TextRange::empty(0.into())),
             DocCommentDesugarMode::Mbe,
-            |_, _| (true, Vec::new()),
         );
 
         let actual = format!("{tt}\n");
@@ -699,7 +698,7 @@ fn foo() {
 }
 "#,
             expect![[r#"
-fn foo () {a .__ra_fixup ;}
+fn foo () {a . __ra_fixup ;}
 "#]],
         )
     }
@@ -714,7 +713,7 @@ fn foo() {
 }
 "#,
             expect![[r#"
-fn foo () {a .__ra_fixup ; bar () ;}
+fn foo () {a . __ra_fixup ; bar () ;}
 "#]],
         )
     }
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
index e1103ef43e0f3..472ec83ffef5b 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
@@ -25,17 +25,18 @@ mod cfg_process;
 mod fixup;
 mod prettify_macro_expansion_;
 
+use attrs::collect_attrs;
+use rustc_hash::FxHashMap;
 use salsa::plumbing::{AsId, FromId};
 use stdx::TupleExt;
-use thin_vec::ThinVec;
 use triomphe::Arc;
 
 use core::fmt;
-use std::{hash::Hash, ops};
+use std::hash::Hash;
 
 use base_db::Crate;
 use either::Either;
-use span::{Edition, ErasedFileAstId, FileAstId, Span, SyntaxContext};
+use span::{Edition, ErasedFileAstId, FileAstId, Span, SpanAnchor, SyntaxContext};
 use syntax::{
     SyntaxNode, SyntaxToken, TextRange, TextSize,
     ast::{self, AstNode},
@@ -316,6 +317,9 @@ pub enum MacroCallKind {
     Derive {
         ast_id: AstId<ast::Adt>,
         /// Syntactical index of the invoking `#[derive]` attribute.
+        ///
+        /// Outer attributes are counted first, then inner attributes. This does not support
+        /// out-of-line modules, which may have attributes spread across 2 files!
         derive_attr_index: AttrId,
         /// Index of the derive macro in the derive attribute
         derive_index: u32,
@@ -325,68 +329,17 @@ pub enum MacroCallKind {
     },
     Attr {
         ast_id: AstId<ast::Item>,
-        // FIXME: This shouldn't be here, we can derive this from `invoc_attr_index`.
+        // FIXME: This shouldn't be here, we can derive this from `invoc_attr_index`
+        // but we need to fix the `cfg_attr` handling first.
         attr_args: Option<Arc<tt::TopSubtree>>,
-        /// This contains the list of all *active* attributes (derives and attr macros) preceding this
-        /// attribute, including this attribute. You can retrieve the [`AttrId`] of the current attribute
-        /// by calling [`invoc_attr()`] on this.
-        ///
-        /// The macro should not see the attributes here.
+        /// Syntactical index of the invoking `#[attribute]`.
         ///
-        /// [`invoc_attr()`]: AttrMacroAttrIds::invoc_attr
-        censored_attr_ids: AttrMacroAttrIds,
+        /// Outer attributes are counted first, then inner attributes. This does not support
+        /// out-of-line modules, which may have attributes spread across 2 files!
+        invoc_attr_index: AttrId,
     },
 }
 
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct AttrMacroAttrIds(AttrMacroAttrIdsRepr);
-
-impl AttrMacroAttrIds {
-    #[inline]
-    pub fn from_one(id: AttrId) -> Self {
-        Self(AttrMacroAttrIdsRepr::One(id))
-    }
-
-    #[inline]
-    pub fn from_many(ids: &[AttrId]) -> Self {
-        if let &[id] = ids {
-            Self(AttrMacroAttrIdsRepr::One(id))
-        } else {
-            Self(AttrMacroAttrIdsRepr::ManyDerives(ids.iter().copied().collect()))
-        }
-    }
-}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-enum AttrMacroAttrIdsRepr {
-    One(AttrId),
-    ManyDerives(ThinVec<AttrId>),
-}
-
-impl ops::Deref for AttrMacroAttrIds {
-    type Target = [AttrId];
-
-    #[inline]
-    fn deref(&self) -> &Self::Target {
-        match &self.0 {
-            AttrMacroAttrIdsRepr::One(one) => std::slice::from_ref(one),
-            AttrMacroAttrIdsRepr::ManyDerives(many) => many,
-        }
-    }
-}
-
-impl AttrMacroAttrIds {
-    #[inline]
-    pub fn invoc_attr(&self) -> AttrId {
-        match &self.0 {
-            AttrMacroAttrIdsRepr::One(it) => *it,
-            AttrMacroAttrIdsRepr::ManyDerives(it) => {
-                *it.last().expect("should always have at least one `AttrId`")
-            }
-        }
-    }
-}
-
 impl HirFileId {
     pub fn edition(self, db: &dyn ExpandDatabase) -> Edition {
         match self {
@@ -630,20 +583,34 @@ impl MacroDefId {
 
 impl MacroCallLoc {
     pub fn to_node(&self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode> {
-        match &self.kind {
+        match self.kind {
             MacroCallKind::FnLike { ast_id, .. } => {
                 ast_id.with_value(ast_id.to_node(db).syntax().clone())
             }
             MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
                 // FIXME: handle `cfg_attr`
-                let (attr, _, _, _) = derive_attr_index.find_attr_range(db, self.krate, *ast_id);
-                ast_id.with_value(attr.syntax().clone())
+                ast_id.with_value(ast_id.to_node(db)).map(|it| {
+                    collect_attrs(&it)
+                        .nth(derive_attr_index.ast_index())
+                        .and_then(|it| match it.1 {
+                            Either::Left(attr) => Some(attr.syntax().clone()),
+                            Either::Right(_) => None,
+                        })
+                        .unwrap_or_else(|| it.syntax().clone())
+                })
             }
-            MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => {
+            MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
                 if self.def.is_attribute_derive() {
-                    let (attr, _, _, _) =
-                        attr_ids.invoc_attr().find_attr_range(db, self.krate, *ast_id);
-                    ast_id.with_value(attr.syntax().clone())
+                    // FIXME: handle `cfg_attr`
+                    ast_id.with_value(ast_id.to_node(db)).map(|it| {
+                        collect_attrs(&it)
+                            .nth(invoc_attr_index.ast_index())
+                            .and_then(|it| match it.1 {
+                                Either::Left(attr) => Some(attr.syntax().clone()),
+                                Either::Right(_) => None,
+                            })
+                            .unwrap_or_else(|| it.syntax().clone())
+                    })
                 } else {
                     ast_id.with_value(ast_id.to_node(db).syntax().clone())
                 }
@@ -748,7 +715,7 @@ impl MacroCallKind {
     /// Here we try to roughly match what rustc does to improve diagnostics: fn-like macros
     /// get the macro path (rustc shows the whole `ast::MacroCall`), attribute macros get the
     /// attribute's range, and derives get only the specific derive that is being referred to.
-    pub fn original_call_range(self, db: &dyn ExpandDatabase, krate: Crate) -> FileRange {
+    pub fn original_call_range(self, db: &dyn ExpandDatabase) -> FileRange {
         let mut kind = self;
         let file_id = loop {
             match kind.file_id() {
@@ -770,11 +737,24 @@ impl MacroCallKind {
             }
             MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
                 // FIXME: should be the range of the macro name, not the whole derive
-                derive_attr_index.find_attr_range(db, krate, ast_id).2
+                // FIXME: handle `cfg_attr`
+                collect_attrs(&ast_id.to_node(db))
+                    .nth(derive_attr_index.ast_index())
+                    .expect("missing derive")
+                    .1
+                    .expect_left("derive is a doc comment?")
+                    .syntax()
+                    .text_range()
             }
             // FIXME: handle `cfg_attr`
-            MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => {
-                attr_ids.invoc_attr().find_attr_range(db, krate, ast_id).2
+            MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+                collect_attrs(&ast_id.to_node(db))
+                    .nth(invoc_attr_index.ast_index())
+                    .expect("missing attribute")
+                    .1
+                    .expect_left("attribute macro is a doc comment?")
+                    .syntax()
+                    .text_range()
             }
         };
 
@@ -893,8 +873,7 @@ impl ExpansionInfo {
         let span = self.exp_map.span_at(token.start());
         match &self.arg_map {
             SpanMap::RealSpanMap(_) => {
-                let file_id =
-                    EditionedFileId::from_span_guess_origin(db, span.anchor.file_id).into();
+                let file_id = EditionedFileId::from_span(db, span.anchor.file_id).into();
                 let anchor_offset =
                     db.ast_id_map(file_id).get_erased(span.anchor.ast_id).text_range().start();
                 InFile { file_id, value: smallvec::smallvec![span.range + anchor_offset] }
@@ -950,7 +929,7 @@ pub fn map_node_range_up_rooted(
         start = start.min(span.range.start());
         end = end.max(span.range.end());
     }
-    let file_id = EditionedFileId::from_span_guess_origin(db, anchor.file_id);
+    let file_id = EditionedFileId::from_span(db, anchor.file_id);
     let anchor_offset =
         db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
     Some(FileRange { file_id, range: TextRange::new(start, end) + anchor_offset })
@@ -976,12 +955,36 @@ pub fn map_node_range_up(
         start = start.min(span.range.start());
         end = end.max(span.range.end());
     }
-    let file_id = EditionedFileId::from_span_guess_origin(db, anchor.file_id);
+    let file_id = EditionedFileId::from_span(db, anchor.file_id);
     let anchor_offset =
         db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
     Some((FileRange { file_id, range: TextRange::new(start, end) + anchor_offset }, ctx))
 }
 
+/// Maps up the text range out of the expansion hierarchy back into the original file it's from.
+/// This version will aggregate the ranges of all spans with the same anchor and syntax context.
+pub fn map_node_range_up_aggregated(
+    db: &dyn ExpandDatabase,
+    exp_map: &ExpansionSpanMap,
+    range: TextRange,
+) -> FxHashMap<(SpanAnchor, SyntaxContext), TextRange> {
+    let mut map = FxHashMap::default();
+    for span in exp_map.spans_for_range(range) {
+        let range = map.entry((span.anchor, span.ctx)).or_insert_with(|| span.range);
+        *range = TextRange::new(
+            range.start().min(span.range.start()),
+            range.end().max(span.range.end()),
+        );
+    }
+    for ((anchor, _), range) in &mut map {
+        let file_id = EditionedFileId::from_span(db, anchor.file_id);
+        let anchor_offset =
+            db.ast_id_map(file_id.into()).get_erased(anchor.ast_id).text_range().start();
+        *range += anchor_offset;
+    }
+    map
+}
+
 /// Looks up the span at the given offset.
 pub fn span_for_offset(
     db: &dyn ExpandDatabase,
@@ -989,7 +992,7 @@ pub fn span_for_offset(
     offset: TextSize,
 ) -> (FileRange, SyntaxContext) {
     let span = exp_map.span_at(offset);
-    let file_id = EditionedFileId::from_span_guess_origin(db, span.anchor.file_id);
+    let file_id = EditionedFileId::from_span(db, span.anchor.file_id);
     let anchor_offset =
         db.ast_id_map(file_id.into()).get_erased(span.anchor.ast_id).text_range().start();
     (FileRange { file_id, range: span.range + anchor_offset }, span.ctx)
@@ -1059,7 +1062,7 @@ impl ExpandTo {
     }
 }
 
-intern::impl_internable!(ModPath);
+intern::impl_internable!(ModPath, attrs::AttrInput);
 
 #[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)]
 #[doc(alias = "MacroFileId")]
@@ -1122,14 +1125,6 @@ impl HirFileId {
             HirFileId::MacroFile(_) => None,
         }
     }
-
-    #[inline]
-    pub fn krate(self, db: &dyn ExpandDatabase) -> Crate {
-        match self {
-            HirFileId::FileId(it) => it.krate(db),
-            HirFileId::MacroFile(it) => it.loc(db).krate,
-        }
-    }
 }
 
 impl PartialEq for HirFileId {
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
index e9805e3f86b8c..d84d978cdb7ed 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
@@ -2,7 +2,7 @@
 
 use std::{
     fmt::{self, Display as _},
-    iter::{self, Peekable},
+    iter,
 };
 
 use crate::{
@@ -12,11 +12,10 @@ use crate::{
     tt,
 };
 use base_db::Crate;
-use intern::{Symbol, sym};
-use parser::T;
+use intern::sym;
 use smallvec::SmallVec;
 use span::{Edition, SyntaxContext};
-use syntax::{AstNode, SyntaxToken, ast};
+use syntax::{AstNode, ast};
 
 #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub struct ModPath {
@@ -65,58 +64,6 @@ impl ModPath {
         ModPath { kind, segments: SmallVec::new_const() }
     }
 
-    pub fn from_tokens(
-        db: &dyn ExpandDatabase,
-        span_for_range: &mut dyn FnMut(::tt::TextRange) -> SyntaxContext,
-        is_abs: bool,
-        segments: impl Iterator<Item = SyntaxToken>,
-    ) -> Option {
-        let mut segments = segments.peekable();
-        let mut result = SmallVec::new_const();
-        let path_kind = if is_abs {
-            PathKind::Abs
-        } else {
-            let first = segments.next()?;
-            match first.kind() {
-                T![crate] => PathKind::Crate,
-                T![self] => PathKind::Super(handle_super(&mut segments)),
-                T![super] => PathKind::Super(1 + handle_super(&mut segments)),
-                T![ident] => {
-                    let first_text = first.text();
-                    if first_text == "$crate" {
-                        let ctxt = span_for_range(first.text_range());
-                        resolve_crate_root(db, ctxt)
-                            .map(PathKind::DollarCrate)
-                            .unwrap_or(PathKind::Crate)
-                    } else {
-                        result.push(Name::new_symbol_root(Symbol::intern(first_text)));
-                        PathKind::Plain
-                    }
-                }
-                _ => return None,
-            }
-        };
-        for segment in segments {
-            if segment.kind() != T![ident] {
-                return None;
-            }
-            result.push(Name::new_symbol_root(Symbol::intern(segment.text())));
-        }
-        if result.is_empty() {
-            return None;
-        }
-        result.shrink_to_fit();
-        return Some(ModPath { kind: path_kind, segments: result });
-
-        fn handle_super(segments: &mut Peekable<impl Iterator<Item = SyntaxToken>>) -> u8 {
-            let mut result = 0;
-            while segments.next_if(|it| it.kind() == T![super]).is_some() {
-                result += 1;
-            }
-            result
-        }
-    }
-
     pub fn segments(&self) -> &[Name] {
         &self.segments
     }
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs b/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs
index 8b0c0d72cd49d..e5a778a95c7c9 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs
@@ -1,12 +1,13 @@
 //! Span maps for real files and macro expansions.
 
 use span::{Span, SyntaxContext};
+use stdx::TupleExt;
 use syntax::{AstNode, TextRange, ast};
 use triomphe::Arc;
 
 pub use span::RealSpanMap;
 
-use crate::{HirFileId, MacroCallId, db::ExpandDatabase};
+use crate::{HirFileId, MacroCallId, attrs::collect_attrs, db::ExpandDatabase};
 
 pub type ExpansionSpanMap = span::SpanMap;
 
@@ -109,24 +110,26 @@ pub(crate) fn real_span_map(
     // them anchors too, but only if they have no attributes attached, as those might be proc-macros
     // and using different anchors inside of them will prevent spans from being joinable.
     tree.items().for_each(|item| match &item {
-        ast::Item::ExternBlock(it) if ast::attrs_including_inner(it).next().is_none() => {
+        ast::Item::ExternBlock(it)
+            if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) =>
+        {
             if let Some(extern_item_list) = it.extern_item_list() {
                 pairs.extend(
                     extern_item_list.extern_items().map(ast::Item::from).map(item_to_entry),
                 );
             }
         }
-        ast::Item::Impl(it) if ast::attrs_including_inner(it).next().is_none() => {
+        ast::Item::Impl(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => {
             if let Some(assoc_item_list) = it.assoc_item_list() {
                 pairs.extend(assoc_item_list.assoc_items().map(ast::Item::from).map(item_to_entry));
             }
         }
-        ast::Item::Module(it) if ast::attrs_including_inner(it).next().is_none() => {
+        ast::Item::Module(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => {
             if let Some(item_list) = it.item_list() {
                 pairs.extend(item_list.items().map(item_to_entry));
             }
         }
-        ast::Item::Trait(it) if ast::attrs_including_inner(it).next().is_none() => {
+        ast::Item::Trait(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => {
             if let Some(assoc_item_list) = it.assoc_item_list() {
                 pairs.extend(assoc_item_list.assoc_items().map(ast::Item::from).map(item_to_entry));
             }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
index 0a6458562e15e..18ebe7d7a5395 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
@@ -6,7 +6,6 @@ mod tests;
 use base_db::Crate;
 use hir_def::{
     EnumVariantId, GeneralConstId, HasModule, StaticId,
-    attrs::AttrFlags,
     expr_store::Body,
     hir::{Expr, ExprId},
     type_ref::LiteralConstRef,
@@ -199,7 +198,7 @@ pub(crate) fn const_eval_discriminant_variant<'db>(
         return Ok(value);
     }
 
-    let repr = AttrFlags::repr(db, loc.parent.into());
+    let repr = db.enum_signature(loc.parent).repr;
     let is_signed = repr.and_then(|repr| repr.int).is_none_or(|int| int.is_signed());
 
     let mir_body = db.monomorphized_mir_body(
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
index c0e223380bca8..0815e62f87eef 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -17,8 +17,8 @@ use std::fmt;
 
 use hir_def::{
     AdtId, ConstId, EnumId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup,
-    ModuleDefId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, attrs::AttrFlags,
-    db::DefDatabase, hir::Pat, item_tree::FieldsShape, signatures::StaticFlags, src::HasSource,
+    ModuleDefId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, db::DefDatabase, hir::Pat,
+    item_tree::FieldsShape, signatures::StaticFlags, src::HasSource,
 };
 use hir_expand::{
     HirFileId,
@@ -201,7 +201,7 @@ impl<'a> DeclValidator<'a> {
 
             // Don't run the lint on extern "[not Rust]" fn items with the
             // #[no_mangle] attribute.
-            let no_mangle = AttrFlags::query(self.db, func.into()).contains(AttrFlags::NO_MANGLE);
+            let no_mangle = self.db.attrs(func.into()).by_key(sym::no_mangle).exists();
             if no_mangle && data.abi.as_ref().is_some_and(|abi| *abi != sym::Rust) {
                 cov_mark::hit!(extern_func_no_mangle_ignored);
             } else {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
index c70c6b6119446..fb942e336e659 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
@@ -2,9 +2,7 @@
 
 use std::{cell::LazyCell, fmt};
 
-use hir_def::{
-    EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId, attrs::AttrFlags,
-};
+use hir_def::{EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId};
 use intern::sym;
 use rustc_pattern_analysis::{
     IndexVec, PatCx, PrivateUninhabitedField,
@@ -120,7 +118,7 @@ impl<'a, 'db> MatchCheckCtx<'a, 'db> {
     /// Returns whether the given ADT is from another crate declared `#[non_exhaustive]`.
     fn is_foreign_non_exhaustive(&self, adt: hir_def::AdtId) -> bool {
         let is_local = adt.krate(self.db) == self.module.krate();
-        !is_local && AttrFlags::query(self.db, adt.into()).contains(AttrFlags::NON_EXHAUSTIVE)
+        !is_local && self.db.attrs(adt.into()).by_key(sym::non_exhaustive).exists()
     }
 
     fn variant_id_for_adt(
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs
index 8ac7ab19cd3bf..53524d66a33c2 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs
@@ -144,7 +144,7 @@ struct UnsafeVisitor<'db> {
     inside_assignment: bool,
     inside_union_destructure: bool,
     callback: &'db mut dyn FnMut(UnsafeDiagnostic),
-    def_target_features: TargetFeatures<'db>,
+    def_target_features: TargetFeatures,
     // FIXME: This needs to be the edition of the span of each call.
     edition: Edition,
     /// On some targets (WASM), calling safe functions with `#[target_feature]` is always safe, even when
@@ -162,7 +162,7 @@ impl<'db> UnsafeVisitor<'db> {
     ) -> Self {
         let resolver = def.resolver(db);
         let def_target_features = match def {
-            DefWithBodyId::FunctionId(func) => TargetFeatures::from_fn(db, func),
+            DefWithBodyId::FunctionId(func) => TargetFeatures::from_attrs(&db.attrs(func.into())),
             _ => TargetFeatures::default(),
         };
         let krate = resolver.module().krate();
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
index 03ae970acaa78..9891f3f248bd8 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
@@ -38,7 +38,7 @@ use hir_def::{
     lang_item::{LangItem, LangItemTarget, lang_item},
     layout::Integer,
     resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs},
-    signatures::{ConstSignature, EnumSignature, StaticSignature},
+    signatures::{ConstSignature, StaticSignature},
     type_ref::{ConstRef, LifetimeRefId, TypeRefId},
 };
 use hir_expand::{mod_path::ModPath, name::Name};
@@ -104,7 +104,7 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
         DefWithBodyId::ConstId(c) => ctx.collect_const(c, &db.const_signature(c)),
         DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_signature(s)),
         DefWithBodyId::VariantId(v) => {
-            ctx.return_ty = match EnumSignature::variant_body_type(db, v.lookup(db).parent) {
+            ctx.return_ty = match db.enum_signature(v.lookup(db).parent).variant_body_type() {
                 hir_def::layout::IntegerType::Pointer(signed) => match signed {
                     true => ctx.types.isize,
                     false => ctx.types.usize,
@@ -759,7 +759,7 @@ pub(crate) struct InferenceContext<'body, 'db> {
     /// Generally you should not resolve things via this resolver. Instead create a TyLoweringContext
     /// and resolve the path via its methods. This will ensure proper error reporting.
     pub(crate) resolver: Resolver<'db>,
-    target_features: OnceCell<(TargetFeatures<'db>, TargetFeatureIsSafeInTarget)>,
+    target_features: OnceCell<(TargetFeatures, TargetFeatureIsSafeInTarget)>,
     pub(crate) generic_def: GenericDefId,
     table: unify::InferenceTable<'db>,
     /// The traits in scope, disregarding block modules. This is used for caching purposes.
@@ -903,14 +903,14 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
     }
 
     fn target_features<'a>(
-        db: &'db dyn HirDatabase,
-        target_features: &'a OnceCell<(TargetFeatures<'db>, TargetFeatureIsSafeInTarget)>,
+        db: &dyn HirDatabase,
+        target_features: &'a OnceCell<(TargetFeatures, TargetFeatureIsSafeInTarget)>,
         owner: DefWithBodyId,
         krate: Crate,
-    ) -> (&'a TargetFeatures<'db>, TargetFeatureIsSafeInTarget) {
+    ) -> (&'a TargetFeatures, TargetFeatureIsSafeInTarget) {
         let (target_features, target_feature_is_safe) = target_features.get_or_init(|| {
             let target_features = match owner {
-                DefWithBodyId::FunctionId(id) => TargetFeatures::from_fn(db, id),
+                DefWithBodyId::FunctionId(id) => TargetFeatures::from_attrs(&db.attrs(id.into())),
                 _ => TargetFeatures::default(),
             };
             let target_feature_is_safe = match &krate.workspace_data(db).target {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
index 9b95eef0e0d6a..78889ccb89a28 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
@@ -37,11 +37,11 @@
 
 use hir_def::{
     CallableDefId,
-    attrs::AttrFlags,
     hir::{ExprId, ExprOrPatId},
     lang_item::LangItem,
     signatures::FunctionSignature,
 };
+use intern::sym;
 use rustc_ast_ir::Mutability;
 use rustc_type_ir::{
     BoundVar, TypeAndMut,
@@ -76,7 +76,7 @@ use crate::{
 struct Coerce<'a, 'b, 'db> {
     table: &'a mut InferenceTable<'db>,
     has_errors: &'a mut bool,
-    target_features: &'a mut dyn FnMut() -> (&'b TargetFeatures<'db>, TargetFeatureIsSafeInTarget),
+    target_features: &'a mut dyn FnMut() -> (&'b TargetFeatures, TargetFeatureIsSafeInTarget),
     use_lub: bool,
     /// Determines whether or not allow_two_phase_borrow is set on any
     /// autoref adjustments we create while coercing. We don't want to
@@ -864,14 +864,14 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
                             return Err(TypeError::IntrinsicCast);
                         }
 
-                        let attrs = AttrFlags::query(self.table.db, def_id.into());
-                        if attrs.contains(AttrFlags::RUSTC_FORCE_INLINE) {
+                        let attrs = self.table.db.attrs(def_id.into());
+                        if attrs.by_key(sym::rustc_force_inline).exists() {
                             return Err(TypeError::ForceInlineCast);
                         }
 
-                        if b_hdr.safety.is_safe() && attrs.contains(AttrFlags::HAS_TARGET_FEATURE) {
+                        if b_hdr.safety.is_safe() && attrs.by_key(sym::target_feature).exists() {
                             let fn_target_features =
-                                TargetFeatures::from_fn_no_implications(self.table.db, def_id);
+                                TargetFeatures::from_attrs_no_implications(&attrs);
                             // Allow the coercion if the current function has all the features that would be
                             // needed to call the coercee safely.
                             let (target_features, target_feature_is_safe) =
@@ -1056,7 +1056,7 @@ impl<'db> InferenceContext<'_, 'db> {
 
         let is_force_inline = |ty: Ty<'db>| {
             if let TyKind::FnDef(CallableIdWrapper(CallableDefId::FunctionId(did)), _) = ty.kind() {
-                AttrFlags::query(self.db, did.into()).contains(AttrFlags::RUSTC_FORCE_INLINE)
+                self.db.attrs(did.into()).by_key(sym::rustc_force_inline).exists()
             } else {
                 false
             }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
index a1d99a45287d8..efb7244ff6375 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
@@ -2365,11 +2365,9 @@ impl<'db> InferenceContext<'_, 'db> {
         };
 
         let data = self.db.function_signature(func);
-        let Some(legacy_const_generics_indices) = data.legacy_const_generics_indices(self.db, func)
-        else {
+        let Some(legacy_const_generics_indices) = &data.legacy_const_generics_indices else {
             return Default::default();
         };
-        let mut legacy_const_generics_indices = Box::<[u32]>::from(legacy_const_generics_indices);
 
         // only use legacy const generics if the param count matches with them
         if data.params.len() + legacy_const_generics_indices.len() != args.len() {
@@ -2378,8 +2376,9 @@ impl<'db> InferenceContext<'_, 'db> {
             } else {
                 // there are more parameters than there should be without legacy
                 // const params; use them
-                legacy_const_generics_indices.sort_unstable();
-                return legacy_const_generics_indices;
+                let mut indices = legacy_const_generics_indices.as_ref().clone();
+                indices.sort();
+                return indices;
             }
         }
 
@@ -2392,8 +2391,9 @@ impl<'db> InferenceContext<'_, 'db> {
             self.infer_expr(args[arg_idx as usize], &expected, ExprIsRead::Yes);
             // FIXME: evaluate and unify with the const
         }
-        legacy_const_generics_indices.sort_unstable();
-        legacy_const_generics_indices
+        let mut indices = legacy_const_generics_indices.as_ref().clone();
+        indices.sort();
+        indices
     }
 
     /// Dereferences a single level of immutable referencing.
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
index b650f5c1a16a6..fc0b9d30b3333 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
@@ -4,7 +4,6 @@ use std::fmt;
 
 use hir_def::{
     AdtId, LocalFieldId, StructId,
-    attrs::AttrFlags,
     layout::{LayoutCalculatorError, LayoutData},
 };
 use la_arena::{Idx, RawIdx};
@@ -175,7 +174,8 @@ pub fn layout_of_ty_query<'db>(
         TyKind::Adt(def, args) => {
             match def.inner().id {
                 hir_def::AdtId::StructId(s) => {
-                    let repr = AttrFlags::repr(db, s.into()).unwrap_or_default();
+                    let data = db.struct_signature(s);
+                    let repr = data.repr.unwrap_or_default();
                     if repr.simd() {
                         return layout_of_simd_ty(db, s, repr.packed(), &args, trait_env, &target);
                     }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
index ecebf7935d06e..a8f04bf8c132e 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
@@ -4,9 +4,9 @@ use std::{cmp, ops::Bound};
 
 use hir_def::{
     AdtId, VariantId,
-    attrs::AttrFlags,
     signatures::{StructFlags, VariantFields},
 };
+use intern::sym;
 use rustc_abi::{Integer, ReprOptions, TargetDataLayout};
 use rustc_index::IndexVec;
 use smallvec::SmallVec;
@@ -44,15 +44,15 @@ pub fn layout_of_adt_query<'db>(
             r.push(handle_variant(s.into(), s.fields(db))?);
             (
                 r,
-                AttrFlags::repr(db, s.into()).unwrap_or_default(),
+                sig.repr.unwrap_or_default(),
                 sig.flags.intersects(StructFlags::IS_UNSAFE_CELL | StructFlags::IS_UNSAFE_PINNED),
             )
         }
         AdtId::UnionId(id) => {
-            let repr = AttrFlags::repr(db, id.into());
+            let data = db.union_signature(id);
             let mut r = SmallVec::new();
             r.push(handle_variant(id.into(), id.fields(db))?);
-            (r, repr.unwrap_or_default(), false)
+            (r, data.repr.unwrap_or_default(), false)
         }
         AdtId::EnumId(e) => {
             let variants = e.enum_variants(db);
@@ -61,7 +61,7 @@ pub fn layout_of_adt_query<'db>(
                 .iter()
                 .map(|&(v, _, _)| handle_variant(v.into(), v.fields(db)))
                 .collect::<Result<SmallVec<_>, _>>()?;
-            (r, AttrFlags::repr(db, e.into()).unwrap_or_default(), false)
+            (r, db.enum_signature(e).repr.unwrap_or_default(), false)
         }
     };
     let variants = variants
@@ -105,12 +105,27 @@ pub(crate) fn layout_of_adt_cycle_result<'db>(
 }
 
 fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>, Bound<u128>) {
-    let range = AttrFlags::rustc_layout_scalar_valid_range(db, def);
-    let get = |value| match value {
-        Some(it) => Bound::Included(it),
-        None => Bound::Unbounded,
+    let attrs = db.attrs(def.into());
+    let get = |name| {
+        let attr = attrs.by_key(name).tt_values();
+        for tree in attr {
+            if let Some(it) = tree.iter().next_as_view() {
+                let text = it.to_string().replace('_', "");
+                let (text, base) = match text.as_bytes() {
+                    [b'0', b'x', ..] => (&text[2..], 16),
+                    [b'0', b'o', ..] => (&text[2..], 8),
+                    [b'0', b'b', ..] => (&text[2..], 2),
+                    _ => (&*text, 10),
+                };
+
+                if let Ok(it) = u128::from_str_radix(text, base) {
+                    return Bound::Included(it);
+                }
+            }
+        }
+        Bound::Unbounded
     };
-    (get(range.start), get(range.end))
+    (get(sym::rustc_layout_scalar_valid_range_start), get(sym::rustc_layout_scalar_valid_range_end))
 }
 
 /// Finds the appropriate Integer type and signedness for the given
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
index 1b5f4595ca3cb..cec63566338f1 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
@@ -8,11 +8,11 @@ use base_db::Crate;
 use hir_def::{
     AdtId, AssocItemId, BlockId, ConstId, FunctionId, HasModule, ImplId, ItemContainerId, Lookup,
     ModuleId, TraitId, TypeAliasId,
-    attrs::AttrFlags,
     nameres::{DefMap, block_def_map, crate_def_map},
     signatures::{ConstFlags, EnumFlags, FnFlags, StructFlags, TraitFlags, TypeAliasFlags},
 };
 use hir_expand::name::Name;
+use intern::sym;
 use rustc_ast_ir::Mutability;
 use rustc_hash::{FxHashMap, FxHashSet};
 use rustc_type_ir::{
@@ -230,8 +230,7 @@ impl TraitImpls {
                 // FIXME: Reservation impls should be considered during coherence checks. If we are
                 // (ever) to implement coherence checks, this filtering should be done by the trait
                 // solver.
-                if AttrFlags::query(db, impl_id.into()).contains(AttrFlags::RUSTC_RESERVATION_IMPL)
-                {
+                if db.attrs(impl_id.into()).by_key(sym::rustc_reservation_impl).exists() {
                     continue;
                 }
                 let target_trait = match db.impl_trait(impl_id) {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs
index d4aab2d094960..4b1adecf8c87d 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs
@@ -3,11 +3,9 @@
 //!
 use std::cmp::{self, Ordering};
 
-use hir_def::{
-    CrateRootModuleId, attrs::AttrFlags, resolver::HasResolver, signatures::FunctionSignature,
-};
+use hir_def::{CrateRootModuleId, resolver::HasResolver, signatures::FunctionSignature};
 use hir_expand::name::Name;
-use intern::sym;
+use intern::{Symbol, sym};
 use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike, Ty as _};
 use stdx::never;
 
@@ -55,7 +53,7 @@ impl<'db> Evaluator<'db> {
         }
 
         let function_data = self.db.function_signature(def);
-        let attrs = AttrFlags::query(self.db, def.into());
+        let attrs = self.db.attrs(def.into());
         let is_intrinsic = FunctionSignature::is_intrinsic(self.db, def);
 
         if is_intrinsic {
@@ -67,7 +65,7 @@ impl<'db> Evaluator<'db> {
                 locals,
                 span,
                 !function_data.has_body()
-                    || attrs.contains(AttrFlags::RUSTC_INTRINSIC_MUST_BE_OVERRIDDEN),
+                    || attrs.by_key(sym::rustc_intrinsic_must_be_overridden).exists(),
             );
         }
         let is_extern_c = match def.lookup(self.db).container {
@@ -87,13 +85,18 @@ impl<'db> Evaluator<'db> {
                 .map(|()| true);
         }
 
-        if attrs.intersects(
-            AttrFlags::RUSTC_ALLOCATOR
-                | AttrFlags::RUSTC_DEALLOCATOR
-                | AttrFlags::RUSTC_REALLOCATOR
-                | AttrFlags::RUSTC_ALLOCATOR_ZEROED,
-        ) {
-            self.exec_alloc_fn(attrs, args, destination)?;
+        let alloc_fn =
+            attrs.iter().filter_map(|it| it.path().as_ident()).map(|it| it.symbol()).find(|it| {
+                [
+                    &sym::rustc_allocator,
+                    &sym::rustc_deallocator,
+                    &sym::rustc_reallocator,
+                    &sym::rustc_allocator_zeroed,
+                ]
+                .contains(it)
+            });
+        if let Some(alloc_fn) = alloc_fn {
+            self.exec_alloc_fn(alloc_fn, args, destination)?;
             return Ok(true);
         }
         if let Some(it) = self.detect_lang_function(def) {
@@ -242,14 +245,12 @@ impl<'db> Evaluator<'db> {
 
     fn exec_alloc_fn(
         &mut self,
-        alloc_fn: AttrFlags,
+        alloc_fn: &Symbol,
         args: &[IntervalAndTy<'db>],
         destination: Interval,
     ) -> Result<'db, ()> {
         match alloc_fn {
-            _ if alloc_fn
-                .intersects(AttrFlags::RUSTC_ALLOCATOR_ZEROED | AttrFlags::RUSTC_ALLOCATOR) =>
-            {
+            _ if *alloc_fn == sym::rustc_allocator_zeroed || *alloc_fn == sym::rustc_allocator => {
                 let [size, align] = args else {
                     return Err(MirEvalError::InternalError(
                         "rustc_allocator args are not provided".into(),
@@ -260,8 +261,8 @@ impl<'db> Evaluator<'db> {
                 let result = self.heap_allocate(size, align)?;
                 destination.write_from_bytes(self, &result.to_bytes())?;
             }
-            _ if alloc_fn.contains(AttrFlags::RUSTC_DEALLOCATOR) => { /* no-op for now */ }
-            _ if alloc_fn.contains(AttrFlags::RUSTC_REALLOCATOR) => {
+            _ if *alloc_fn == sym::rustc_deallocator => { /* no-op for now */ }
+            _ if *alloc_fn == sym::rustc_reallocator => {
                 let [ptr, old_size, align, new_size] = args else {
                     return Err(MirEvalError::InternalError(
                         "rustc_allocator args are not provided".into(),
@@ -287,14 +288,14 @@ impl<'db> Evaluator<'db> {
 
     fn detect_lang_function(&self, def: FunctionId) -> Option<LangItem> {
         use LangItem::*;
-        let attrs = AttrFlags::query(self.db, def.into());
+        let attrs = self.db.attrs(def.into());
 
-        if attrs.contains(AttrFlags::RUSTC_CONST_PANIC_STR) {
+        if attrs.by_key(sym::rustc_const_panic_str).exists() {
             // `#[rustc_const_panic_str]` is treated like `lang = "begin_panic"` by rustc CTFE.
             return Some(LangItem::BeginPanic);
         }
 
-        let candidate = attrs.lang_item_with_attrs(self.db, def.into())?;
+        let candidate = attrs.lang_item()?;
         // We want to execute these functions with special logic
         // `PanicFmt` is not detected here as it's redirected later.
         if [BeginPanic, SliceLen, DropInPlace].contains(&candidate) {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs
index 7f7d596be9fbf..ce8b76837a3c7 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs
@@ -9,7 +9,6 @@ use base_db::Crate;
 use hir_def::{
     AdtId, AttrDefId, BlockId, CallableDefId, EnumVariantId, ItemContainerId, StructId, UnionId,
     VariantId,
-    attrs::AttrFlags,
     lang_item::LangItem,
     signatures::{FieldData, FnFlags, ImplFlags, StructFlags, TraitFlags},
 };
@@ -468,28 +467,28 @@ impl AdtDef {
 
                 let variants = vec![(VariantIdx(0), VariantDef::Struct(struct_id))];
 
-                let data_repr = data.repr(db, struct_id);
+                let mut repr = ReprOptions::default();
+                repr.align = data.repr.and_then(|r| r.align);
+                repr.pack = data.repr.and_then(|r| r.pack);
+                repr.int = data.repr.and_then(|r| r.int);
+
                 let mut repr_flags = ReprFlags::empty();
                 if flags.is_box {
                     repr_flags.insert(ReprFlags::IS_LINEAR);
                 }
-                if data_repr.is_some_and(|r| r.c()) {
+                if data.repr.is_some_and(|r| r.c()) {
                     repr_flags.insert(ReprFlags::IS_C);
                 }
-                if data_repr.is_some_and(|r| r.simd()) {
+                if data.repr.is_some_and(|r| r.simd()) {
                     repr_flags.insert(ReprFlags::IS_SIMD);
                 }
-                let repr = ReprOptions {
-                    align: data_repr.and_then(|r| r.align),
-                    pack: data_repr.and_then(|r| r.pack),
-                    int: data_repr.and_then(|r| r.int),
-                    flags: repr_flags,
-                    ..ReprOptions::default()
-                };
+                repr.flags = repr_flags;
 
                 (flags, variants, repr)
             }
             AdtId::UnionId(union_id) => {
+                let data = db.union_signature(union_id);
+
                 let flags = AdtFlags {
                     is_enum: false,
                     is_union: true,
@@ -502,24 +501,22 @@ impl AdtDef {
 
                 let variants = vec![(VariantIdx(0), VariantDef::Union(union_id))];
 
-                let data_repr = AttrFlags::repr(db, union_id.into());
+                let mut repr = ReprOptions::default();
+                repr.align = data.repr.and_then(|r| r.align);
+                repr.pack = data.repr.and_then(|r| r.pack);
+                repr.int = data.repr.and_then(|r| r.int);
+
                 let mut repr_flags = ReprFlags::empty();
                 if flags.is_box {
                     repr_flags.insert(ReprFlags::IS_LINEAR);
                 }
-                if data_repr.is_some_and(|r| r.c()) {
+                if data.repr.is_some_and(|r| r.c()) {
                     repr_flags.insert(ReprFlags::IS_C);
                 }
-                if data_repr.is_some_and(|r| r.simd()) {
+                if data.repr.is_some_and(|r| r.simd()) {
                     repr_flags.insert(ReprFlags::IS_SIMD);
                 }
-                let repr = ReprOptions {
-                    align: data_repr.and_then(|r| r.align),
-                    pack: data_repr.and_then(|r| r.pack),
-                    int: data_repr.and_then(|r| r.int),
-                    flags: repr_flags,
-                    ..ReprOptions::default()
-                };
+                repr.flags = repr_flags;
 
                 (flags, variants, repr)
             }
@@ -543,26 +540,24 @@ impl AdtDef {
                     .map(|(idx, v)| (idx, VariantDef::Enum(v.0)))
                     .collect();
 
-                let data_repr = AttrFlags::repr(db, enum_id.into());
+                let data = db.enum_signature(enum_id);
+
+                let mut repr = ReprOptions::default();
+                repr.align = data.repr.and_then(|r| r.align);
+                repr.pack = data.repr.and_then(|r| r.pack);
+                repr.int = data.repr.and_then(|r| r.int);
 
                 let mut repr_flags = ReprFlags::empty();
                 if flags.is_box {
                     repr_flags.insert(ReprFlags::IS_LINEAR);
                 }
-                if data_repr.is_some_and(|r| r.c()) {
+                if data.repr.is_some_and(|r| r.c()) {
                     repr_flags.insert(ReprFlags::IS_C);
                 }
-                if data_repr.is_some_and(|r| r.simd()) {
+                if data.repr.is_some_and(|r| r.simd()) {
                     repr_flags.insert(ReprFlags::IS_SIMD);
                 }
-
-                let repr = ReprOptions {
-                    align: data_repr.and_then(|r| r.align),
-                    pack: data_repr.and_then(|r| r.pack),
-                    int: data_repr.and_then(|r| r.int),
-                    flags: repr_flags,
-                    ..ReprOptions::default()
-                };
+                repr.flags = repr_flags;
 
                 (flags, variants, repr)
             }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/target_feature.rs b/src/tools/rust-analyzer/crates/hir-ty/src/target_feature.rs
index 2bd675ba124e4..0a8ed2cf0cabd 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/target_feature.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/target_feature.rs
@@ -1,35 +1,31 @@
 //! Stuff for handling `#[target_feature]` (needed for unsafe check).
 
-use std::borrow::Cow;
 use std::sync::LazyLock;
 
-use hir_def::FunctionId;
-use hir_def::attrs::AttrFlags;
-use intern::Symbol;
+use hir_def::attr::Attrs;
+use hir_def::tt;
+use intern::{Symbol, sym};
 use rustc_hash::{FxHashMap, FxHashSet};
 
-use crate::db::HirDatabase;
-
 #[derive(Debug, Default, Clone)]
-pub struct TargetFeatures<'db> {
-    pub(crate) enabled: Cow<'db, FxHashSet<Symbol>>,
+pub struct TargetFeatures {
+    pub(crate) enabled: FxHashSet<Symbol>,
 }
 
-impl<'db> TargetFeatures<'db> {
-    pub fn from_fn(db: &'db dyn HirDatabase, owner: FunctionId) -> Self {
-        let mut result = TargetFeatures::from_fn_no_implications(db, owner);
+impl TargetFeatures {
+    pub fn from_attrs(attrs: &Attrs) -> Self {
+        let mut result = TargetFeatures::from_attrs_no_implications(attrs);
         result.expand_implications();
         result
     }
 
     fn expand_implications(&mut self) {
         let all_implications = LazyLock::force(&TARGET_FEATURE_IMPLICATIONS);
-        let enabled = self.enabled.to_mut();
-        let mut queue = enabled.iter().cloned().collect::<Vec<_>>();
+        let mut queue = self.enabled.iter().cloned().collect::<Vec<_>>();
         while let Some(feature) = queue.pop() {
             if let Some(implications) = all_implications.get(&feature) {
                 for implication in implications {
-                    if enabled.insert(implication.clone()) {
+                    if self.enabled.insert(implication.clone()) {
                         queue.push(implication.clone());
                     }
                 }
@@ -38,9 +34,25 @@ impl<'db> TargetFeatures<'db> {
     }
 
     /// Retrieves the target features from the attributes, and does not expand the target features implied by them.
-    pub(crate) fn from_fn_no_implications(db: &'db dyn HirDatabase, owner: FunctionId) -> Self {
-        let enabled = AttrFlags::target_features(db, owner);
-        Self { enabled: Cow::Borrowed(enabled) }
+    pub(crate) fn from_attrs_no_implications(attrs: &Attrs) -> Self {
+        let enabled = attrs
+            .by_key(sym::target_feature)
+            .tt_values()
+            .filter_map(|tt| match tt.token_trees().flat_tokens() {
+                [
+                    tt::TokenTree::Leaf(tt::Leaf::Ident(enable_ident)),
+                    tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. })),
+                    tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
+                        kind: tt::LitKind::Str,
+                        symbol: features,
+                        ..
+                    })),
+                ] if enable_ident.sym == sym::enable => Some(features),
+                _ => None,
+            })
+            .flat_map(|features| features.as_str().split(',').map(Symbol::intern))
+            .collect();
+        Self { enabled }
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs
index 50625c1c26d55..bc4701970c76c 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs
@@ -31,6 +31,7 @@ fn foo() -> i32 {
         &[("infer_shim", 1)],
         expect_test::expect![[r#"
             [
+                "source_root_crates_shim",
                 "crate_local_def_map",
                 "file_item_tree_query",
                 "ast_id_map_shim",
@@ -39,7 +40,7 @@ fn foo() -> i32 {
                 "infer_shim",
                 "function_signature_shim",
                 "function_signature_with_source_map_shim",
-                "AttrFlags::query_",
+                "attrs_shim",
                 "body_shim",
                 "body_with_source_map_shim",
                 "trait_environment_shim",
@@ -78,7 +79,7 @@ fn foo() -> i32 {
                 "ast_id_map_shim",
                 "file_item_tree_query",
                 "real_span_map_shim",
-                "AttrFlags::query_",
+                "attrs_shim",
                 "function_signature_with_source_map_shim",
                 "function_signature_shim",
                 "body_with_source_map_shim",
@@ -117,6 +118,7 @@ fn baz() -> i32 {
         &[("infer_shim", 3)],
         expect_test::expect![[r#"
             [
+                "source_root_crates_shim",
                 "crate_local_def_map",
                 "file_item_tree_query",
                 "ast_id_map_shim",
@@ -125,7 +127,7 @@ fn baz() -> i32 {
                 "infer_shim",
                 "function_signature_shim",
                 "function_signature_with_source_map_shim",
-                "AttrFlags::query_",
+                "attrs_shim",
                 "body_shim",
                 "body_with_source_map_shim",
                 "trait_environment_shim",
@@ -133,8 +135,8 @@ fn baz() -> i32 {
                 "expr_scopes_shim",
                 "lang_item",
                 "crate_lang_items",
-                "AttrFlags::query_",
-                "AttrFlags::query_",
+                "attrs_shim",
+                "attrs_shim",
                 "infer_shim",
                 "function_signature_shim",
                 "function_signature_with_source_map_shim",
@@ -187,13 +189,13 @@ fn baz() -> i32 {
                 "ast_id_map_shim",
                 "file_item_tree_query",
                 "real_span_map_shim",
-                "AttrFlags::query_",
+                "attrs_shim",
                 "function_signature_with_source_map_shim",
                 "function_signature_shim",
                 "body_with_source_map_shim",
                 "body_shim",
-                "AttrFlags::query_",
-                "AttrFlags::query_",
+                "attrs_shim",
+                "attrs_shim",
                 "function_signature_with_source_map_shim",
                 "function_signature_shim",
                 "body_with_source_map_shim",
@@ -233,6 +235,7 @@ $0",
         &[("trait_impls_in_crate_shim", 1)],
         expect_test::expect![[r#"
             [
+                "source_root_crates_shim",
                 "crate_local_def_map",
                 "file_item_tree_query",
                 "ast_id_map_shim",
@@ -304,6 +307,7 @@ $0",
         &[("trait_impls_in_crate_shim", 1)],
         expect_test::expect![[r#"
             [
+                "source_root_crates_shim",
                 "crate_local_def_map",
                 "file_item_tree_query",
                 "ast_id_map_shim",
@@ -376,6 +380,7 @@ $0",
         &[("trait_impls_in_crate_shim", 1)],
         expect_test::expect![[r#"
             [
+                "source_root_crates_shim",
                 "crate_local_def_map",
                 "file_item_tree_query",
                 "ast_id_map_shim",
@@ -449,6 +454,7 @@ $0",
         &[("trait_impls_in_crate_shim", 1)],
         expect_test::expect![[r#"
             [
+                "source_root_crates_shim",
                 "crate_local_def_map",
                 "file_item_tree_query",
                 "ast_id_map_shim",
@@ -497,14 +503,14 @@ impl SomeStruct {
                 "real_span_map_shim",
                 "crate_local_def_map",
                 "trait_impls_in_crate_shim",
-                "AttrFlags::query_",
+                "attrs_shim",
                 "impl_trait_with_diagnostics_shim",
                 "impl_signature_shim",
                 "impl_signature_with_source_map_shim",
                 "impl_self_ty_with_diagnostics_shim",
                 "struct_signature_shim",
                 "struct_signature_with_source_map_shim",
-                "AttrFlags::query_",
+                "attrs_shim",
             ]
         "#]],
     );
@@ -554,6 +560,7 @@ fn main() {
         &[("trait_solve_shim", 0)],
         expect_test::expect![[r#"
             [
+                "source_root_crates_shim",
                 "crate_local_def_map",
                 "file_item_tree_query",
                 "ast_id_map_shim",
@@ -562,22 +569,22 @@ fn main() {
                 "TraitItems::query_with_diagnostics_",
                 "body_shim",
                 "body_with_source_map_shim",
-                "AttrFlags::query_",
+                "attrs_shim",
                 "ImplItems::of_",
                 "infer_shim",
                 "trait_signature_shim",
                 "trait_signature_with_source_map_shim",
-                "AttrFlags::query_",
+                "attrs_shim",
                 "function_signature_shim",
                 "function_signature_with_source_map_shim",
-                "AttrFlags::query_",
+                "attrs_shim",
                 "body_shim",
                 "body_with_source_map_shim",
                 "trait_environment_shim",
                 "lang_item",
                 "crate_lang_items",
-                "AttrFlags::query_",
-                "AttrFlags::query_",
+                "attrs_shim",
+                "attrs_shim",
                 "generic_predicates_shim",
                 "return_type_impl_traits_shim",
                 "infer_shim",
@@ -659,22 +666,22 @@ fn main() {
                 "crate_local_def_map",
                 "TraitItems::query_with_diagnostics_",
                 "body_with_source_map_shim",
-                "AttrFlags::query_",
+                "attrs_shim",
                 "body_shim",
                 "ImplItems::of_",
                 "infer_shim",
-                "AttrFlags::query_",
+                "attrs_shim",
                 "trait_signature_with_source_map_shim",
-                "AttrFlags::query_",
+                "attrs_shim",
                 "function_signature_with_source_map_shim",
                 "function_signature_shim",
                 "body_with_source_map_shim",
                 "body_shim",
                 "trait_environment_shim",
                 "crate_lang_items",
-                "AttrFlags::query_",
-                "AttrFlags::query_",
-                "AttrFlags::query_",
+                "attrs_shim",
+                "attrs_shim",
+                "attrs_shim",
                 "generic_predicates_shim",
                 "return_type_impl_traits_shim",
                 "infer_shim",
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
index 41dc4dc533753..ca5e33fe6ad00 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
@@ -9,7 +9,6 @@ use base_db::{
 };
 use hir_def::{
     EnumId, EnumVariantId, FunctionId, Lookup, TraitId,
-    attrs::AttrFlags,
     db::DefDatabase,
     hir::generics::WherePredicate,
     lang_item::LangItem,
@@ -120,7 +119,7 @@ pub fn target_feature_is_safe_in_target(target: &TargetData) -> TargetFeatureIsS
 pub fn is_fn_unsafe_to_call(
     db: &dyn HirDatabase,
     func: FunctionId,
-    caller_target_features: &TargetFeatures<'_>,
+    caller_target_features: &TargetFeatures,
     call_edition: Edition,
     target_feature_is_safe: TargetFeatureIsSafeInTarget,
 ) -> Unsafety {
@@ -131,7 +130,8 @@ pub fn is_fn_unsafe_to_call(
 
     if data.has_target_feature() && target_feature_is_safe == TargetFeatureIsSafeInTarget::No {
         // RFC 2396 .
-        let callee_target_features = TargetFeatures::from_fn_no_implications(db, func);
+        let callee_target_features =
+            TargetFeatures::from_attrs_no_implications(&db.attrs(func.into()));
         if !caller_target_features.enabled.is_superset(&callee_target_features.enabled) {
             return Unsafety::Unsafe;
         }
@@ -152,7 +152,7 @@ pub fn is_fn_unsafe_to_call(
             if is_intrinsic_block {
                 // legacy intrinsics
                 // extern "rust-intrinsic" intrinsics are unsafe unless they have the rustc_safe_intrinsic attribute
-                if AttrFlags::query(db, func.into()).contains(AttrFlags::RUSTC_SAFE_INTRINSIC) {
+                if db.attrs(func.into()).by_key(sym::rustc_safe_intrinsic).exists() {
                     Unsafety::Safe
                 } else {
                     Unsafety::Unsafe
diff --git a/src/tools/rust-analyzer/crates/hir/src/attrs.rs b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
index 3376c51fe5c92..147f1b8653be8 100644
--- a/src/tools/rust-analyzer/crates/hir/src/attrs.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
@@ -2,12 +2,9 @@
 
 use std::ops::ControlFlow;
 
-use cfg::CfgExpr;
-use either::Either;
 use hir_def::{
-    AssocItemId, AttrDefId, FieldId, InternedModuleId, LifetimeParamId, ModuleDefId,
-    TypeOrConstParamId,
-    attrs::{AttrFlags, Docs, IsInnerDoc},
+    AssocItemId, AttrDefId, ModuleDefId,
+    attr::AttrsWithOwner,
     expr_store::path::Path,
     item_scope::ItemInNs,
     per_ns::Namespace,
@@ -18,7 +15,6 @@ use hir_expand::{
     name::Name,
 };
 use hir_ty::{db::HirDatabase, method_resolution};
-use intern::Symbol;
 
 use crate::{
     Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, DocLinkDef, Enum, ExternCrateDecl,
@@ -26,161 +22,28 @@ use crate::{
     Struct, Trait, Type, TypeAlias, TypeParam, Union, Variant, VariantDef,
 };
 
-#[derive(Debug, Clone, Copy)]
-pub enum AttrsOwner {
-    AttrDef(AttrDefId),
-    Field(FieldId),
-    LifetimeParam(LifetimeParamId),
-    TypeOrConstParam(TypeOrConstParamId),
-}
-
-impl AttrsOwner {
-    #[inline]
-    fn attr_def(&self) -> Option<AttrDefId> {
-        match self {
-            AttrsOwner::AttrDef(it) => Some(*it),
-            _ => None,
-        }
-    }
-}
-
-#[derive(Debug, Clone)]
-pub struct AttrsWithOwner {
-    pub(crate) attrs: AttrFlags,
-    owner: AttrsOwner,
-}
-
-impl AttrsWithOwner {
-    fn new(db: &dyn HirDatabase, owner: AttrDefId) -> Self {
-        Self { attrs: AttrFlags::query(db, owner), owner: AttrsOwner::AttrDef(owner) }
-    }
-
-    fn new_field(db: &dyn HirDatabase, owner: FieldId) -> Self {
-        Self { attrs: AttrFlags::query_field(db, owner), owner: AttrsOwner::Field(owner) }
-    }
-
-    fn new_lifetime_param(db: &dyn HirDatabase, owner: LifetimeParamId) -> Self {
-        Self {
-            attrs: AttrFlags::query_lifetime_param(db, owner),
-            owner: AttrsOwner::LifetimeParam(owner),
-        }
-    }
-    fn new_type_or_const_param(db: &dyn HirDatabase, owner: TypeOrConstParamId) -> Self {
-        Self {
-            attrs: AttrFlags::query_type_or_const_param(db, owner),
-            owner: AttrsOwner::TypeOrConstParam(owner),
-        }
-    }
-
-    #[inline]
-    pub fn is_unstable(&self) -> bool {
-        self.attrs.contains(AttrFlags::IS_UNSTABLE)
-    }
-
-    #[inline]
-    pub fn is_macro_export(&self) -> bool {
-        self.attrs.contains(AttrFlags::IS_MACRO_EXPORT)
-    }
-
-    #[inline]
-    pub fn is_doc_notable_trait(&self) -> bool {
-        self.attrs.contains(AttrFlags::IS_DOC_NOTABLE_TRAIT)
-    }
-
-    #[inline]
-    pub fn is_doc_hidden(&self) -> bool {
-        self.attrs.contains(AttrFlags::IS_DOC_HIDDEN)
-    }
-
-    #[inline]
-    pub fn is_deprecated(&self) -> bool {
-        self.attrs.contains(AttrFlags::IS_DEPRECATED)
-    }
-
-    #[inline]
-    pub fn is_non_exhaustive(&self) -> bool {
-        self.attrs.contains(AttrFlags::NON_EXHAUSTIVE)
-    }
-
-    #[inline]
-    pub fn is_test(&self) -> bool {
-        self.attrs.contains(AttrFlags::IS_TEST)
-    }
-
-    #[inline]
-    pub fn lang(&self, db: &dyn HirDatabase) -> Option<&'static str> {
-        self.owner
-            .attr_def()
-            .and_then(|owner| self.attrs.lang_item_with_attrs(db, owner))
-            .map(|lang| lang.name())
-    }
-
-    #[inline]
-    pub fn doc_aliases<'db>(&self, db: &'db dyn HirDatabase) -> &'db [Symbol] {
-        let owner = match self.owner {
-            AttrsOwner::AttrDef(it) => Either::Left(it),
-            AttrsOwner::Field(it) => Either::Right(it),
-            AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => return &[],
-        };
-        self.attrs.doc_aliases(db, owner)
-    }
-
-    #[inline]
-    pub fn cfgs<'db>(&self, db: &'db dyn HirDatabase) -> Option<&'db CfgExpr> {
-        let owner = match self.owner {
-            AttrsOwner::AttrDef(it) => Either::Left(it),
-            AttrsOwner::Field(it) => Either::Right(it),
-            AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => return None,
-        };
-        self.attrs.cfgs(db, owner)
-    }
-
-    #[inline]
-    pub fn hir_docs<'db>(&self, db: &'db dyn HirDatabase) -> Option<&'db Docs> {
-        match self.owner {
-            AttrsOwner::AttrDef(it) => AttrFlags::docs(db, it).as_deref(),
-            AttrsOwner::Field(it) => AttrFlags::field_docs(db, it),
-            AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => None,
-        }
-    }
-}
-
-pub trait HasAttrs: Sized {
-    #[inline]
-    fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
-        match self.attr_id(db) {
-            AttrsOwner::AttrDef(it) => AttrsWithOwner::new(db, it),
-            AttrsOwner::Field(it) => AttrsWithOwner::new_field(db, it),
-            AttrsOwner::LifetimeParam(it) => AttrsWithOwner::new_lifetime_param(db, it),
-            AttrsOwner::TypeOrConstParam(it) => AttrsWithOwner::new_type_or_const_param(db, it),
-        }
-    }
-
+pub trait HasAttrs {
+    fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner;
     #[doc(hidden)]
-    fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner;
-
-    #[inline]
-    fn hir_docs(self, db: &dyn HirDatabase) -> Option<&Docs> {
-        match self.attr_id(db) {
-            AttrsOwner::AttrDef(it) => AttrFlags::docs(db, it).as_deref(),
-            AttrsOwner::Field(it) => AttrFlags::field_docs(db, it),
-            AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => None,
-        }
-    }
+    fn attr_id(self) -> AttrDefId;
 }
 
 macro_rules! impl_has_attrs {
     ($(($def:ident, $def_id:ident),)*) => {$(
         impl HasAttrs for $def {
-            #[inline]
-            fn attr_id(self, _db: &dyn HirDatabase) -> AttrsOwner {
-                AttrsOwner::AttrDef(AttrDefId::$def_id(self.into()))
+            fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
+                let def = AttrDefId::$def_id(self.into());
+                AttrsWithOwner::new(db, def)
+            }
+            fn attr_id(self) -> AttrDefId {
+                AttrDefId::$def_id(self.into())
             }
         }
     )*};
 }
 
 impl_has_attrs![
+    (Field, FieldId),
     (Variant, EnumVariantId),
     (Static, StaticId),
     (Const, ConstId),
@@ -189,6 +52,8 @@ impl_has_attrs![
     (Macro, MacroId),
     (Function, FunctionId),
     (Adt, AdtId),
+    (Module, ModuleId),
+    (GenericParam, GenericParamId),
     (Impl, ImplId),
     (ExternCrateDecl, ExternCrateId),
 ];
@@ -196,9 +61,11 @@ impl_has_attrs![
 macro_rules! impl_has_attrs_enum {
     ($($variant:ident),* for $enum:ident) => {$(
         impl HasAttrs for $variant {
-            #[inline]
-            fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner {
-                $enum::$variant(self).attr_id(db)
+            fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
+                $enum::$variant(self).attrs(db)
+            }
+            fn attr_id(self) -> AttrDefId {
+                $enum::$variant(self).attr_id()
             }
         }
     )*};
@@ -207,46 +74,30 @@ macro_rules! impl_has_attrs_enum {
 impl_has_attrs_enum![Struct, Union, Enum for Adt];
 impl_has_attrs_enum![TypeParam, ConstParam, LifetimeParam for GenericParam];
 
-impl HasAttrs for Module {
-    #[inline]
-    fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner {
-        AttrsOwner::AttrDef(AttrDefId::ModuleId(InternedModuleId::new(db, self.id)))
-    }
-}
-
-impl HasAttrs for GenericParam {
-    #[inline]
-    fn attr_id(self, _db: &dyn HirDatabase) -> AttrsOwner {
+impl HasAttrs for AssocItem {
+    fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
         match self {
-            GenericParam::TypeParam(it) => AttrsOwner::TypeOrConstParam(it.merge().into()),
-            GenericParam::ConstParam(it) => AttrsOwner::TypeOrConstParam(it.merge().into()),
-            GenericParam::LifetimeParam(it) => AttrsOwner::LifetimeParam(it.into()),
+            AssocItem::Function(it) => it.attrs(db),
+            AssocItem::Const(it) => it.attrs(db),
+            AssocItem::TypeAlias(it) => it.attrs(db),
         }
     }
-}
-
-impl HasAttrs for AssocItem {
-    #[inline]
-    fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner {
+    fn attr_id(self) -> AttrDefId {
         match self {
-            AssocItem::Function(it) => it.attr_id(db),
-            AssocItem::Const(it) => it.attr_id(db),
-            AssocItem::TypeAlias(it) => it.attr_id(db),
+            AssocItem::Function(it) => it.attr_id(),
+            AssocItem::Const(it) => it.attr_id(),
+            AssocItem::TypeAlias(it) => it.attr_id(),
         }
     }
 }
 
 impl HasAttrs for crate::Crate {
-    #[inline]
-    fn attr_id(self, db: &dyn HirDatabase) -> AttrsOwner {
-        self.root_module().attr_id(db)
+    fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
+        let def = AttrDefId::ModuleId(self.root_module().id);
+        AttrsWithOwner::new(db, def)
     }
-}
-
-impl HasAttrs for Field {
-    #[inline]
-    fn attr_id(self, _db: &dyn HirDatabase) -> AttrsOwner {
-        AttrsOwner::Field(self.into())
+    fn attr_id(self) -> AttrDefId {
+        AttrDefId::ModuleId(self.root_module().id)
     }
 }
 
@@ -256,22 +107,21 @@ pub fn resolve_doc_path_on(
     def: impl HasAttrs + Copy,
     link: &str,
     ns: Option<Namespace>,
-    is_inner_doc: IsInnerDoc,
+    is_inner_doc: bool,
 ) -> Option<DocLinkDef> {
-    resolve_doc_path_on_(db, link, def.attr_id(db), ns, is_inner_doc)
+    resolve_doc_path_on_(db, link, def.attr_id(), ns, is_inner_doc)
 }
 
 fn resolve_doc_path_on_(
     db: &dyn HirDatabase,
     link: &str,
-    attr_id: AttrsOwner,
+    attr_id: AttrDefId,
     ns: Option<Namespace>,
-    is_inner_doc: IsInnerDoc,
+    is_inner_doc: bool,
 ) -> Option<DocLinkDef> {
     let resolver = match attr_id {
-        AttrsOwner::AttrDef(AttrDefId::ModuleId(it)) => {
-            let it = it.loc(db);
-            if is_inner_doc.yes() {
+        AttrDefId::ModuleId(it) => {
+            if is_inner_doc {
                 it.resolver(db)
             } else if let Some(parent) = Module::from(it).parent(db) {
                 parent.id.resolver(db)
@@ -279,20 +129,20 @@ fn resolve_doc_path_on_(
                 it.resolver(db)
             }
         }
-        AttrsOwner::AttrDef(AttrDefId::AdtId(it)) => it.resolver(db),
-        AttrsOwner::AttrDef(AttrDefId::FunctionId(it)) => it.resolver(db),
-        AttrsOwner::AttrDef(AttrDefId::EnumVariantId(it)) => it.resolver(db),
-        AttrsOwner::AttrDef(AttrDefId::StaticId(it)) => it.resolver(db),
-        AttrsOwner::AttrDef(AttrDefId::ConstId(it)) => it.resolver(db),
-        AttrsOwner::AttrDef(AttrDefId::TraitId(it)) => it.resolver(db),
-        AttrsOwner::AttrDef(AttrDefId::TypeAliasId(it)) => it.resolver(db),
-        AttrsOwner::AttrDef(AttrDefId::ImplId(it)) => it.resolver(db),
-        AttrsOwner::AttrDef(AttrDefId::ExternBlockId(it)) => it.resolver(db),
-        AttrsOwner::AttrDef(AttrDefId::UseId(it)) => it.resolver(db),
-        AttrsOwner::AttrDef(AttrDefId::MacroId(it)) => it.resolver(db),
-        AttrsOwner::AttrDef(AttrDefId::ExternCrateId(it)) => it.resolver(db),
-        AttrsOwner::Field(it) => it.parent.resolver(db),
-        AttrsOwner::LifetimeParam(_) | AttrsOwner::TypeOrConstParam(_) => return None,
+        AttrDefId::FieldId(it) => it.parent.resolver(db),
+        AttrDefId::AdtId(it) => it.resolver(db),
+        AttrDefId::FunctionId(it) => it.resolver(db),
+        AttrDefId::EnumVariantId(it) => it.resolver(db),
+        AttrDefId::StaticId(it) => it.resolver(db),
+        AttrDefId::ConstId(it) => it.resolver(db),
+        AttrDefId::TraitId(it) => it.resolver(db),
+        AttrDefId::TypeAliasId(it) => it.resolver(db),
+        AttrDefId::ImplId(it) => it.resolver(db),
+        AttrDefId::ExternBlockId(it) => it.resolver(db),
+        AttrDefId::UseId(it) => it.resolver(db),
+        AttrDefId::MacroId(it) => it.resolver(db),
+        AttrDefId::ExternCrateId(it) => it.resolver(db),
+        AttrDefId::GenericParamId(_) => return None,
     };
 
     let mut modpath = doc_modpath_from_str(link)?;
diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
index 6ef6ea272e58c..a6d67e8fb4fb5 100644
--- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
@@ -153,7 +153,8 @@ pub struct UnresolvedImport {
 
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub struct UnresolvedMacroCall {
-    pub range: InFile<TextRange>,
+    pub macro_call: InFile<SyntaxNodePtr>,
+    pub precise_location: Option<TextRange>,
     pub path: ModPath,
     pub is_bang: bool,
 }
@@ -184,7 +185,8 @@ pub struct InactiveCode {
 
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub struct MacroError {
-    pub range: InFile<TextRange>,
+    pub node: InFile<SyntaxNodePtr>,
+    pub precise_location: Option<TextRange>,
     pub message: String,
     pub error: bool,
     pub kind: &'static str,
@@ -192,7 +194,8 @@ pub struct MacroError {
 
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub struct MacroExpansionParseError {
-    pub range: InFile<TextRange>,
+    pub node: InFile<SyntaxNodePtr>,
+    pub precise_location: Option<TextRange>,
     pub errors: Arc<[SyntaxError]>,
 }
 
@@ -210,12 +213,12 @@ pub struct UnimplementedBuiltinMacro {
 
 #[derive(Debug)]
 pub struct InvalidDeriveTarget {
-    pub range: InFile<TextRange>,
+    pub node: InFile<SyntaxNodePtr>,
 }
 
 #[derive(Debug)]
 pub struct MalformedDerive {
-    pub range: InFile<TextRange>,
+    pub node: InFile<SyntaxNodePtr>,
 }
 
 #[derive(Debug)]
diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs
index 1b24aad103b42..9418903123179 100644
--- a/src/tools/rust-analyzer/crates/hir/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs
@@ -45,12 +45,11 @@ use arrayvec::ArrayVec;
 use base_db::{CrateDisplayName, CrateOrigin, LangCrateOrigin};
 use either::Either;
 use hir_def::{
-    AdtId, AssocItemId, AssocItemLoc, CallableDefId, ConstId, ConstParamId, CrateRootModuleId,
-    DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, FunctionId, GenericDefId,
-    GenericParamId, HasModule, ImplId, InternedModuleId, ItemContainerId, LifetimeParamId,
+    AdtId, AssocItemId, AssocItemLoc, AttrDefId, CallableDefId, ConstId, ConstParamId,
+    CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId,
+    FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LifetimeParamId,
     LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, SyntheticSyntax,
     TupleId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
-    attrs::AttrFlags,
     expr_store::{ExpressionStoreDiagnostics, ExpressionStoreSourceMap},
     hir::{
         BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, LabelId, Pat,
@@ -64,12 +63,13 @@ use hir_def::{
     },
     per_ns::PerNs,
     resolver::{HasResolver, Resolver},
-    signatures::{EnumSignature, ImplFlags, StaticFlags, StructFlags, TraitFlags, VariantFields},
+    signatures::{ImplFlags, StaticFlags, StructFlags, TraitFlags, VariantFields},
     src::HasSource as _,
     visibility::visibility_from_ast,
 };
 use hir_expand::{
-    AstId, MacroCallKind, RenderedExpandError, ValueResult, proc_macro::ProcMacroKind,
+    AstId, MacroCallKind, RenderedExpandError, ValueResult, attrs::collect_attrs,
+    proc_macro::ProcMacroKind,
 };
 use hir_ty::{
     TraitEnvironment, TyDefId, TyLoweringDiagnostic, ValueTyDefId, all_super_traits, autoderef,
@@ -98,8 +98,8 @@ use smallvec::SmallVec;
 use span::{AstIdNode, Edition, FileId};
 use stdx::{format_to, impl_from, never};
 use syntax::{
-    AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange, ToSmolStr,
-    ast::{self, HasName, HasVisibility as _},
+    AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, T, TextRange, ToSmolStr,
+    ast::{self, HasAttrs as _, HasName, HasVisibility as _},
     format_smolstr,
 };
 use triomphe::{Arc, ThinArc};
@@ -107,7 +107,7 @@ use triomphe::{Arc, ThinArc};
 use crate::db::{DefDatabase, HirDatabase};
 
 pub use crate::{
-    attrs::{AttrsWithOwner, HasAttrs, resolve_doc_path_on},
+    attrs::{HasAttrs, resolve_doc_path_on},
     diagnostics::*,
     has_source::HasSource,
     semantics::{
@@ -130,7 +130,7 @@ pub use {
     hir_def::{
         Complete,
         FindPathConfig,
-        attrs::{Docs, IsInnerDoc},
+        attr::{AttrSourceMap, Attrs, AttrsWithOwner},
         find_path::PrefixKind,
         import_map,
         lang_item::LangItem,
@@ -144,6 +144,7 @@ pub use {
     },
     hir_expand::{
         EditionedFileId, ExpandResult, HirFileId, MacroCallId, MacroKind,
+        attrs::{Attr, AttrId},
         change::ChangeWithProcMacros,
         files::{
             FilePosition, FilePositionWrapper, FileRange, FileRangeWrapper, HirFilePosition,
@@ -290,10 +291,11 @@ impl Crate {
     }
 
     /// Try to get the root URL of the documentation of a crate.
-    pub fn get_html_root_url(self, db: &dyn HirDatabase) -> Option<String> {
+    pub fn get_html_root_url(self: &Crate, db: &dyn HirDatabase) -> Option<String> {
         // Look for #![doc(html_root_url = "...")]
-        let doc_url = AttrFlags::doc_html_root_url(db, self.id);
-        doc_url.as_ref().map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/")
+        let attrs = db.attrs(AttrDefId::ModuleId(self.root_module().into()));
+        let doc_url = attrs.by_key(sym::doc).find_string_value_in_tt(sym::html_root_url);
+        doc_url.map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/")
     }
 
     pub fn cfg<'db>(&self, db: &'db dyn HirDatabase) -> &'db CfgOptions {
@@ -638,7 +640,7 @@ impl Module {
                 // FIXME: This is accidentally quadratic.
                 continue;
             }
-            emit_def_diagnostic(db, acc, diag, edition, def_map.krate());
+            emit_def_diagnostic(db, acc, diag, edition);
         }
 
         if !self.id.is_block_module() {
@@ -657,9 +659,8 @@ impl Module {
                     acc.extend(def.diagnostics(db, style_lints))
                 }
                 ModuleDef::Trait(t) => {
-                    let krate = t.krate(db);
                     for diag in TraitItems::query_with_diagnostics(db, t.id).1.iter() {
-                        emit_def_diagnostic(db, acc, diag, edition, krate.id);
+                        emit_def_diagnostic(db, acc, diag, edition);
                     }
 
                     for item in t.items(db) {
@@ -777,7 +778,7 @@ impl Module {
             let ast_id_map = db.ast_id_map(file_id);
 
             for diag in impl_def.id.impl_items_with_diagnostics(db).1.iter() {
-                emit_def_diagnostic(db, acc, diag, edition, loc.container.krate());
+                emit_def_diagnostic(db, acc, diag, edition);
             }
 
             if inherent_impls.invalid_impls().contains(&impl_def.id) {
@@ -808,10 +809,21 @@ impl Module {
                     return None;
                 }
                 let parent = impl_def.id.into();
-                let (lifetimes_attrs, type_and_consts_attrs) =
-                    AttrFlags::query_generic_params(db, parent);
-                let res = lifetimes_attrs.values().any(|it| it.contains(AttrFlags::MAY_DANGLE))
-                    || type_and_consts_attrs.values().any(|it| it.contains(AttrFlags::MAY_DANGLE));
+                let generic_params = db.generic_params(parent);
+                let lifetime_params = generic_params.iter_lt().map(|(local_id, _)| {
+                    GenericParamId::LifetimeParamId(LifetimeParamId { parent, local_id })
+                });
+                let type_params = generic_params
+                    .iter_type_or_consts()
+                    .filter(|(_, it)| it.type_param().is_some())
+                    .map(|(local_id, _)| {
+                        GenericParamId::TypeParamId(TypeParamId::from_unchecked(
+                            TypeOrConstParamId { parent, local_id },
+                        ))
+                    });
+                let res = type_params.chain(lifetime_params).any(|p| {
+                    db.attrs(AttrDefId::GenericParamId(p)).by_key(sym::may_dangle).exists()
+                });
                 Some(res)
             })()
             .unwrap_or(false);
@@ -972,17 +984,6 @@ impl Module {
     ) -> Option<ModPath> {
         hir_def::find_path::find_path(db, item.into().into(), self.into(), prefix_kind, true, cfg)
     }
-
-    #[inline]
-    pub fn doc_keyword(self, db: &dyn HirDatabase) -> Option<Symbol> {
-        AttrFlags::doc_keyword(db, InternedModuleId::new(db, self.id))
-    }
-
-    /// Whether it has `#[path = "..."]` attribute.
-    #[inline]
-    pub fn has_path(&self, db: &dyn HirDatabase) -> bool {
-        self.attrs(db).attrs.contains(AttrFlags::HAS_PATH)
-    }
 }
 
 fn macro_call_diagnostics<'db>(
@@ -997,19 +998,31 @@ fn macro_call_diagnostics<'db>(
     if let Some(err) = err {
         let loc = db.lookup_intern_macro_call(macro_call_id);
         let file_id = loc.kind.file_id();
-        let mut range = precise_macro_call_location(&loc.kind, db, loc.krate);
+        let node =
+            InFile::new(file_id, db.ast_id_map(file_id).get_erased(loc.kind.erased_ast_id()));
         let RenderedExpandError { message, error, kind } = err.render_to_string(db);
-        if Some(err.span().anchor.file_id) == file_id.file_id().map(|it| it.editioned_file_id(db)) {
-            range.value = err.span().range
-                + db.ast_id_map(file_id).get_erased(err.span().anchor.ast_id).text_range().start();
-        }
-        acc.push(MacroError { range, message, error, kind }.into());
+        let editioned_file_id = EditionedFileId::from_span(db, err.span().anchor.file_id);
+        let precise_location = if editioned_file_id == file_id {
+            Some(
+                err.span().range
+                    + db.ast_id_map(editioned_file_id.into())
+                        .get_erased(err.span().anchor.ast_id)
+                        .text_range()
+                        .start(),
+            )
+        } else {
+            None
+        };
+        acc.push(MacroError { node, precise_location, message, error, kind }.into());
     }
 
     if !parse_errors.is_empty() {
         let loc = db.lookup_intern_macro_call(macro_call_id);
-        let range = precise_macro_call_location(&loc.kind, db, loc.krate);
-        acc.push(MacroExpansionParseError { range, errors: parse_errors.clone() }.into())
+        let (node, precise_location) = precise_macro_call_location(&loc.kind, db);
+        acc.push(
+            MacroExpansionParseError { node, precise_location, errors: parse_errors.clone() }
+                .into(),
+        )
     }
 }
 
@@ -1033,7 +1046,6 @@ fn emit_macro_def_diagnostics<'db>(
             acc,
             &DefDiagnosticKind::MacroDefError { ast, message: e.to_string() },
             edition,
-            m.krate(db).id,
         );
     }
 }
@@ -1043,9 +1055,8 @@ fn emit_def_diagnostic<'db>(
     acc: &mut Vec<AnyDiagnostic<'db>>,
     diag: &DefDiagnostic,
     edition: Edition,
-    krate: base_db::Crate,
 ) {
-    emit_def_diagnostic_(db, acc, &diag.kind, edition, krate)
+    emit_def_diagnostic_(db, acc, &diag.kind, edition)
 }
 
 fn emit_def_diagnostic_<'db>(
@@ -1053,7 +1064,6 @@ fn emit_def_diagnostic_<'db>(
     acc: &mut Vec<AnyDiagnostic<'db>>,
     diag: &DefDiagnosticKind,
     edition: Edition,
-    krate: base_db::Crate,
 ) {
     match diag {
         DefDiagnosticKind::UnresolvedModule { ast: declaration, candidates } => {
@@ -1076,7 +1086,8 @@ fn emit_def_diagnostic_<'db>(
             let RenderedExpandError { message, error, kind } = err.render_to_string(db);
             acc.push(
                 MacroError {
-                    range: InFile::new(ast.file_id, item.text_range()),
+                    node: InFile::new(ast.file_id, item.syntax_node_ptr()),
+                    precise_location: None,
                     message: format!("{}: {message}", path.display(db, edition)),
                     error,
                     kind,
@@ -1106,10 +1117,11 @@ fn emit_def_diagnostic_<'db>(
             );
         }
         DefDiagnosticKind::UnresolvedMacroCall { ast, path } => {
-            let location = precise_macro_call_location(ast, db, krate);
+            let (node, precise_location) = precise_macro_call_location(ast, db);
             acc.push(
                 UnresolvedMacroCall {
-                    range: location,
+                    macro_call: node,
+                    precise_location,
                     path: path.clone(),
                     is_bang: matches!(ast, MacroCallKind::FnLike { .. }),
                 }
@@ -1128,12 +1140,34 @@ fn emit_def_diagnostic_<'db>(
             );
         }
         DefDiagnosticKind::InvalidDeriveTarget { ast, id } => {
-            let derive = id.find_attr_range(db, krate, *ast).3.path_range();
-            acc.push(InvalidDeriveTarget { range: ast.with_value(derive) }.into());
+            let node = ast.to_node(db);
+            let derive = node.attrs().nth(*id);
+            match derive {
+                Some(derive) => {
+                    acc.push(
+                        InvalidDeriveTarget {
+                            node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))),
+                        }
+                        .into(),
+                    );
+                }
+                None => stdx::never!("derive diagnostic on item without derive attribute"),
+            }
         }
         DefDiagnosticKind::MalformedDerive { ast, id } => {
-            let derive = id.find_attr_range(db, krate, *ast).2;
-            acc.push(MalformedDerive { range: ast.with_value(derive) }.into());
+            let node = ast.to_node(db);
+            let derive = node.attrs().nth(*id);
+            match derive {
+                Some(derive) => {
+                    acc.push(
+                        MalformedDerive {
+                            node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))),
+                        }
+                        .into(),
+                    );
+                }
+                None => stdx::never!("derive diagnostic on item without derive attribute"),
+            }
         }
         DefDiagnosticKind::MacroDefError { ast, message } => {
             let node = ast.to_node(db);
@@ -1152,28 +1186,61 @@ fn emit_def_diagnostic_<'db>(
 fn precise_macro_call_location(
     ast: &MacroCallKind,
     db: &dyn HirDatabase,
-    krate: base_db::Crate,
-) -> InFile<TextRange> {
+) -> (InFile<SyntaxNodePtr>, Option<TextRange>) {
     // FIXME: maybe we actually want slightly different ranges for the different macro diagnostics
     // - e.g. the full attribute for macro errors, but only the name for name resolution
     match ast {
         MacroCallKind::FnLike { ast_id, .. } => {
             let node = ast_id.to_node(db);
-            let range = node
-                .path()
-                .and_then(|it| it.segment())
-                .and_then(|it| it.name_ref())
-                .map(|it| it.syntax().text_range());
-            let range = range.unwrap_or_else(|| node.syntax().text_range());
-            ast_id.with_value(range)
+            (
+                ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))),
+                node.path()
+                    .and_then(|it| it.segment())
+                    .and_then(|it| it.name_ref())
+                    .map(|it| it.syntax().text_range()),
+            )
         }
         MacroCallKind::Derive { ast_id, derive_attr_index, derive_index, .. } => {
-            let range = derive_attr_index.find_derive_range(db, krate, *ast_id, *derive_index);
-            ast_id.with_value(range)
+            let node = ast_id.to_node(db);
+            // Compute the precise location of the macro name's token in the derive
+            // list.
+            let token = (|| {
+                let derive_attr = collect_attrs(&node)
+                    .nth(derive_attr_index.ast_index())
+                    .and_then(|x| Either::left(x.1))?;
+                let token_tree = derive_attr.meta()?.token_tree()?;
+                let chunk_by = token_tree
+                    .syntax()
+                    .children_with_tokens()
+                    .filter_map(|elem| match elem {
+                        syntax::NodeOrToken::Token(tok) => Some(tok),
+                        _ => None,
+                    })
+                    .chunk_by(|t| t.kind() == T![,]);
+                let (_, mut group) = chunk_by
+                    .into_iter()
+                    .filter(|&(comma, _)| !comma)
+                    .nth(*derive_index as usize)?;
+                group.find(|t| t.kind() == T![ident])
+            })();
+            (
+                ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))),
+                token.as_ref().map(|tok| tok.text_range()),
+            )
         }
-        MacroCallKind::Attr { ast_id, censored_attr_ids: attr_ids, .. } => {
-            let attr_range = attr_ids.invoc_attr().find_attr_range(db, krate, *ast_id).2;
-            ast_id.with_value(attr_range)
+        MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+            let node = ast_id.to_node(db);
+            let attr = collect_attrs(&node)
+                .nth(invoc_attr_index.ast_index())
+                .and_then(|x| Either::left(x.1))
+                .unwrap_or_else(|| {
+                    panic!("cannot find attribute #{}", invoc_attr_index.ast_index())
+                });
+
+            (
+                ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))),
+                Some(attr.syntax().text_range()),
+            )
         }
     }
 }
@@ -1371,7 +1438,7 @@ impl Struct {
     }
 
     pub fn repr(self, db: &dyn HirDatabase) -> Option<ReprOptions> {
-        AttrFlags::repr(db, self.id.into())
+        db.struct_signature(self.id).repr
     }
 
     pub fn kind(self, db: &dyn HirDatabase) -> StructKind {
@@ -1387,7 +1454,7 @@ impl Struct {
     }
 
     pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
-        AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE)
+        db.attrs(self.id.into()).is_unstable()
     }
 
     pub fn instantiate_infer<'db>(self, infer_ctxt: &InferCtxt<'db>) -> InstantiatedStruct<'db> {
@@ -1476,7 +1543,7 @@ impl Union {
             .collect()
     }
     pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
-        AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE)
+        db.attrs(self.id.into()).is_unstable()
     }
 }
 
@@ -1511,7 +1578,7 @@ impl Enum {
     }
 
     pub fn repr(self, db: &dyn HirDatabase) -> Option<ReprOptions> {
-        AttrFlags::repr(db, self.id.into())
+        db.enum_signature(self.id).repr
     }
 
     pub fn ty<'db>(self, db: &'db dyn HirDatabase) -> Type<'db> {
@@ -1527,7 +1594,7 @@ impl Enum {
         let interner = DbInterner::new_with(db, None, None);
         Type::new_for_crate(
             self.id.lookup(db).container.krate(),
-            match EnumSignature::variant_body_type(db, self.id) {
+            match db.enum_signature(self.id).variant_body_type() {
                 layout::IntegerType::Pointer(sign) => match sign {
                     true => Ty::new_int(interner, rustc_type_ir::IntTy::Isize),
                     false => Ty::new_uint(interner, rustc_type_ir::UintTy::Usize),
@@ -1568,7 +1635,7 @@ impl Enum {
     }
 
     pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
-        AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE)
+        db.attrs(self.id.into()).is_unstable()
     }
 }
 
@@ -1669,7 +1736,7 @@ impl Variant {
     }
 
     pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
-        AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE)
+        db.attrs(self.id.into()).is_unstable()
     }
 
     pub fn instantiate_infer<'db>(self, infer_ctxt: &InferCtxt<'db>) -> InstantiatedVariant<'db> {
@@ -2154,7 +2221,8 @@ fn expr_store_diagnostics<'db>(
                 InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into()
             }
             ExpressionStoreDiagnostics::UnresolvedMacroCall { node, path } => UnresolvedMacroCall {
-                range: node.map(|ptr| ptr.text_range()),
+                macro_call: (*node).map(|ast_ptr| ast_ptr.into()),
+                precise_location: None,
                 path: path.clone(),
                 is_bang: true,
             }
@@ -2379,33 +2447,33 @@ impl Function {
 
     /// Does this function have `#[test]` attribute?
     pub fn is_test(self, db: &dyn HirDatabase) -> bool {
-        self.attrs(db).is_test()
+        db.attrs(self.id.into()).is_test()
     }
 
     /// is this a `fn main` or a function with an `export_name` of `main`?
     pub fn is_main(self, db: &dyn HirDatabase) -> bool {
-        self.exported_main(db)
+        db.attrs(self.id.into()).export_name() == Some(&sym::main)
             || self.module(db).is_crate_root() && db.function_signature(self.id).name == sym::main
     }
 
     /// Is this a function with an `export_name` of `main`?
     pub fn exported_main(self, db: &dyn HirDatabase) -> bool {
-        AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_EXPORT_NAME_MAIN)
+        db.attrs(self.id.into()).export_name() == Some(&sym::main)
     }
 
     /// Does this function have the ignore attribute?
     pub fn is_ignore(self, db: &dyn HirDatabase) -> bool {
-        AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_IGNORE)
+        db.attrs(self.id.into()).is_ignore()
     }
 
     /// Does this function have `#[bench]` attribute?
     pub fn is_bench(self, db: &dyn HirDatabase) -> bool {
-        AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_BENCH)
+        db.attrs(self.id.into()).is_bench()
     }
 
     /// Is this function marked as unstable with `#[feature]` attribute?
     pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
-        AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_UNSTABLE)
+        db.attrs(self.id.into()).is_unstable()
     }
 
     pub fn is_unsafe_to_call(
@@ -2416,7 +2484,8 @@ impl Function {
     ) -> bool {
         let (target_features, target_feature_is_safe_in_target) = caller
             .map(|caller| {
-                let target_features = hir_ty::TargetFeatures::from_fn(db, caller.id);
+                let target_features =
+                    hir_ty::TargetFeatures::from_attrs(&db.attrs(caller.id.into()));
                 let target_feature_is_safe_in_target =
                     match &caller.krate(db).id.workspace_data(db).target {
                         Ok(target) => hir_ty::target_feature_is_safe_in_target(target),
@@ -2447,6 +2516,14 @@ impl Function {
     }
 
     pub fn as_proc_macro(self, db: &dyn HirDatabase) -> Option<Macro> {
+        let attrs = db.attrs(self.id.into());
+        // FIXME: Store this in FunctionData flags?
+        if !(attrs.is_proc_macro()
+            || attrs.is_proc_macro_attribute()
+            || attrs.is_proc_macro_derive())
+        {
+            return None;
+        }
         let def_map = crate_def_map(db, HasModule::krate(&self.id, db));
         def_map.fn_as_proc_macro(self.id).map(|id| Macro { id: id.into() })
     }
@@ -2899,7 +2976,7 @@ impl Trait {
 
     /// `#[rust_analyzer::completions(...)]` mode.
     pub fn complete(self, db: &dyn HirDatabase) -> Complete {
-        Complete::extract(true, self.attrs(db).attrs)
+        Complete::extract(true, &self.attrs(db))
     }
 }
 
@@ -3070,10 +3147,10 @@ impl Macro {
                 let loc = id.lookup(db);
                 let source = loc.source(db);
                 match loc.kind {
-                    ProcMacroKind::CustomDerive => AttrFlags::derive_info(db, self.id).map_or_else(
-                        || as_name_opt(source.value.name()),
-                        |info| Name::new_symbol_root(info.trait_name.clone()),
-                    ),
+                    ProcMacroKind::CustomDerive => db
+                        .attrs(id.into())
+                        .parse_proc_macro_derive()
+                        .map_or_else(|| as_name_opt(source.value.name()), |(it, _)| it),
                     ProcMacroKind::Bang | ProcMacroKind::Attr => as_name_opt(source.value.name()),
                 }
             }
@@ -3081,7 +3158,7 @@ impl Macro {
     }
 
     pub fn is_macro_export(self, db: &dyn HirDatabase) -> bool {
-        matches!(self.id, MacroId::MacroRulesId(_) if AttrFlags::query(db, self.id.into()).contains(AttrFlags::IS_MACRO_EXPORT))
+        matches!(self.id, MacroId::MacroRulesId(_) if db.attrs(self.id.into()).by_key(sym::macro_export).exists())
     }
 
     pub fn is_proc_macro(self) -> bool {
@@ -3905,10 +3982,18 @@ impl DeriveHelper {
     }
 
     pub fn name(&self, db: &dyn HirDatabase) -> Name {
-        AttrFlags::derive_info(db, self.derive)
-            .and_then(|it| it.helpers.get(self.idx as usize))
-            .map(|helper| Name::new_symbol_root(helper.clone()))
-            .unwrap_or_else(Name::missing)
+        match self.derive {
+            makro @ MacroId::Macro2Id(_) => db
+                .attrs(makro.into())
+                .parse_rustc_builtin_macro()
+                .and_then(|(_, helpers)| helpers.get(self.idx as usize).cloned()),
+            MacroId::MacroRulesId(_) => None,
+            makro @ MacroId::ProcMacroId(_) => db
+                .attrs(makro.into())
+                .parse_proc_macro_derive()
+                .and_then(|(_, helpers)| helpers.get(self.idx as usize).cloned()),
+        }
+        .unwrap_or_else(Name::missing)
     }
 }
 
@@ -4129,7 +4214,7 @@ impl TypeParam {
     }
 
     pub fn is_unstable(self, db: &dyn HirDatabase) -> bool {
-        self.attrs(db).is_unstable()
+        db.attrs(GenericParamId::from(self.id).into()).is_unstable()
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
index 8eb1c9725cd2a..62ce3daab75df 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
@@ -21,6 +21,7 @@ use hir_def::{
 };
 use hir_expand::{
     EditionedFileId, ExpandResult, FileRange, HirFileId, InMacroFile, MacroCallId,
+    attrs::collect_attrs,
     builtin::{BuiltinFnLikeExpander, EagerExpander},
     db::ExpandDatabase,
     files::{FileRangeWrapper, HirFileRange, InRealFile},
@@ -35,7 +36,7 @@ use intern::{Interned, Symbol, sym};
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::{SmallVec, smallvec};
-use span::{FileId, SyntaxContext};
+use span::{Edition, FileId, SyntaxContext};
 use stdx::{TupleExt, always};
 use syntax::{
     AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange,
@@ -385,14 +386,17 @@ impl<'db> SemanticsImpl<'db> {
     }
 
     pub fn attach_first_edition(&self, file: FileId) -> Option<EditionedFileId> {
-        let krate = self.file_to_module_defs(file).next()?.krate();
-        Some(EditionedFileId::new(self.db, file, krate.edition(self.db), krate.id))
+        Some(EditionedFileId::new(
+            self.db,
+            file,
+            self.file_to_module_defs(file).next()?.krate().edition(self.db),
+        ))
     }
 
     pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile {
         let file_id = self
             .attach_first_edition(file_id)
-            .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(self.db, file_id));
+            .unwrap_or_else(|| EditionedFileId::new(self.db, file_id, Edition::CURRENT));
 
         let tree = self.db.parse(file_id).tree();
         self.cache(tree.syntax().clone(), file_id.into());
@@ -1193,34 +1197,33 @@ impl<'db> SemanticsImpl<'db> {
                                     .zip(Some(item))
                             })
                             .map(|(call_id, item)| {
-                                let item_range = item.syntax().text_range();
-                                let loc = db.lookup_intern_macro_call(call_id);
-                                let text_range = match loc.kind {
+                                let attr_id = match db.lookup_intern_macro_call(call_id).kind {
                                     hir_expand::MacroCallKind::Attr {
-                                        censored_attr_ids: attr_ids,
-                                        ..
-                                    } => {
-                                        // FIXME: here, the attribute's text range is used to strip away all
-                                        // entries from the start of the attribute "list" up the invoking
-                                        // attribute. But in
-                                        // ```
-                                        // mod foo {
-                                        //     #![inner]
-                                        // }
-                                        // ```
-                                        // we don't wanna strip away stuff in the `mod foo {` range, that is
-                                        // here if the id corresponds to an inner attribute we got strip all
-                                        // text ranges of the outer ones, and then all of the inner ones up
-                                        // to the invoking attribute so that the inbetween is ignored.
-                                        // FIXME: Should cfg_attr be handled differently?
-                                        let (attr, _, _, _) = attr_ids
-                                            .invoc_attr()
-                                            .find_attr_range_with_source(db, loc.krate, &item);
-                                        let start = attr.syntax().text_range().start();
-                                        TextRange::new(start, item_range.end())
-                                    }
-                                    _ => item_range,
+                                        invoc_attr_index, ..
+                                    } => invoc_attr_index.ast_index(),
+                                    _ => 0,
                                 };
+                                // FIXME: here, the attribute's text range is used to strip away all
+                                // entries from the start of the attribute "list" up the invoking
+                                // attribute. But in
+                                // ```
+                                // mod foo {
+                                //     #![inner]
+                                // }
+                                // ```
+                                // we don't wanna strip away stuff in the `mod foo {` range, that is
+                                // here if the id corresponds to an inner attribute we got strip all
+                                // text ranges of the outer ones, and then all of the inner ones up
+                                // to the invoking attribute so that the inbetween is ignored.
+                                let text_range = item.syntax().text_range();
+                                let start = collect_attrs(&item)
+                                    .nth(attr_id)
+                                    .map(|attr| match attr.1 {
+                                        Either::Left(it) => it.syntax().text_range().start(),
+                                        Either::Right(it) => it.syntax().text_range().start(),
+                                    })
+                                    .unwrap_or_else(|| text_range.start());
+                                let text_range = TextRange::new(start, text_range.end());
                                 filter_duplicates(tokens, text_range);
                                 process_expansion_for_token(ctx, &mut stack, call_id)
                             })
@@ -1470,14 +1473,6 @@ impl<'db> SemanticsImpl<'db> {
         FileRangeWrapper { file_id: file_id.file_id(self.db), range }
     }
 
-    pub fn diagnostics_display_range_for_range(
-        &self,
-        src: InFile<TextRange>,
-    ) -> FileRangeWrapper<FileId> {
-        let FileRange { file_id, range } = src.original_node_file_range_rooted(self.db);
-        FileRangeWrapper { file_id: file_id.file_id(self.db), range }
-    }
-
     fn token_ancestors_with_macros(
         &self,
         token: SyntaxToken,
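The semantics.rs hunk above switches to `collect_attrs(&item).nth(attr_id)` to find where the invoking attribute starts and falls back to the item's own start when no attribute exists at that index. A minimal, self-contained illustration of that range computation, using plain offset tuples instead of `TextRange` (the function and parameter names below are invented for illustration, not rust-analyzer API):

```rust
// Given the byte range of an item and the start offsets of its attributes,
// return the range from the invoking attribute to the end of the item,
// falling back to the whole item when the index is out of bounds.
fn invoking_attr_range(
    item_range: (u32, u32),  // (start, end) of the annotated item
    attr_starts: &[u32],     // start offsets of `#[...]` attributes, in source order
    invoc_attr_index: usize, // index of the attribute that triggered the expansion
) -> (u32, u32) {
    let start = attr_starts.get(invoc_attr_index).copied().unwrap_or(item_range.0);
    (start, item_range.1)
}

fn main() {
    // The second attribute (index 1) is the invoking one, so tokens before offset 20 are skipped.
    assert_eq!(invoking_attr_range((0, 120), &[0, 20], 1), (20, 120));
    // An out-of-bounds index keeps the full item range.
    assert_eq!(invoking_attr_range((0, 120), &[0, 20], 5), (0, 120));
}
```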
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs
index 165ac7e4a08d3..5019a5987e513 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs
@@ -5,7 +5,7 @@
 //! node for a *child*, and get its hir.
 
 use either::Either;
-use hir_expand::HirFileId;
+use hir_expand::{HirFileId, attrs::collect_attrs};
 use span::AstIdNode;
 use syntax::{AstPtr, ast};
 
@@ -94,7 +94,6 @@ impl ChildBySource for ModuleId {
 
 impl ChildBySource for ItemScope {
     fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
-        let krate = file_id.krate(db);
         self.declarations().for_each(|item| add_module_def(db, res, file_id, item));
         self.impls().for_each(|imp| insert_item_loc(db, res, file_id, imp, keys::IMPL));
         self.extern_blocks().for_each(|extern_block| {
@@ -124,10 +123,12 @@ impl ChildBySource for ItemScope {
             |(ast_id, calls)| {
                 let adt = ast_id.to_node(db);
                 calls.for_each(|(attr_id, call_id, calls)| {
-                    // FIXME: Fix cfg_attr handling.
-                    let (attr, _, _, _) = attr_id.find_attr_range_with_source(db, krate, &adt);
-                    res[keys::DERIVE_MACRO_CALL]
-                        .insert(AstPtr::new(&attr), (attr_id, call_id, calls.into()));
+                    if let Some((_, Either::Left(attr))) =
+                        collect_attrs(&adt).nth(attr_id.ast_index())
+                    {
+                        res[keys::DERIVE_MACRO_CALL]
+                            .insert(AstPtr::new(&attr), (attr_id, call_id, calls.into()));
+                    }
                 });
             },
         );
diff --git a/src/tools/rust-analyzer/crates/hir/src/symbols.rs b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
index 9059c88ad66a1..d8c624e5c6896 100644
--- a/src/tools/rust-analyzer/crates/hir/src/symbols.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
@@ -392,12 +392,12 @@ impl<'a> SymbolCollector<'a> {
         let mut do_not_complete = Complete::Yes;
 
         if let Some(attrs) = def.attrs(self.db) {
-            do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), attrs.attrs);
+            do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), &attrs);
             if let Some(trait_do_not_complete) = trait_do_not_complete {
                 do_not_complete = Complete::for_trait_item(trait_do_not_complete, do_not_complete);
             }
 
-            for alias in attrs.doc_aliases(self.db) {
+            for alias in attrs.doc_aliases() {
                 self.symbols.insert(FileSymbol {
                     name: alias.clone(),
                     def,
@@ -441,9 +441,9 @@ impl<'a> SymbolCollector<'a> {
 
         let mut do_not_complete = Complete::Yes;
         if let Some(attrs) = def.attrs(self.db) {
-            do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), attrs.attrs);
+            do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), &attrs);
 
-            for alias in attrs.doc_aliases(self.db) {
+            for alias in attrs.doc_aliases() {
                 self.symbols.insert(FileSymbol {
                     name: alias.clone(),
                     def,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
index e06c534e3c51f..7843ab9e8f25b 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
@@ -1,7 +1,7 @@
 use std::iter::{self, Peekable};
 
 use either::Either;
-use hir::{Adt, AsAssocItem, Crate, FindPathConfig, HasAttrs, ModuleDef, Semantics};
+use hir::{Adt, AsAssocItem, Crate, FindPathConfig, HasAttrs, ModuleDef, Semantics, sym};
 use ide_db::RootDatabase;
 use ide_db::assists::ExprFillDefaultMode;
 use ide_db::syntax_helpers::suggest_name;
@@ -401,7 +401,7 @@ impl ExtendedVariant {
     fn should_be_hidden(self, db: &RootDatabase, krate: Crate) -> bool {
         match self {
             ExtendedVariant::Variant { variant: var, .. } => {
-                var.attrs(db).is_doc_hidden() && var.module(db).krate() != krate
+                var.attrs(db).has_doc_hidden() && var.module(db).krate() != krate
             }
             _ => false,
         }
@@ -424,7 +424,7 @@ impl ExtendedEnum {
     fn is_non_exhaustive(&self, db: &RootDatabase, krate: Crate) -> bool {
         match self {
             ExtendedEnum::Enum { enum_: e, .. } => {
-                e.attrs(db).is_non_exhaustive() && e.module(db).krate() != krate
+                e.attrs(db).by_key(sym::non_exhaustive).exists() && e.module(db).krate() != krate
             }
             _ => false,
         }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs
index 46f210804da32..8b24d33bf9965 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs
@@ -1,4 +1,4 @@
-use hir::HasVisibility;
+use hir::{HasVisibility, sym};
 use ide_db::{
     FxHashMap, FxHashSet,
     assists::AssistId,
@@ -93,7 +93,7 @@ fn collect_data(ident_pat: ast::IdentPat, ctx: &AssistContext<'_>) -> Option<StructEditData> {
                 let mut buf = String::from("./");
                 let db = ctx.db();
                 match parent_module.name(db) {
-                    Some(name) if !parent_module.is_mod_rs(db) && !parent_module.has_path(db) => {
+                    Some(name)
+                        if !parent_module.is_mod_rs(db)
+                            && parent_module
+                                .attrs(db)
+                                .by_key(sym::path)
+                                .string_value_unescape()
+                                .is_none() =>
+                    {
                         format_to!(buf, "{}/", name.as_str())
                     }
                     _ => (),
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
index eb7553222a688..2977f8b8c2e75 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
@@ -68,7 +68,7 @@ pub mod utils;
 
 use hir::Semantics;
 use ide_db::{EditionedFileId, RootDatabase};
-use syntax::TextRange;
+use syntax::{Edition, TextRange};
 
 pub(crate) use crate::assist_context::{AssistContext, Assists};
 
@@ -90,7 +90,7 @@ pub fn assists(
     let sema = Semantics::new(db);
     let file_id = sema
         .attach_first_edition(range.file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, range.file_id));
+        .unwrap_or_else(|| EditionedFileId::new(db, range.file_id, Edition::CURRENT));
     let ctx = AssistContext::new(sema, config, hir::FileRange { file_id, range: range.range });
     let mut acc = Assists::new(&ctx, resolve);
     handlers::all().iter().for_each(|handler| {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
index 2e220b129fe13..ade60691b57bc 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
@@ -321,13 +321,11 @@ fn check_with_config(
     let _tracing = setup_tracing();
     let (mut db, file_with_caret_id, range_or_offset) = RootDatabase::with_range_or_offset(before);
     db.enable_proc_attr_macros();
-    let sema = Semantics::new(&db);
-    let file_with_caret_id =
-        sema.attach_first_edition(file_with_caret_id.file_id(&db)).unwrap_or(file_with_caret_id);
     let text_without_caret = db.file_text(file_with_caret_id.file_id(&db)).text(&db).to_string();
 
     let frange = hir::FileRange { file_id: file_with_caret_id, range: range_or_offset.into() };
 
+    let sema = Semantics::new(&db);
     let ctx = AssistContext::new(sema, &config, frange);
     let resolve = match expected {
         ExpectedResult::Unresolved => AssistResolveStrategy::None,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
index 7a86339c1c9c3..5a3c5a39dac79 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
@@ -93,7 +93,16 @@ pub fn test_related_attribute_syn(fn_def: &ast::Fn) -> Option<ast::Attr> {
 }
 
 pub fn has_test_related_attribute(attrs: &hir::AttrsWithOwner) -> bool {
-    attrs.is_test()
+    attrs.iter().any(|attr| {
+        let path = attr.path();
+        (|| {
+            Some(
+                path.segments().first()?.as_str().starts_with("test")
+                    || path.segments().last()?.as_str().ends_with("test"),
+            )
+        })()
+        .unwrap_or_default()
+    })
 }
 
 #[derive(Clone, Copy, PartialEq)]
@@ -119,7 +128,7 @@ pub fn filter_assoc_items(
         .copied()
         .filter(|assoc_item| {
             if ignore_items == IgnoreAssocItems::DocHiddenAttrPresent
-                && assoc_item.attrs(sema.db).is_doc_hidden()
+                && assoc_item.attrs(sema.db).has_doc_hidden()
             {
                 if let hir::AssocItem::Function(f) = assoc_item
                     && !f.has_body(sema.db)
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs
index df577b8ed02eb..c87c46d98127b 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs
@@ -56,7 +56,7 @@ pub(super) fn complete_lint(
         };
         let mut item =
             CompletionItem::new(SymbolKind::Attribute, ctx.source_range(), label, ctx.edition);
-        item.documentation(Documentation::new_owned(description.to_owned()));
+        item.documentation(Documentation::new(description.to_owned()));
         item.add_to(acc, ctx.db)
     }
 }
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs
index 20d01485a45a2..d1e05a4359f19 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs
@@ -266,7 +266,7 @@ fn import_on_the_fly(
             let original_item = &import.original_item;
             !ctx.is_item_hidden(&import.item_to_import)
                 && !ctx.is_item_hidden(original_item)
-                && ctx.check_stability(original_item.attrs(ctx.db).as_ref())
+                && ctx.check_stability(original_item.attrs(ctx.db).as_deref())
         })
         .filter(|import| filter_excluded_flyimport(ctx, import))
         .sorted_by(|a, b| {
@@ -313,7 +313,7 @@ fn import_on_the_fly_pat_(
             let original_item = &import.original_item;
             !ctx.is_item_hidden(&import.item_to_import)
                 && !ctx.is_item_hidden(original_item)
-                && ctx.check_stability(original_item.attrs(ctx.db).as_ref())
+                && ctx.check_stability(original_item.attrs(ctx.db).as_deref())
         })
         .sorted_by(|a, b| {
             let key = |import_path| {
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs
index 4474d6181c209..73cbe3f0aaab9 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs
@@ -440,7 +440,7 @@ fn add_custom_postfix_completions(
             let body = snippet.postfix_snippet(receiver_text);
             let mut builder =
                 postfix_snippet(trigger, snippet.description.as_deref().unwrap_or_default(), &body);
-            builder.documentation(Documentation::new_owned(format!("```rust\n{body}\n```")));
+            builder.documentation(Documentation::new(format!("```rust\n{body}\n```")));
             for import in imports.into_iter() {
                 builder.add_import(import);
             }
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs
index 04450aea75bf7..ead9852eff53c 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs
@@ -139,7 +139,7 @@ fn add_custom_completions(
             };
             let body = snip.snippet();
             let mut builder = snippet(ctx, cap, trigger, &body);
-            builder.documentation(Documentation::new_owned(format!("```rust\n{body}\n```")));
+            builder.documentation(Documentation::new(format!("```rust\n{body}\n```")));
             for import in imports.into_iter() {
                 builder.add_import(import);
             }
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
index c95b83ef8a027..fc2cc3b796ec9 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
@@ -558,7 +558,7 @@ impl CompletionContext<'_> {
         I: hir::HasAttrs + Copy,
     {
         let attrs = item.attrs(self.db);
-        attrs.doc_aliases(self.db).iter().map(|it| it.as_str().into()).collect()
+        attrs.doc_aliases().map(|it| it.as_str().into()).collect()
     }
 
     /// Check if an item is `#[doc(hidden)]`.
@@ -572,7 +572,7 @@ impl CompletionContext<'_> {
     }
 
     /// Checks whether this item should be listed in regards to stability. Returns `true` if we should.
-    pub(crate) fn check_stability(&self, attrs: Option<&hir::AttrsWithOwner>) -> bool {
+    pub(crate) fn check_stability(&self, attrs: Option<&hir::Attrs>) -> bool {
         let Some(attrs) = attrs else {
             return true;
         };
@@ -590,15 +590,15 @@ impl CompletionContext<'_> {
 
     /// Whether the given trait is an operator trait or not.
     pub(crate) fn is_ops_trait(&self, trait_: hir::Trait) -> bool {
-        match trait_.attrs(self.db).lang(self.db) {
-            Some(lang) => OP_TRAIT_LANG_NAMES.contains(&lang),
+        match trait_.attrs(self.db).lang() {
+            Some(lang) => OP_TRAIT_LANG_NAMES.contains(&lang.as_str()),
             None => false,
         }
     }
 
     /// Whether the given trait has `#[doc(notable_trait)]`
     pub(crate) fn is_doc_notable_trait(&self, trait_: hir::Trait) -> bool {
-        trait_.attrs(self.db).is_doc_notable_trait()
+        trait_.attrs(self.db).has_doc_notable_trait()
     }
 
     /// Returns the traits in scope, with the [`Drop`] trait removed.
@@ -662,7 +662,7 @@ impl CompletionContext<'_> {
     fn is_visible_impl(
         &self,
         vis: &hir::Visibility,
-        attrs: &hir::AttrsWithOwner,
+        attrs: &hir::Attrs,
         defining_crate: hir::Crate,
     ) -> Visible {
         if !self.check_stability(Some(attrs)) {
@@ -684,18 +684,14 @@ impl CompletionContext<'_> {
         if self.is_doc_hidden(attrs, defining_crate) { Visible::No } else { Visible::Yes }
     }
 
-    pub(crate) fn is_doc_hidden(
-        &self,
-        attrs: &hir::AttrsWithOwner,
-        defining_crate: hir::Crate,
-    ) -> bool {
+    pub(crate) fn is_doc_hidden(&self, attrs: &hir::Attrs, defining_crate: hir::Crate) -> bool {
         // `doc(hidden)` items are only completed within the defining crate.
-        self.krate != defining_crate && attrs.is_doc_hidden()
+        self.krate != defining_crate && attrs.has_doc_hidden()
     }
 
     pub(crate) fn doc_aliases_in_scope(&self, scope_def: ScopeDef) -> Vec<SmolStr> {
         if let Some(attrs) = scope_def.attrs(self.db) {
-            attrs.doc_aliases(self.db).iter().map(|it| it.as_str().into()).collect()
+            attrs.doc_aliases().map(|it| it.as_str().into()).collect()
         } else {
             vec![]
         }
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
index c526c7f070bff..303c71230d606 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
@@ -57,8 +57,7 @@ pub struct CompletionItem {
 
     /// Additional info to show in the UI pop up.
     pub detail: Option<String>,
-    // FIXME: Make this with `'db` lifetime.
-    pub documentation: Option<Documentation<'static>>,
+    pub documentation: Option<Documentation>,
 
     /// Whether this item is marked as deprecated
     pub deprecated: bool,
@@ -489,8 +488,7 @@ pub(crate) struct Builder {
     insert_text: Option<String>,
     is_snippet: bool,
     detail: Option<String>,
-    // FIXME: Make this with `'db` lifetime.
-    documentation: Option<Documentation<'static>>,
+    documentation: Option<Documentation>,
     lookup: Option<SmolStr>,
     kind: CompletionItemKind,
     text_edit: Option<TextEdit>,
@@ -646,11 +644,11 @@ impl Builder {
         self
     }
     #[allow(unused)]
-    pub(crate) fn documentation(&mut self, docs: Documentation<'_>) -> &mut Builder {
+    pub(crate) fn documentation(&mut self, docs: Documentation) -> &mut Builder {
         self.set_documentation(Some(docs))
     }
-    pub(crate) fn set_documentation(&mut self, docs: Option<Documentation<'_>>) -> &mut Builder {
-        self.documentation = docs.map(Documentation::into_owned);
+    pub(crate) fn set_documentation(&mut self, docs: Option<Documentation>) -> &mut Builder {
+        self.documentation = docs;
         self
     }
     pub(crate) fn set_deprecated(&mut self, deprecated: bool) -> &mut Builder {
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
index 77a2a3a3a9a02..094e679501fc2 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
@@ -10,7 +10,7 @@ pub(crate) mod type_alias;
 pub(crate) mod union_literal;
 pub(crate) mod variant;
 
-use hir::{AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type};
+use hir::{AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type, sym};
 use ide_db::text_edit::TextEdit;
 use ide_db::{
     RootDatabase, SnippetCap, SymbolKind,
@@ -91,7 +91,8 @@ impl<'a> RenderContext<'a> {
     }
 
     fn is_deprecated(&self, def: impl HasAttrs) -> bool {
-        def.attrs(self.db()).is_deprecated()
+        let attrs = def.attrs(self.db());
+        attrs.by_key(sym::deprecated).exists()
     }
 
     fn is_deprecated_assoc_item(&self, as_assoc_item: impl AsAssocItem) -> bool {
@@ -114,7 +115,7 @@ impl<'a> RenderContext<'a> {
     }
 
     // FIXME: remove this
-    fn docs(&self, def: impl HasDocs) -> Option<Documentation<'a>> {
+    fn docs(&self, def: impl HasDocs) -> Option<Documentation> {
         def.docs(self.db())
     }
 }
@@ -320,9 +321,7 @@ pub(crate) fn render_expr(
     );
     let edit = TextEdit::replace(source_range, snippet);
     item.snippet_edit(ctx.config.snippet_cap?, edit);
-    item.documentation(Documentation::new_owned(String::from(
-        "Autogenerated expression by term search",
-    )));
+    item.documentation(Documentation::new(String::from("Autogenerated expression by term search")));
     item.set_relevance(crate::CompletionRelevance {
         type_match: compute_type_match(ctx, &expr.ty(ctx.db)),
         ..Default::default()
@@ -555,7 +554,7 @@ fn res_to_kind(resolution: ScopeDef) -> CompletionItemKind {
     }
 }
 
-fn scope_def_docs(db: &RootDatabase, resolution: ScopeDef) -> Option<Documentation<'_>> {
+fn scope_def_docs(db: &RootDatabase, resolution: ScopeDef) -> Option<Documentation> {
     use hir::ModuleDef::*;
     match resolution {
         ScopeDef::ModuleDef(Module(it)) => it.docs(db),
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs
index 8b14f05b72b2e..6c89e49f94e8b 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs
@@ -180,7 +180,7 @@ impl Variant {
         }
     }
 
-    fn docs(self, db: &dyn HirDatabase) -> Option<Documentation<'_>> {
+    fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
         match self {
             Variant::Struct(it) => it.docs(db),
             Variant::EnumVariant(it) => it.docs(db),
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs
index 60474a31b4d3e..312d3bd426f90 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs
@@ -108,7 +108,7 @@ fn build_completion(
     label: SmolStr,
     lookup: SmolStr,
     pat: String,
-    def: impl HasDocs,
+    def: impl HasDocs + Copy,
     adt_ty: hir::Type<'_>,
     // Missing in context of match statement completions
     is_variant_missing: bool,
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs
index cfd6340f1eeab..37d0fa18c4972 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs
@@ -1,7 +1,7 @@
 //! Code common to structs, unions, and enum variants.
 
 use crate::context::CompletionContext;
-use hir::{HasAttrs, HasCrate, HasVisibility, HirDisplay, StructKind};
+use hir::{HasAttrs, HasCrate, HasVisibility, HirDisplay, StructKind, sym};
 use ide_db::SnippetCap;
 use itertools::Itertools;
 use syntax::SmolStr;
@@ -105,8 +105,8 @@ pub(crate) fn visible_fields(
         .copied()
         .collect::<Vec<_>>();
     let has_invisible_field = n_fields - fields.len() > 0;
-    let is_foreign_non_exhaustive =
-        item.attrs(ctx.db).is_non_exhaustive() && item.krate(ctx.db) != module.krate();
+    let is_foreign_non_exhaustive = item.attrs(ctx.db).by_key(sym::non_exhaustive).exists()
+        && item.krate(ctx.db) != module.krate();
     let fields_omitted = has_invisible_field || is_foreign_non_exhaustive;
     Some((fields, fields_omitted))
 }
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
index 36d739455030d..b32a895457268 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
@@ -160,12 +160,12 @@ pub(crate) fn position(
     #[rust_analyzer::rust_fixture] ra_fixture: &str,
 ) -> (RootDatabase, FilePosition) {
     let mut database = RootDatabase::default();
-    let change_fixture = ChangeFixture::parse(ra_fixture);
+    let change_fixture = ChangeFixture::parse(&database, ra_fixture);
     database.enable_proc_attr_macros();
     database.apply_change(change_fixture.change);
     let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
     let offset = range_or_offset.expect_offset();
-    let position = FilePosition { file_id: file_id.file_id(), offset };
+    let position = FilePosition { file_id: file_id.file_id(&database), offset };
     (database, position)
 }
 
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
index 9ce85b2bf3304..c051fd863de6f 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
@@ -5,10 +5,8 @@
 
 // FIXME: this badly needs rename/rewrite (matklad, 2020-02-06).
 
-use std::borrow::Cow;
-
 use crate::RootDatabase;
-use crate::documentation::{Documentation, HasDocs};
+use crate::documentation::{DocsRangeMap, Documentation, HasDocs};
 use crate::famous_defs::FamousDefs;
 use arrayvec::ArrayVec;
 use either::Either;
@@ -23,7 +21,7 @@ use hir::{
 use span::Edition;
 use stdx::{format_to, impl_from};
 use syntax::{
-    SyntaxKind, SyntaxNode, SyntaxToken,
+    SyntaxKind, SyntaxNode, SyntaxToken, TextSize,
     ast::{self, AstNode},
     match_ast,
 };
@@ -201,25 +199,21 @@ impl Definition {
         Some(name)
     }
 
-    pub fn docs<'db>(
+    pub fn docs(
         &self,
-        db: &'db RootDatabase,
+        db: &RootDatabase,
         famous_defs: Option<&FamousDefs<'_, '_>>,
         display_target: DisplayTarget,
-    ) -> Option<Documentation<'db>> {
-        self.docs_with_rangemap(db, famous_defs, display_target).map(|docs| match docs {
-            Either::Left(Cow::Borrowed(docs)) => Documentation::new_borrowed(docs.docs()),
-            Either::Left(Cow::Owned(docs)) => Documentation::new_owned(docs.into_docs()),
-            Either::Right(docs) => docs,
-        })
+    ) -> Option<Documentation> {
+        self.docs_with_rangemap(db, famous_defs, display_target).map(|(docs, _)| docs)
     }
 
-    pub fn docs_with_rangemap<'db>(
+    pub fn docs_with_rangemap(
         &self,
-        db: &'db RootDatabase,
+        db: &RootDatabase,
         famous_defs: Option<&FamousDefs<'_, '_>>,
         display_target: DisplayTarget,
-    ) -> Option, Documentation<'db>>> {
+    ) -> Option<(Documentation, Option<DocsRangeMap>)> {
         let docs = match self {
             Definition::Macro(it) => it.docs_with_rangemap(db),
             Definition::Field(it) => it.docs_with_rangemap(db),
@@ -235,13 +229,15 @@ impl Definition {
                 it.docs_with_rangemap(db).or_else(|| {
                     // docs are missing, try to fall back to the docs of the aliased item.
                     let adt = it.ty(db).as_adt()?;
-                    let mut docs = adt.docs_with_rangemap(db)?.into_owned();
+                    let (docs, range_map) = adt.docs_with_rangemap(db)?;
                     let header_docs = format!(
                         "*This is the documentation for* `{}`\n\n",
                         adt.display(db, display_target)
                     );
-                    docs.prepend_str(&header_docs);
-                    Some(Cow::Owned(docs))
+                    let offset = TextSize::new(header_docs.len() as u32);
+                    let range_map = range_map.shift_docstring_line_range(offset);
+                    let docs = header_docs + docs.as_str();
+                    Some((Documentation::new(docs), range_map))
                 })
             }
             Definition::BuiltinType(it) => {
@@ -250,7 +246,7 @@ impl Definition {
                     let primitive_mod =
                         format!("prim_{}", it.name().display(fd.0.db, display_target.edition));
                     let doc_owner = find_std_module(fd, &primitive_mod, display_target.edition)?;
-                    doc_owner.docs_with_rangemap(db)
+                    doc_owner.docs_with_rangemap(fd.0.db)
                 })
             }
             Definition::BuiltinLifetime(StaticLifetime) => None,
@@ -286,7 +282,7 @@ impl Definition {
                     );
                 }
 
-                return Some(Either::Right(Documentation::new_owned(docs.replace('*', "\\*"))));
+                return Some((Documentation::new(docs.replace('*', "\\*")), None));
             }
             Definition::ToolModule(_) => None,
             Definition::DeriveHelper(_) => None,
@@ -303,7 +299,7 @@ impl Definition {
             let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?;
             item.docs_with_rangemap(db)
         })
-        .map(Either::Left)
+        .map(|(docs, range_map)| (docs, Some(range_map)))
     }
 
     pub fn label(&self, db: &RootDatabase, display_target: DisplayTarget) -> String {
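The defs.rs hunk above prepends a `*This is the documentation for*` header to the aliased ADT's docs and then calls `shift_docstring_line_range` so the docs-to-source mapping stays aligned with the longer string. A rough sketch of that bookkeeping with a simplified entry type (`Entry` and `shift_docs_ranges` are invented names for illustration, not rust-analyzer types):

```rust
// Each entry maps a line range inside the rendered docs back to a source range.
#[derive(Debug, PartialEq)]
struct Entry {
    docs_range: (u32, u32),   // range inside the concatenated documentation string
    source_range: (u32, u32), // range inside the original doc attribute or comment
}

// Prepending a prefix to the docs shifts every docs-side range by the prefix
// length; the source-side ranges are untouched.
fn shift_docs_ranges(entries: &mut [Entry], offset: u32) {
    for entry in entries.iter_mut() {
        entry.docs_range.0 += offset;
        entry.docs_range.1 += offset;
    }
}

fn main() {
    let header = "*This is the documentation for* `Foo`\n\n";
    let mut entries = vec![Entry { docs_range: (0, 11), source_range: (4, 15) }];
    shift_docs_ranges(&mut entries, header.len() as u32);
    assert_eq!(entries[0].docs_range, (header.len() as u32, header.len() as u32 + 11));
    assert_eq!(entries[0].source_range, (4, 15));
}
```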
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs b/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs
index 4c4691cca2ca1..cab19aadfd010 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs
@@ -1,100 +1,337 @@
 //! Documentation attribute related utilities.
-use std::borrow::Cow;
-
-use hir::{HasAttrs, db::HirDatabase, resolve_doc_path_on};
+use either::Either;
+use hir::{
+    AttrId, AttrSourceMap, AttrsWithOwner, HasAttrs, InFile,
+    db::{DefDatabase, HirDatabase},
+    resolve_doc_path_on, sym,
+};
+use itertools::Itertools;
+use span::{TextRange, TextSize};
+use syntax::{
+    AstToken,
+    ast::{self, IsString},
+};
 
 /// Holds documentation
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct Documentation<'db>(Cow<'db, str>);
-
-impl<'db> Documentation<'db> {
-    #[inline]
-    pub fn new_owned(s: String) -> Self {
-        Documentation(Cow::Owned(s))
-    }
+pub struct Documentation(String);
 
-    #[inline]
-    pub fn new_borrowed(s: &'db str) -> Self {
-        Documentation(Cow::Borrowed(s))
+impl Documentation {
+    pub fn new(s: String) -> Self {
+        Documentation(s)
     }
 
-    #[inline]
-    pub fn into_owned(self) -> Documentation<'static> {
-        Documentation::new_owned(self.0.into_owned())
-    }
-
-    #[inline]
     pub fn as_str(&self) -> &str {
         &self.0
     }
 }
 
-pub trait HasDocs: HasAttrs + Copy {
-    fn docs(self, db: &dyn HirDatabase) -> Option<Documentation<'_>> {
-        let docs = match self.docs_with_rangemap(db)? {
-            Cow::Borrowed(docs) => Documentation::new_borrowed(docs.docs()),
-            Cow::Owned(docs) => Documentation::new_owned(docs.into_docs()),
-        };
-        Some(docs)
-    }
-    fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option> {
-        self.hir_docs(db).map(Cow::Borrowed)
+impl From<Documentation> for String {
+    fn from(Documentation(string): Documentation) -> Self {
+        string
     }
+}
+
+pub trait HasDocs: HasAttrs {
+    fn docs(self, db: &dyn HirDatabase) -> Option<Documentation>;
+    fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<(Documentation, DocsRangeMap)>;
     fn resolve_doc_path(
         self,
         db: &dyn HirDatabase,
         link: &str,
         ns: Option<hir::Namespace>,
-        is_inner_doc: hir::IsInnerDoc,
-    ) -> Option<hir::DocLinkDef> {
-        resolve_doc_path_on(db, self, link, ns, is_inner_doc)
+        is_inner_doc: bool,
+    ) -> Option<hir::DocLinkDef>;
+}
+/// A struct to map text ranges from [`Documentation`] back to TextRanges in the syntax tree.
+#[derive(Debug)]
+pub struct DocsRangeMap {
+    source_map: AttrSourceMap,
+    // (docstring-line-range, attr_index, attr-string-range)
+    // a mapping from the text range of a line of the [`Documentation`] to the attribute index and
+    // the original (untrimmed) syntax doc line
+    mapping: Vec<(TextRange, AttrId, TextRange)>,
+}
+
+impl DocsRangeMap {
+    /// Maps a [`TextRange`] relative to the documentation string back to its AST range
+    pub fn map(&self, range: TextRange) -> Option<(InFile<TextRange>, AttrId)> {
+        let found = self.mapping.binary_search_by(|(probe, ..)| probe.ordering(range)).ok()?;
+        let (line_docs_range, idx, original_line_src_range) = self.mapping[found];
+        if !line_docs_range.contains_range(range) {
+            return None;
+        }
+
+        let relative_range = range - line_docs_range.start();
+
+        let InFile { file_id, value: source } = self.source_map.source_of_id(idx);
+        match source {
+            Either::Left(attr) => {
+                let string = get_doc_string_in_attr(attr)?;
+                let text_range = string.open_quote_text_range()?;
+                let range = TextRange::at(
+                    text_range.end() + original_line_src_range.start() + relative_range.start(),
+                    string.syntax().text_range().len().min(range.len()),
+                );
+                Some((InFile { file_id, value: range }, idx))
+            }
+            Either::Right(comment) => {
+                let text_range = comment.syntax().text_range();
+                let range = TextRange::at(
+                    text_range.start()
+                        + TextSize::try_from(comment.prefix().len()).ok()?
+                        + original_line_src_range.start()
+                        + relative_range.start(),
+                    text_range.len().min(range.len()),
+                );
+                Some((InFile { file_id, value: range }, idx))
+            }
+        }
     }
+
+    pub fn shift_docstring_line_range(self, offset: TextSize) -> DocsRangeMap {
+        let mapping = self
+            .mapping
+            .into_iter()
+            .map(|(buf_offset, id, base_offset)| {
+                let buf_offset = buf_offset.checked_add(offset).unwrap();
+                (buf_offset, id, base_offset)
+            })
+            .collect_vec();
+        DocsRangeMap { source_map: self.source_map, mapping }
+    }
+}
+
+pub fn docs_with_rangemap(
+    db: &dyn DefDatabase,
+    attrs: &AttrsWithOwner,
+) -> Option<(Documentation, DocsRangeMap)> {
+    let docs = attrs
+        .by_key(sym::doc)
+        .attrs()
+        .filter_map(|attr| attr.string_value_unescape().map(|s| (s, attr.id)));
+    let indent = doc_indent(attrs);
+    let mut buf = String::new();
+    let mut mapping = Vec::new();
+    for (doc, idx) in docs {
+        if !doc.is_empty() {
+            let mut base_offset = 0;
+            for raw_line in doc.split('\n') {
+                let line = raw_line.trim_end();
+                let line_len = line.len();
+                let (offset, line) = match line.char_indices().nth(indent) {
+                    Some((offset, _)) => (offset, &line[offset..]),
+                    None => (0, line),
+                };
+                let buf_offset = buf.len();
+                buf.push_str(line);
+                mapping.push((
+                    TextRange::new(buf_offset.try_into().ok()?, buf.len().try_into().ok()?),
+                    idx,
+                    TextRange::at(
+                        (base_offset + offset).try_into().ok()?,
+                        line_len.try_into().ok()?,
+                    ),
+                ));
+                buf.push('\n');
+                base_offset += raw_line.len() + 1;
+            }
+        } else {
+            buf.push('\n');
+        }
+    }
+    buf.pop();
+    if buf.is_empty() {
+        None
+    } else {
+        Some((Documentation(buf), DocsRangeMap { mapping, source_map: attrs.source_map(db) }))
+    }
+}
+
+pub fn docs_from_attrs(attrs: &hir::Attrs) -> Option<String> {
+    let docs = attrs.by_key(sym::doc).attrs().filter_map(|attr| attr.string_value_unescape());
+    let indent = doc_indent(attrs);
+    let mut buf = String::new();
+    for doc in docs {
+        // str::lines doesn't yield anything for the empty string
+        if !doc.is_empty() {
+            // We don't trim trailing whitespace from doc comments as multiple trailing spaces
+            // indicates a hard line break in Markdown.
+            let lines = doc.lines().map(|line| {
+                line.char_indices().nth(indent).map_or(line, |(offset, _)| &line[offset..])
+            });
+
+            buf.extend(Itertools::intersperse(lines, "\n"));
+        }
+        buf.push('\n');
+    }
+    buf.pop();
+    if buf.is_empty() { None } else { Some(buf) }
 }
 
 macro_rules! impl_has_docs {
     ($($def:ident,)*) => {$(
-        impl HasDocs for hir::$def {}
+        impl HasDocs for hir::$def {
+            fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
+                docs_from_attrs(&self.attrs(db)).map(Documentation)
+            }
+            fn docs_with_rangemap(
+                self,
+                db: &dyn HirDatabase,
+            ) -> Option<(Documentation, DocsRangeMap)> {
+                docs_with_rangemap(db, &self.attrs(db))
+            }
+            fn resolve_doc_path(
+                self,
+                db: &dyn HirDatabase,
+                link: &str,
+                ns: Option<hir::Namespace>,
+                is_inner_doc: bool,
+            ) -> Option<hir::DocLinkDef> {
+                resolve_doc_path_on(db, self, link, ns, is_inner_doc)
+            }
+        }
     )*};
 }
 
 impl_has_docs![
     Variant, Field, Static, Const, Trait, TypeAlias, Macro, Function, Adt, Module, Impl, Crate,
-    AssocItem, Struct, Union, Enum,
 ];
 
+macro_rules! impl_has_docs_enum {
+    ($($variant:ident),* for $enum:ident) => {$(
+        impl HasDocs for hir::$variant {
+            fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
+                hir::$enum::$variant(self).docs(db)
+            }
+
+            fn docs_with_rangemap(
+                self,
+                db: &dyn HirDatabase,
+            ) -> Option<(Documentation, DocsRangeMap)> {
+                hir::$enum::$variant(self).docs_with_rangemap(db)
+            }
+            fn resolve_doc_path(
+                self,
+                db: &dyn HirDatabase,
+                link: &str,
+                ns: Option<hir::Namespace>,
+                is_inner_doc: bool,
+            ) -> Option<hir::DocLinkDef> {
+                hir::$enum::$variant(self).resolve_doc_path(db, link, ns, is_inner_doc)
+            }
+        }
+    )*};
+}
+
+impl_has_docs_enum![Struct, Union, Enum for Adt];
+
+impl HasDocs for hir::AssocItem {
+    fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
+        match self {
+            hir::AssocItem::Function(it) => it.docs(db),
+            hir::AssocItem::Const(it) => it.docs(db),
+            hir::AssocItem::TypeAlias(it) => it.docs(db),
+        }
+    }
+
+    fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<(Documentation, DocsRangeMap)> {
+        match self {
+            hir::AssocItem::Function(it) => it.docs_with_rangemap(db),
+            hir::AssocItem::Const(it) => it.docs_with_rangemap(db),
+            hir::AssocItem::TypeAlias(it) => it.docs_with_rangemap(db),
+        }
+    }
+
+    fn resolve_doc_path(
+        self,
+        db: &dyn HirDatabase,
+        link: &str,
+        ns: Option<hir::Namespace>,
+        is_inner_doc: bool,
+    ) -> Option<hir::DocLinkDef> {
+        match self {
+            hir::AssocItem::Function(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
+            hir::AssocItem::Const(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
+            hir::AssocItem::TypeAlias(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
+        }
+    }
+}
+
 impl HasDocs for hir::ExternCrateDecl {
-    fn docs(self, db: &dyn HirDatabase) -> Option<Documentation<'_>> {
-        let crate_docs = self.resolved_crate(db)?.hir_docs(db);
-        let decl_docs = self.hir_docs(db);
+    fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
+        let crate_docs = docs_from_attrs(&self.resolved_crate(db)?.root_module().attrs(db));
+        let decl_docs = docs_from_attrs(&self.attrs(db));
         match (decl_docs, crate_docs) {
             (None, None) => None,
-            (Some(docs), None) | (None, Some(docs)) => {
-                Some(Documentation::new_borrowed(docs.docs()))
-            }
-            (Some(decl_docs), Some(crate_docs)) => {
-                let mut docs = String::with_capacity(
-                    decl_docs.docs().len() + "\n\n".len() + crate_docs.docs().len(),
-                );
-                docs.push_str(decl_docs.docs());
-                docs.push_str("\n\n");
-                docs.push_str(crate_docs.docs());
-                Some(Documentation::new_owned(docs))
+            (Some(decl_docs), None) => Some(decl_docs),
+            (None, Some(crate_docs)) => Some(crate_docs),
+            (Some(mut decl_docs), Some(crate_docs)) => {
+                decl_docs.push('\n');
+                decl_docs.push('\n');
+                decl_docs += &crate_docs;
+                Some(decl_docs)
             }
         }
+        .map(Documentation::new)
     }
 
-    fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option> {
-        let crate_docs = self.resolved_crate(db)?.hir_docs(db);
-        let decl_docs = self.hir_docs(db);
+    fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<(Documentation, DocsRangeMap)> {
+        let crate_docs = docs_with_rangemap(db, &self.resolved_crate(db)?.root_module().attrs(db));
+        let decl_docs = docs_with_rangemap(db, &self.attrs(db));
         match (decl_docs, crate_docs) {
             (None, None) => None,
-            (Some(docs), None) | (None, Some(docs)) => Some(Cow::Borrowed(docs)),
-            (Some(decl_docs), Some(crate_docs)) => {
-                let mut docs = decl_docs.clone();
-                docs.append_str("\n\n");
-                docs.append(crate_docs);
-                Some(Cow::Owned(docs))
+            (Some(decl_docs), None) => Some(decl_docs),
+            (None, Some(crate_docs)) => Some(crate_docs),
+            (
+                Some((Documentation(mut decl_docs), mut decl_range_map)),
+                Some((Documentation(crate_docs), crate_range_map)),
+            ) => {
+                decl_docs.push('\n');
+                decl_docs.push('\n');
+                let offset = TextSize::new(decl_docs.len() as u32);
+                decl_docs += &crate_docs;
+                let crate_range_map = crate_range_map.shift_docstring_line_range(offset);
+                decl_range_map.mapping.extend(crate_range_map.mapping);
+                Some((Documentation(decl_docs), decl_range_map))
             }
         }
     }
+    fn resolve_doc_path(
+        self,
+        db: &dyn HirDatabase,
+        link: &str,
+        ns: Option<hir::Namespace>,
+        is_inner_doc: bool,
+    ) -> Option<hir::DocLinkDef> {
+        resolve_doc_path_on(db, self, link, ns, is_inner_doc)
+    }
+}
+
+fn get_doc_string_in_attr(it: &ast::Attr) -> Option<ast::String> {
+    match it.expr() {
+        // #[doc = lit]
+        Some(ast::Expr::Literal(lit)) => match lit.kind() {
+            ast::LiteralKind::String(it) => Some(it),
+            _ => None,
+        },
+        // #[cfg_attr(..., doc = "", ...)]
+        None => {
+            // FIXME: See highlight injection for what to do here
+            None
+        }
+        _ => None,
+    }
+}
+
+fn doc_indent(attrs: &hir::Attrs) -> usize {
+    let mut min = !0;
+    for val in attrs.by_key(sym::doc).attrs().filter_map(|attr| attr.string_value_unescape()) {
+        if let Some(m) =
+            val.lines().filter_map(|line| line.chars().position(|c| !c.is_whitespace())).min()
+        {
+            min = min.min(m);
+        }
+    }
+    min
 }
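`doc_indent` and `docs_from_attrs` above restore the usual doc-string joining: find the smallest leading-whitespace width over all doc lines, strip that prefix from every line, and join the pieces with newlines. A self-contained sketch of that behaviour over plain string slices (`join_doc_strings` is an invented name, and unlike the real code it does not track source ranges or Markdown hard line breaks):

```rust
fn join_doc_strings(docs: &[&str]) -> Option<String> {
    // Smallest indentation over all non-blank doc lines, in the spirit of `doc_indent`.
    let indent = docs
        .iter()
        .flat_map(|doc| doc.lines())
        .filter_map(|line| line.chars().position(|c| !c.is_whitespace()))
        .min()
        .unwrap_or(0);
    let mut out = Vec::new();
    for doc in docs {
        if doc.is_empty() {
            // An empty `#[doc = ""]` still contributes a blank line.
            out.push(String::new());
            continue;
        }
        for line in doc.lines() {
            // Drop the shared indent; lines shorter than the indent are kept as-is.
            let trimmed =
                line.char_indices().nth(indent).map_or(line, |(offset, _)| &line[offset..]);
            out.push(trimmed.to_owned());
        }
    }
    if out.iter().all(|line| line.is_empty()) { None } else { Some(out.join("\n")) }
}

fn main() {
    let docs = [" Adds two numbers.", "", " # Example", " `add(1, 2)`"];
    assert_eq!(join_doc_strings(&docs).unwrap(), "Adds two numbers.\n\n# Example\n`add(1, 2)`");
}
```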
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/ra_fixture.rs b/src/tools/rust-analyzer/crates/ide-db/src/ra_fixture.rs
index cd86e7765196c..1f056a835bc62 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/ra_fixture.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/ra_fixture.rs
@@ -25,14 +25,18 @@ impl RootDatabase {
         // We don't want a mistake in the fixture to crash r-a, so we wrap this in `catch_unwind()`.
         std::panic::catch_unwind(|| {
             let mut db = RootDatabase::default();
-            let fixture =
-                test_fixture::ChangeFixture::parse_with_proc_macros(text, minicore.0, Vec::new());
+            let fixture = test_fixture::ChangeFixture::parse_with_proc_macros(
+                &db,
+                text,
+                minicore.0,
+                Vec::new(),
+            );
             db.apply_change(fixture.change);
             let files = fixture
                 .files
                 .into_iter()
                 .zip(fixture.file_lines)
-                .map(|(file_id, range)| (file_id.file_id(), range))
+                .map(|(file_id, range)| (file_id.file_id(&db), range))
                 .collect();
             (db, files, fixture.sysroot_files)
         })
@@ -521,7 +525,7 @@ impl_empty_upmap_from_ra_fixture!(
     &str,
     String,
     SmolStr,
-    Documentation<'_>,
+    Documentation,
     SymbolKind,
     CfgExpr,
     ReferenceCategory,
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rust_doc.rs b/src/tools/rust-analyzer/crates/ide-db/src/rust_doc.rs
index 36a6938af6b82..eacd9b9b4d2f6 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/rust_doc.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/rust_doc.rs
@@ -33,7 +33,7 @@ pub fn is_rust_fence(s: &str) -> bool {
 
 const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];
 
-pub fn format_docs(src: &Documentation<'_>) -> String {
+pub fn format_docs(src: &Documentation) -> String {
     format_docs_(src.as_str())
 }
 
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
index 8b53cea7e6d38..f1d076e874d5c 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
@@ -12,7 +12,7 @@ use either::Either;
 use hir::{
     Adt, AsAssocItem, DefWithBody, EditionedFileId, FileRange, FileRangeWrapper, HasAttrs,
     HasContainer, HasSource, InFile, InFileWrapper, InRealFile, InlineAsmOperand, ItemContainer,
-    ModuleSource, PathResolution, Semantics, Visibility,
+    ModuleSource, PathResolution, Semantics, Visibility, sym,
 };
 use memchr::memmem::Finder;
 use parser::SyntaxKind;
@@ -169,7 +169,7 @@ impl SearchScope {
             entries.extend(
                 source_root
                     .iter()
-                    .map(|id| (EditionedFileId::new(db, id, crate_data.edition, krate), None)),
+                    .map(|id| (EditionedFileId::new(db, id, crate_data.edition), None)),
             );
         }
         SearchScope { entries }
@@ -183,9 +183,11 @@ impl SearchScope {
 
             let source_root = db.file_source_root(root_file).source_root_id(db);
             let source_root = db.source_root(source_root).source_root(db);
-            entries.extend(source_root.iter().map(|id| {
-                (EditionedFileId::new(db, id, rev_dep.edition(db), rev_dep.into()), None)
-            }));
+            entries.extend(
+                source_root
+                    .iter()
+                    .map(|id| (EditionedFileId::new(db, id, rev_dep.edition(db)), None)),
+            );
         }
         SearchScope { entries }
     }
@@ -199,7 +201,7 @@ impl SearchScope {
         SearchScope {
             entries: source_root
                 .iter()
-                .map(|id| (EditionedFileId::new(db, id, of.edition(db), of.into()), None))
+                .map(|id| (EditionedFileId::new(db, id, of.edition(db)), None))
                 .collect(),
         }
     }
@@ -366,7 +368,7 @@ impl Definition {
         if let Definition::Macro(macro_def) = self {
             return match macro_def.kind(db) {
                 hir::MacroKind::Declarative => {
-                    if macro_def.attrs(db).is_macro_export() {
+                    if macro_def.attrs(db).by_key(sym::macro_export).exists() {
                         SearchScope::reverse_dependencies(db, module.krate())
                     } else {
                         SearchScope::krate(db, module.krate())
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt
index 427a510559486..30d1df4f8e554 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt
+++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt
@@ -3,7 +3,7 @@
         Module {
             id: ModuleId {
                 krate: Crate(
-                    Id(2c00),
+                    Id(3000),
                 ),
                 block: None,
                 local_id: Idx::(0),
@@ -16,7 +16,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                3801,
+                                3401,
                             ),
                         },
                     ),
@@ -24,7 +24,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -50,7 +50,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                3800,
+                                3400,
                             ),
                         },
                     ),
@@ -58,7 +58,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -84,7 +84,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                3800,
+                                3400,
                             ),
                         },
                     ),
@@ -92,7 +92,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -118,7 +118,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                3800,
+                                3400,
                             ),
                         },
                     ),
@@ -126,7 +126,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -152,7 +152,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                3800,
+                                3400,
                             ),
                         },
                     ),
@@ -160,7 +160,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -186,7 +186,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                3801,
+                                3401,
                             ),
                         },
                     ),
@@ -194,7 +194,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -220,7 +220,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                3800,
+                                3400,
                             ),
                         },
                     ),
@@ -228,7 +228,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt
index ce93fa59e258e..973256c470f34 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt
+++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt
@@ -3,7 +3,7 @@
         Module {
             id: ModuleId {
                 krate: Crate(
-                    Id(2c00),
+                    Id(3000),
                 ),
                 block: None,
                 local_id: Idx::(0),
@@ -22,7 +22,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -49,14 +49,14 @@
                 def: TypeAlias(
                     TypeAlias {
                         id: TypeAliasId(
-                            6c00,
+                            6800,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -88,7 +88,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -115,14 +115,14 @@
                 def: Const(
                     Const {
                         id: ConstId(
-                            6400,
+                            6000,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -147,14 +147,14 @@
                 def: Const(
                     Const {
                         id: ConstId(
-                            6402,
+                            6002,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -180,7 +180,7 @@
                     Enum(
                         Enum {
                             id: EnumId(
-                                5000,
+                                4c00,
                             ),
                         },
                     ),
@@ -188,7 +188,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -214,7 +214,7 @@
                     Macro {
                         id: Macro2Id(
                             Macro2Id(
-                                4c00,
+                                4800,
                             ),
                         ),
                     },
@@ -222,7 +222,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -248,7 +248,7 @@
                     Macro {
                         id: Macro2Id(
                             Macro2Id(
-                                4c00,
+                                4800,
                             ),
                         ),
                     },
@@ -256,7 +256,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -281,14 +281,14 @@
                 def: Static(
                     Static {
                         id: StaticId(
-                            6800,
+                            6400,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -314,7 +314,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                4801,
+                                4401,
                             ),
                         },
                     ),
@@ -322,7 +322,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -348,7 +348,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                4800,
+                                4400,
                             ),
                         },
                     ),
@@ -356,7 +356,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: MacroFile(
                         MacroCallId(
-                            Id(3c00),
+                            Id(3800),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -382,7 +382,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                4805,
+                                4405,
                             ),
                         },
                     ),
@@ -390,7 +390,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -418,7 +418,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                4806,
+                                4406,
                             ),
                         },
                     ),
@@ -426,7 +426,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -454,7 +454,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                4807,
+                                4407,
                             ),
                         },
                     ),
@@ -462,7 +462,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -488,7 +488,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                4802,
+                                4402,
                             ),
                         },
                     ),
@@ -496,7 +496,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -521,14 +521,14 @@
                 def: Trait(
                     Trait {
                         id: TraitId(
-                            5c00,
+                            5800,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -554,7 +554,7 @@
                     Macro {
                         id: Macro2Id(
                             Macro2Id(
-                                4c00,
+                                4800,
                             ),
                         ),
                     },
@@ -562,7 +562,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -588,7 +588,7 @@
                     Union(
                         Union {
                             id: UnionId(
-                                5400,
+                                5000,
                             ),
                         },
                     ),
@@ -596,7 +596,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -622,7 +622,7 @@
                     Module {
                         id: ModuleId {
                             krate: Crate(
-                                Id(2c00),
+                                Id(3000),
                             ),
                             block: None,
                             local_id: Idx::(1),
@@ -632,7 +632,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -658,7 +658,7 @@
                     Module {
                         id: ModuleId {
                             krate: Crate(
-                                Id(2c00),
+                                Id(3000),
                             ),
                             block: None,
                             local_id: Idx::(2),
@@ -668,7 +668,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -694,7 +694,7 @@
                     Macro {
                         id: MacroRulesId(
                             MacroRulesId(
-                                3801,
+                                3401,
                             ),
                         ),
                     },
@@ -702,7 +702,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -727,14 +727,14 @@
                 def: Function(
                     Function {
                         id: FunctionId(
-                            6002,
+                            5c02,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -761,14 +761,14 @@
                 def: Function(
                     Function {
                         id: FunctionId(
-                            6001,
+                            5c01,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -796,7 +796,7 @@
                     Macro {
                         id: MacroRulesId(
                             MacroRulesId(
-                                3800,
+                                3400,
                             ),
                         ),
                     },
@@ -804,7 +804,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -829,14 +829,14 @@
                 def: Function(
                     Function {
                         id: FunctionId(
-                            6000,
+                            5c00,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -862,7 +862,7 @@
                     Macro {
                         id: MacroRulesId(
                             MacroRulesId(
-                                3801,
+                                3401,
                             ),
                         ),
                     },
@@ -870,7 +870,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -895,14 +895,14 @@
                 def: Function(
                     Function {
                         id: FunctionId(
-                            6003,
+                            5c03,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -930,7 +930,7 @@
         Module {
             id: ModuleId {
                 krate: Crate(
-                    Id(2c00),
+                    Id(3000),
                 ),
                 block: None,
                 local_id: Idx::(1),
@@ -943,7 +943,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                4803,
+                                4403,
                             ),
                         },
                     ),
@@ -951,7 +951,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3000),
+                            Id(2000),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -977,7 +977,7 @@
         Module {
             id: ModuleId {
                 krate: Crate(
-                    Id(2c00),
+                    Id(3000),
                 ),
                 block: None,
                 local_id: Idx::(2),
@@ -989,14 +989,14 @@
                 def: Trait(
                     Trait {
                         id: TraitId(
-                            5c00,
+                            5800,
                         ),
                     },
                 ),
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3001),
+                            Id(2001),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -1022,7 +1022,7 @@
                     Macro {
                         id: Macro2Id(
                             Macro2Id(
-                                4c00,
+                                4800,
                             ),
                         ),
                     },
@@ -1030,7 +1030,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3001),
+                            Id(2001),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -1056,7 +1056,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                4804,
+                                4404,
                             ),
                         },
                     ),
@@ -1064,7 +1064,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3001),
+                            Id(2001),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -1090,7 +1090,7 @@
                     Macro {
                         id: Macro2Id(
                             Macro2Id(
-                                4c00,
+                                4800,
                             ),
                         ),
                     },
@@ -1098,7 +1098,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3001),
+                            Id(2001),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
@@ -1124,7 +1124,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                4804,
+                                4404,
                             ),
                         },
                     ),
@@ -1132,7 +1132,7 @@
                 loc: DeclarationLocation {
                     hir_file_id: FileId(
                         EditionedFileId(
-                            Id(3001),
+                            Id(2001),
                         ),
                     ),
                     ptr: SyntaxNodePtr {
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt
index 3ab837aa613f1..22872b577f712 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt
+++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_exclude_imports.txt
@@ -13,7 +13,7 @@
         loc: DeclarationLocation {
             hir_file_id: FileId(
                 EditionedFileId(
-                    Id(3001),
+                    Id(2001),
                 ),
             ),
             ptr: SyntaxNodePtr {
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_with_imports.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_with_imports.txt
index a6a808d616a7a..9f98bf87e2e8d 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_with_imports.txt
+++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbols_with_imports.txt
@@ -13,7 +13,7 @@
         loc: DeclarationLocation {
             hir_file_id: FileId(
                 EditionedFileId(
-                    Id(3001),
+                    Id(2001),
                 ),
             ),
             ptr: SyntaxNodePtr {
@@ -47,7 +47,7 @@
         loc: DeclarationLocation {
             hir_file_id: FileId(
                 EditionedFileId(
-                    Id(3000),
+                    Id(2000),
                 ),
             ),
             ptr: SyntaxNodePtr {
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/traits.rs b/src/tools/rust-analyzer/crates/ide-db/src/traits.rs
index 7b9fdb1e1cf3b..61e28386d0721 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/traits.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/traits.rs
@@ -114,7 +114,8 @@ fn assoc_item_of_trait(
 #[cfg(test)]
 mod tests {
     use expect_test::{Expect, expect};
-    use hir::{EditionedFileId, FilePosition, Semantics};
+    use hir::FilePosition;
+    use hir::Semantics;
     use span::Edition;
     use syntax::ast::{self, AstNode};
     use test_fixture::ChangeFixture;
@@ -126,11 +127,10 @@ mod tests {
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
     ) -> (RootDatabase, FilePosition) {
         let mut database = RootDatabase::default();
-        let change_fixture = ChangeFixture::parse(ra_fixture);
+        let change_fixture = ChangeFixture::parse(&database, ra_fixture);
         database.apply_change(change_fixture.change);
         let (file_id, range_or_offset) =
             change_fixture.file_position.expect("expected a marker ($0)");
-        let file_id = EditionedFileId::from_span_guess_origin(&database, file_id);
         let offset = range_or_offset.expect_offset();
         (database, FilePosition { file_id, offset })
     }
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
index dfa9639f6eb90..8611ef653b02d 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
@@ -95,7 +95,7 @@ fn f() {
   //^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no is disabled
 
     #[cfg(no)] #[cfg(no2)] mod m;
-  //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no is disabled
+  //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no and no2 are disabled
 
     #[cfg(all(not(a), b))] enum E {}
   //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: b is disabled
@@ -130,6 +130,7 @@ trait Bar {
     /// Tests that `cfg` attributes behind `cfg_attr` is handled properly.
     #[test]
     fn inactive_via_cfg_attr() {
+        cov_mark::check!(cfg_attr_active);
         check(
             r#"
     #[cfg_attr(not(never), cfg(no))] fn f() {}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
index 9aa7aed16964d..8b708f229d009 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
@@ -8,7 +8,7 @@ pub(crate) fn invalid_derive_target(
     ctx: &DiagnosticsContext<'_>,
     d: &hir::InvalidDeriveTarget,
 ) -> Diagnostic {
-    let display_range = ctx.sema.diagnostics_display_range_for_range(d.range);
+    let display_range = ctx.sema.diagnostics_display_range(d.node);
 
     Diagnostic::new(
         DiagnosticCode::RustcHardError("E0774"),
@@ -29,7 +29,7 @@ mod tests {
 //- minicore:derive
 mod __ {
     #[derive()]
-   // ^^^^^^ error: `derive` may only be applied to `struct`s, `enum`s and `union`s
+  //^^^^^^^^^^^ error: `derive` may only be applied to `struct`s, `enum`s and `union`s
     fn main() {}
 }
             "#,
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
index a44b043f433c6..6a1ecae651501 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
@@ -13,7 +13,7 @@ use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, Severity};
 // This diagnostic is shown for proc macros that have been specifically disabled via `rust-analyzer.procMacro.ignored`.
 pub(crate) fn macro_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroError) -> Diagnostic {
     // Use more accurate position if available.
-    let display_range = ctx.sema.diagnostics_display_range_for_range(d.range);
+    let display_range = ctx.resolve_precise_location(&d.node, d.precise_location);
     Diagnostic::new(
         DiagnosticCode::Ra(d.kind, if d.error { Severity::Error } else { Severity::WeakWarning }),
         d.message.clone(),
@@ -27,10 +27,8 @@ pub(crate) fn macro_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroError) ->
 // This diagnostic is shown for macro expansion errors.
 pub(crate) fn macro_def_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroDefError) -> Diagnostic {
     // Use more accurate position if available.
-    let display_range = match d.name {
-        Some(name) => ctx.sema.diagnostics_display_range_for_range(d.node.with_value(name)),
-        None => ctx.sema.diagnostics_display_range(d.node.map(|it| it.syntax_node_ptr())),
-    };
+    let display_range =
+        ctx.resolve_precise_location(&d.node.map(|it| it.syntax_node_ptr()), d.name);
     Diagnostic::new(
         DiagnosticCode::Ra("macro-def-error", Severity::Error),
         d.message.clone(),
@@ -137,12 +135,10 @@ macro_rules! env { () => {} }
 #[rustc_builtin_macro]
 macro_rules! concat { () => {} }
 
-  include!(concat!(
-        // ^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run
-    env!(
-  //^^^ error: `OUT_DIR` not set, build scripts may have failed to run
-        "OUT_DIR"), "/out.rs"));
-      //^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run
+  include!(concat!(env!("OUT_DIR"), "/out.rs"));
+                      //^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run
+                 //^^^^^^^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run
+         //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: `OUT_DIR` not set, build scripts may have failed to run
 "#,
         );
     }
@@ -186,7 +182,7 @@ fn main() {
            //^^^^^^^^^^^^^^^^ error: failed to load file `does not exist`
 
     include!(concat!("does ", "not ", "exist"));
-                  // ^^^^^^^^^^^^^^^^^^^^^^^^ error: failed to load file `does not exist`
+                  //^^^^^^^^^^^^^^^^^^^^^^^^^^ error: failed to load file `does not exist`
 
     env!(invalid);
        //^^^^^^^ error: expected string literal
@@ -293,7 +289,7 @@ include!("include-me.rs");
 //- /include-me.rs
 /// long doc that pushes the diagnostic range beyond the first file's text length
   #[err]
- // ^^^ error: unresolved macro `err`
+//^^^^^^error: unresolved macro `err`
 mod prim_never {}
 "#,
         );
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs
index 7d0c71f4fa7c1..701b30b9b593d 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs
@@ -7,7 +7,7 @@ pub(crate) fn malformed_derive(
     ctx: &DiagnosticsContext<'_>,
     d: &hir::MalformedDerive,
 ) -> Diagnostic {
-    let display_range = ctx.sema.diagnostics_display_range_for_range(d.range);
+    let display_range = ctx.sema.diagnostics_display_range(d.node);
 
     Diagnostic::new(
         DiagnosticCode::RustcHardError("E0777"),
@@ -28,7 +28,7 @@ mod tests {
 //- minicore:derive
 mod __ {
     #[derive = "aaaa"]
-   // ^^^^^^^^^^^^^^^ error: malformed derive input, derive attributes are of the form `#[derive(Derive1, Derive2, ...)]`
+  //^^^^^^^^^^^^^^^^^^ error: malformed derive input, derive attributes are of the form `#[derive(Derive1, Derive2, ...)]`
     struct Foo;
 }
             "#,
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
index 030c82ca0ba79..a87b8c42ac1d0 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
@@ -8,7 +8,8 @@ pub(crate) fn unresolved_macro_call(
     ctx: &DiagnosticsContext<'_>,
     d: &hir::UnresolvedMacroCall,
 ) -> Diagnostic {
-    let display_range = ctx.sema.diagnostics_display_range_for_range(d.range);
+    // Use more accurate position if available.
+    let display_range = ctx.resolve_precise_location(&d.macro_call, d.precise_location);
     let bang = if d.is_bang { "!" } else { "" };
     Diagnostic::new(
         DiagnosticCode::RustcHardError("unresolved-macro-call"),
@@ -75,7 +76,7 @@ self::m!(); self::m2!();
             r#"
     mod _test_inner {
         #![empty_attr]
-        // ^^^^^^^^^^ error: unresolved macro `empty_attr`
+      //^^^^^^^^^^^^^^ error: unresolved macro `empty_attr`
     }
 "#,
         );
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
index 5c8f030de4def..1530e64652464 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
@@ -102,7 +102,7 @@ use ide_db::{
 use itertools::Itertools;
 use syntax::{
     AstPtr, Edition, NodeOrToken, SmolStr, SyntaxKind, SyntaxNode, SyntaxNodePtr, T, TextRange,
-    ast::{self, AstNode},
+    ast::{self, AstNode, HasAttrs},
 };
 
 // FIXME: Make this an enum
@@ -277,6 +277,31 @@ struct DiagnosticsContext<'a> {
     is_nightly: bool,
 }
 
+impl DiagnosticsContext<'_> {
+    fn resolve_precise_location(
+        &self,
+        node: &InFile<SyntaxNodePtr>,
+        precise_location: Option<TextRange>,
+    ) -> FileRange {
+        let sema = &self.sema;
+        (|| {
+            let precise_location = precise_location?;
+            let root = sema.parse_or_expand(node.file_id);
+            match root.covering_element(precise_location) {
+                syntax::NodeOrToken::Node(it) => Some(sema.original_range(&it)),
+                syntax::NodeOrToken::Token(it) => {
+                    node.with_value(it).original_file_range_opt(sema.db)
+                }
+            }
+        })()
+        .map(|frange| ide_db::FileRange {
+            file_id: frange.file_id.file_id(self.sema.db),
+            range: frange.range,
+        })
+        .unwrap_or_else(|| sema.diagnostics_display_range(*node))
+    }
+}
+
 /// Request parser level diagnostics for the given [`FileId`].
 pub fn syntax_diagnostics(
     db: &RootDatabase,
@@ -292,7 +317,7 @@ pub fn syntax_diagnostics(
     let sema = Semantics::new(db);
     let editioned_file_id = sema
         .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id));
+        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
 
     let (file_id, _) = editioned_file_id.unpack(db);
 
@@ -323,7 +348,7 @@ pub fn semantic_diagnostics(
     let sema = Semantics::new(db);
     let editioned_file_id = sema
         .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id));
+        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
 
     let (file_id, edition) = editioned_file_id.unpack(db);
     let mut res = Vec::new();
@@ -401,7 +426,7 @@ pub fn semantic_diagnostics(
                         Diagnostic::new(
                             DiagnosticCode::SyntaxError,
                             format!("Syntax Error in Expansion: {err}"),
-                            ctx.sema.diagnostics_display_range_for_range(d.range),
+                            ctx.resolve_precise_location(&d.node.clone(), d.precise_location),
                         )
                 }));
                 continue;
@@ -652,7 +677,7 @@ fn find_outline_mod_lint_severity(
     let lint_groups = lint_groups(&diag.code, edition);
     lint_attrs(
         sema,
-        &ast::AnyHasAttrs::cast(module_source_file.value).expect("SourceFile always has attrs"),
+        ast::AnyHasAttrs::cast(module_source_file.value).expect("SourceFile always has attrs"),
         edition,
     )
     .for_each(|(lint, severity)| {
@@ -673,7 +698,7 @@ fn lint_severity_at(
         .ancestors()
         .filter_map(ast::AnyHasAttrs::cast)
         .find_map(|ancestor| {
-            lint_attrs(sema, &ancestor, edition)
+            lint_attrs(sema, ancestor, edition)
                 .find_map(|(lint, severity)| lint_groups.contains(&lint).then_some(severity))
         })
         .or_else(|| {
@@ -681,13 +706,13 @@ fn lint_severity_at(
         })
 }
 
-// FIXME: Switch this to analysis' `expand_cfg_attr`.
 fn lint_attrs<'a>(
     sema: &'a Semantics<'a, RootDatabase>,
-    ancestor: &'a ast::AnyHasAttrs,
+    ancestor: ast::AnyHasAttrs,
     edition: Edition,
 ) -> impl Iterator<Item = (SmolStr, Severity)> + 'a {
-    ast::attrs_including_inner(ancestor)
+    ancestor
+        .attrs_including_inner()
         .filter_map(|attr| {
             attr.as_simple_call().and_then(|(name, value)| match &*name {
                 "allow" | "expect" => Some(Either::Left(iter::once((Severity::Allow, value)))),
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs
index de26879c2959d..181cc74a51d4f 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs
@@ -17,7 +17,7 @@ pub fn ssr_from_comment(
     frange: FileRange,
 ) -> Option<(MatchFinder<'_>, TextRange)> {
     let comment = {
-        let file_id = EditionedFileId::current_edition_guess_origin(db, frange.file_id);
+        let file_id = EditionedFileId::current_edition(db, frange.file_id);
 
         let file = db.parse(file_id);
         file.tree().syntax().token_at_offset(frange.range.start()).find_map(ast::Comment::cast)
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
index 7b2142a9f3489..43ad12c1f699a 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
@@ -125,9 +125,9 @@ impl<'db> MatchFinder<'db> {
     ) -> Result<MatchFinder<'db>, SsrError> {
         restrict_ranges.retain(|range| !range.range.is_empty());
         let sema = Semantics::new(db);
-        let file_id = sema.attach_first_edition(lookup_context.file_id).unwrap_or_else(|| {
-            EditionedFileId::current_edition_guess_origin(db, lookup_context.file_id)
-        });
+        let file_id = sema
+            .attach_first_edition(lookup_context.file_id)
+            .unwrap_or_else(|| EditionedFileId::current_edition(db, lookup_context.file_id));
         let resolution_scope = resolving::ResolutionScope::new(
             &sema,
             hir::FilePosition { file_id, offset: lookup_context.offset },
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs
index d23d22b4e8986..72f857ceda903 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs
@@ -135,9 +135,11 @@ impl<'db> MatchFinder<'db> {
         // seems to get put into a single source root.
         let mut files = Vec::new();
         self.search_files_do(|file_id| {
-            files.push(self.sema.attach_first_edition(file_id).unwrap_or_else(|| {
-                EditionedFileId::current_edition_guess_origin(self.sema.db, file_id)
-            }));
+            files.push(
+                self.sema
+                    .attach_first_edition(file_id)
+                    .unwrap_or_else(|| EditionedFileId::current_edition(self.sema.db, file_id)),
+            );
         });
         SearchScope::files(&files)
     }
diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
index 0ed91cf7f5885..c197d559aa89a 100644
--- a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
@@ -13,13 +13,13 @@ use stdx::format_to;
 use url::Url;
 
 use hir::{
-    Adt, AsAssocItem, AssocItem, AssocItemContainer, AttrsWithOwner, HasAttrs, db::HirDatabase,
+    Adt, AsAssocItem, AssocItem, AssocItemContainer, AttrsWithOwner, HasAttrs, db::HirDatabase, sym,
 };
 use ide_db::{
     RootDatabase,
     base_db::{CrateOrigin, LangCrateOrigin, ReleaseChannel, RootQueryDb},
     defs::{Definition, NameClass, NameRefClass},
-    documentation::{Documentation, HasDocs},
+    documentation::{DocsRangeMap, Documentation, HasDocs, docs_with_rangemap},
     helpers::pick_best_token,
 };
 use syntax::{
@@ -54,7 +54,7 @@ pub(crate) fn rewrite_links(
     db: &RootDatabase,
     markdown: &str,
     definition: Definition,
-    range_map: Option<&hir::Docs>,
+    range_map: Option<DocsRangeMap>,
 ) -> String {
     let mut cb = broken_link_clone_cb;
     let doc = Parser::new_with_broken_link_callback(markdown, MARKDOWN_OPTIONS, Some(&mut cb))
@@ -74,9 +74,9 @@ pub(crate) fn rewrite_links(
                 TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());
             let is_inner_doc = range_map
                 .as_ref()
-                .and_then(|range_map| range_map.find_ast_range(text_range))
-                .map(|(_, is_inner)| is_inner)
-                .unwrap_or(hir::IsInnerDoc::No);
+                .and_then(|range_map| range_map.map(text_range))
+                .map(|(_, attr_id)| attr_id.is_inner_attr())
+                .unwrap_or(false);
             if let Some((target, title)) =
                 rewrite_intra_doc_link(db, definition, target, title, is_inner_doc, link_type)
             {
@@ -187,7 +187,7 @@ pub(crate) fn external_docs(
 /// Extracts all links from a given markdown text returning the definition text range, link-text
 /// and the namespace if known.
 pub(crate) fn extract_definitions_from_docs(
-    docs: &Documentation<'_>,
+    docs: &Documentation,
 ) -> Vec<(TextRange, String, Option<hir::Namespace>)> {
     Parser::new_with_broken_link_callback(
         docs.as_str(),
@@ -214,7 +214,7 @@ pub(crate) fn resolve_doc_path_for_def(
     def: Definition,
     link: &str,
     ns: Option<hir::Namespace>,
-    is_inner_doc: hir::IsInnerDoc,
+    is_inner_doc: bool,
 ) -> Option<Definition> {
     match def {
         Definition::Module(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
@@ -324,11 +324,11 @@ impl DocCommentToken {
             let token_start = t.text_range().start();
             let abs_in_expansion_offset = token_start + relative_comment_offset + descended_prefix_len;
             let (attributes, def) = Self::doc_attributes(sema, &node, is_inner)?;
-            let doc_mapping = attributes.hir_docs(sema.db)?;
+            let (docs, doc_mapping) = docs_with_rangemap(sema.db, &attributes)?;
             let (in_expansion_range, link, ns, is_inner) =
-                extract_definitions_from_docs(&Documentation::new_borrowed(doc_mapping.docs())).into_iter().find_map(|(range, link, ns)| {
-                    let (mapped, is_inner) = doc_mapping.find_ast_range(range)?;
-                    (mapped.value.contains(abs_in_expansion_offset)).then_some((mapped.value, link, ns, is_inner))
+                extract_definitions_from_docs(&docs).into_iter().find_map(|(range, link, ns)| {
+                    let (mapped, idx) = doc_mapping.map(range)?;
+                    (mapped.value.contains(abs_in_expansion_offset)).then_some((mapped.value, link, ns, idx.is_inner_attr()))
                 })?;
             // get the relative range to the doc/attribute in the expansion
             let in_expansion_relative_range = in_expansion_range - descended_prefix_len - token_start;
@@ -416,7 +416,7 @@ fn rewrite_intra_doc_link(
     def: Definition,
     target: &str,
     title: &str,
-    is_inner_doc: hir::IsInnerDoc,
+    is_inner_doc: bool,
     link_type: LinkType,
 ) -> Option<(String, String)> {
     let (link, ns) = parse_intra_doc_link(target);
@@ -659,12 +659,14 @@ fn filename_and_frag_for_def(
         Definition::Crate(_) => String::from("index.html"),
         Definition::Module(m) => match m.name(db) {
             // `#[doc(keyword = "...")]` is internal used only by rust compiler
-            Some(name) => match m.doc_keyword(db) {
-                Some(kw) => {
-                    format!("keyword.{kw}.html")
+            Some(name) => {
+                match m.attrs(db).by_key(sym::doc).find_string_value_in_tt(sym::keyword) {
+                    Some(kw) => {
+                        format!("keyword.{kw}.html")
+                    }
+                    None => format!("{}/index.html", name.as_str()),
                 }
-                None => format!("{}/index.html", name.as_str()),
-            },
+            }
             None => String::from("index.html"),
         },
         Definition::Trait(t) => {
diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs
index 34ffc11c4b5f3..3fd885535a234 100644
--- a/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs
@@ -1,11 +1,11 @@
-use std::{borrow::Cow, iter};
+use std::iter;
 
 use expect_test::{Expect, expect};
 use hir::Semantics;
 use ide_db::{
     FilePosition, FileRange, RootDatabase,
     defs::Definition,
-    documentation::{Documentation, HasDocs},
+    documentation::{DocsRangeMap, Documentation, HasDocs},
 };
 use itertools::Itertools;
 use syntax::{AstNode, SyntaxNode, ast, match_ast};
@@ -45,9 +45,9 @@ fn check_external_docs(
 fn check_rewrite(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
     let (analysis, position) = fixture::position(ra_fixture);
     let sema = &Semantics::new(&analysis.db);
-    let (cursor_def, docs) = def_under_cursor(sema, &position);
+    let (cursor_def, docs, range) = def_under_cursor(sema, &position);
     let res =
-        hir::attach_db(sema.db, || rewrite_links(sema.db, docs.docs(), cursor_def, Some(&docs)));
+        hir::attach_db(sema.db, || rewrite_links(sema.db, docs.as_str(), cursor_def, Some(range)));
     expect.assert_eq(&res)
 }
 
@@ -57,36 +57,33 @@ fn check_doc_links(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
     let (analysis, position, mut expected) = fixture::annotations(ra_fixture);
     expected.sort_by_key(key_fn);
     let sema = &Semantics::new(&analysis.db);
-    hir::attach_db(sema.db, || {
-        let (cursor_def, docs) = def_under_cursor(sema, &position);
-        let defs = extract_definitions_from_docs(&Documentation::new_borrowed(docs.docs()));
-        let actual: Vec<_> = defs
-            .into_iter()
-            .flat_map(|(text_range, link, ns)| {
-                let attr = docs.find_ast_range(text_range);
-                let is_inner_attr =
-                    attr.map(|(_file, is_inner)| is_inner).unwrap_or(hir::IsInnerDoc::No);
-                let def = resolve_doc_path_for_def(sema.db, cursor_def, &link, ns, is_inner_attr)
-                    .unwrap_or_else(|| panic!("Failed to resolve {link}"));
-                def.try_to_nav(sema).unwrap().into_iter().zip(iter::repeat(link))
-            })
-            .map(|(nav_target, link)| {
-                let range = FileRange {
-                    file_id: nav_target.file_id,
-                    range: nav_target.focus_or_full_range(),
-                };
-                (range, link)
-            })
-            .sorted_by_key(key_fn)
-            .collect();
-        assert_eq!(expected, actual);
-    });
-}
-
-fn def_under_cursor<'db>(
-    sema: &Semantics<'db, RootDatabase>,
+    let (cursor_def, docs, range) = def_under_cursor(sema, &position);
+    let defs = extract_definitions_from_docs(&docs);
+    let actual: Vec<_> = defs
+        .into_iter()
+        .flat_map(|(text_range, link, ns)| {
+            let attr = range.map(text_range);
+            let is_inner_attr = attr.map(|(_file, attr)| attr.is_inner_attr()).unwrap_or(false);
+            let def = hir::attach_db(sema.db, || {
+                resolve_doc_path_for_def(sema.db, cursor_def, &link, ns, is_inner_attr)
+                    .unwrap_or_else(|| panic!("Failed to resolve {link}"))
+            });
+            def.try_to_nav(sema).unwrap().into_iter().zip(iter::repeat(link))
+        })
+        .map(|(nav_target, link)| {
+            let range =
+                FileRange { file_id: nav_target.file_id, range: nav_target.focus_or_full_range() };
+            (range, link)
+        })
+        .sorted_by_key(key_fn)
+        .collect();
+    assert_eq!(expected, actual);
+}
+
+fn def_under_cursor(
+    sema: &Semantics<'_, RootDatabase>,
     position: &FilePosition,
-) -> (Definition, Cow<'db, hir::Docs>) {
+) -> (Definition, Documentation, DocsRangeMap) {
     let (docs, def) = sema
         .parse_guess_edition(position.file_id)
         .syntax()
@@ -97,14 +94,14 @@ fn def_under_cursor<'db>(
         .find_map(|it| node_to_def(sema, &it))
         .expect("no def found")
         .unwrap();
-    let docs = docs.expect("no docs found for cursor def");
-    (def, docs)
+    let (docs, range) = docs.expect("no docs found for cursor def");
+    (def, docs, range)
 }
 
-fn node_to_def<'db>(
-    sema: &Semantics<'db, RootDatabase>,
+fn node_to_def(
+    sema: &Semantics<'_, RootDatabase>,
     node: &SyntaxNode,
-) -> Option<Option<(Option<Cow<'db, hir::Docs>>, Definition)>> {
+) -> Option<Option<(Option<(Documentation, DocsRangeMap)>, Definition)>> {
     Some(match_ast! {
         match node {
             ast::SourceFile(it)  => sema.to_def(&it).map(|def| (def.docs_with_rangemap(sema.db), Definition::Module(def))),
diff --git a/src/tools/rust-analyzer/crates/ide/src/fixture.rs b/src/tools/rust-analyzer/crates/ide/src/fixture.rs
index 1a8591d25dcaf..fbf89042fae15 100644
--- a/src/tools/rust-analyzer/crates/ide/src/fixture.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/fixture.rs
@@ -7,10 +7,10 @@ use crate::{Analysis, AnalysisHost, FileId, FilePosition, FileRange};
 /// Creates analysis for a single file.
 pub(crate) fn file(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (Analysis, FileId) {
     let mut host = AnalysisHost::default();
-    let change_fixture = ChangeFixture::parse(ra_fixture);
+    let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
     host.db.enable_proc_attr_macros();
     host.db.apply_change(change_fixture.change);
-    (host.analysis(), change_fixture.files[0].file_id())
+    (host.analysis(), change_fixture.files[0].file_id(&host.db))
 }
 
 /// Creates analysis from a multi-file fixture, returns positions marked with $0.
@@ -18,23 +18,23 @@ pub(crate) fn position(
     #[rust_analyzer::rust_fixture] ra_fixture: &str,
 ) -> (Analysis, FilePosition) {
     let mut host = AnalysisHost::default();
-    let change_fixture = ChangeFixture::parse(ra_fixture);
+    let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
     host.db.enable_proc_attr_macros();
     host.db.apply_change(change_fixture.change);
     let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
     let offset = range_or_offset.expect_offset();
-    (host.analysis(), FilePosition { file_id: file_id.file_id(), offset })
+    (host.analysis(), FilePosition { file_id: file_id.file_id(&host.db), offset })
 }
 
 /// Creates analysis for a single file, returns range marked with a pair of $0.
 pub(crate) fn range(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (Analysis, FileRange) {
     let mut host = AnalysisHost::default();
-    let change_fixture = ChangeFixture::parse(ra_fixture);
+    let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
     host.db.enable_proc_attr_macros();
     host.db.apply_change(change_fixture.change);
     let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
     let range = range_or_offset.expect_range();
-    (host.analysis(), FileRange { file_id: file_id.file_id(), range })
+    (host.analysis(), FileRange { file_id: file_id.file_id(&host.db), range })
 }
 
 /// Creates analysis for a single file, returns range marked with a pair of $0 or a position marked with $0.
@@ -42,11 +42,11 @@ pub(crate) fn range_or_position(
     #[rust_analyzer::rust_fixture] ra_fixture: &str,
 ) -> (Analysis, FileId, RangeOrOffset) {
     let mut host = AnalysisHost::default();
-    let change_fixture = ChangeFixture::parse(ra_fixture);
+    let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
     host.db.enable_proc_attr_macros();
     host.db.apply_change(change_fixture.change);
     let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
-    (host.analysis(), file_id.file_id(), range_or_offset)
+    (host.analysis(), file_id.file_id(&host.db), range_or_offset)
 }
 
 /// Creates analysis from a multi-file fixture, returns positions marked with $0.
@@ -54,24 +54,25 @@ pub(crate) fn annotations(
     #[rust_analyzer::rust_fixture] ra_fixture: &str,
 ) -> (Analysis, FilePosition, Vec<(FileRange, String)>) {
     let mut host = AnalysisHost::default();
-    let change_fixture = ChangeFixture::parse(ra_fixture);
+    let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
     host.db.enable_proc_attr_macros();
     host.db.apply_change(change_fixture.change);
     let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
     let offset = range_or_offset.expect_offset();
 
+    let db = &host.db;
     let annotations = change_fixture
         .files
         .iter()
         .flat_map(|&file_id| {
-            let file_text = host.analysis().file_text(file_id.file_id()).unwrap();
+            let file_text = host.analysis().file_text(file_id.file_id(&host.db)).unwrap();
             let annotations = extract_annotations(&file_text);
             annotations
                 .into_iter()
-                .map(move |(range, data)| (FileRange { file_id: file_id.file_id(), range }, data))
+                .map(move |(range, data)| (FileRange { file_id: file_id.file_id(db), range }, data))
         })
         .collect();
-    (host.analysis(), FilePosition { file_id: file_id.file_id(), offset }, annotations)
+    (host.analysis(), FilePosition { file_id: file_id.file_id(&host.db), offset }, annotations)
 }
 
 /// Creates analysis from a multi-file fixture with annotations without $0
@@ -79,19 +80,20 @@ pub(crate) fn annotations_without_marker(
     #[rust_analyzer::rust_fixture] ra_fixture: &str,
 ) -> (Analysis, Vec<(FileRange, String)>) {
     let mut host = AnalysisHost::default();
-    let change_fixture = ChangeFixture::parse(ra_fixture);
+    let change_fixture = ChangeFixture::parse(&host.db, ra_fixture);
     host.db.enable_proc_attr_macros();
     host.db.apply_change(change_fixture.change);
 
+    let db = &host.db;
     let annotations = change_fixture
         .files
         .iter()
         .flat_map(|&file_id| {
-            let file_text = host.analysis().file_text(file_id.file_id()).unwrap();
+            let file_text = host.analysis().file_text(file_id.file_id(db)).unwrap();
             let annotations = extract_annotations(&file_text);
             annotations
                 .into_iter()
-                .map(move |(range, data)| (FileRange { file_id: file_id.file_id(), range }, data))
+                .map(move |(range, data)| (FileRange { file_id: file_id.file_id(db), range }, data))
         })
         .collect();
     (host.analysis(), annotations)
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
index cc333d66caf32..875403c4e32a4 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
@@ -355,7 +355,7 @@ trait Bar {}
 
 fn test() {
     #[derive(Copy)]
-   // ^^^^^^^^^^^^
+  //^^^^^^^^^^^^^^^
     struct Foo$0;
 
     impl Foo {}
diff --git a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
index f7870032ea281..04ce5a7567f3c 100644
--- a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
@@ -62,7 +62,7 @@ pub(crate) fn highlight_related(
     let _p = tracing::info_span!("highlight_related").entered();
     let file_id = sema
         .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(sema.db, file_id));
+        .unwrap_or_else(|| EditionedFileId::current_edition(sema.db, file_id));
     let syntax = sema.parse(file_id).syntax().clone();
 
     let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind {
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
index 5bdfb57356583..a1eff3aaee789 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
@@ -1,5 +1,5 @@
 //! Logic for rendering the different hover messages
-use std::{borrow::Cow, env, mem, ops::Not};
+use std::{env, mem, ops::Not};
 
 use either::Either;
 use hir::{
@@ -11,7 +11,7 @@ use hir::{
 use ide_db::{
     RootDatabase,
     defs::{Definition, find_std_module},
-    documentation::{Documentation, HasDocs},
+    documentation::{DocsRangeMap, HasDocs},
     famous_defs::FamousDefs,
     generated::lints::{CLIPPY_LINTS, DEFAULT_LINTS, FEATURES},
     syntax_helpers::prettify_macro_expansion,
@@ -278,9 +278,9 @@ pub(super) fn keyword(
         keyword_hints(sema, token, parent, edition, display_target);
 
     let doc_owner = find_std_module(&famous_defs, &keyword_mod, edition)?;
-    let docs = doc_owner.docs_with_rangemap(sema.db)?;
+    let (docs, range_map) = doc_owner.docs_with_rangemap(sema.db)?;
     let (markup, range_map) =
-        markup(Some(Either::Left(docs)), description, None, None, String::new());
+        markup(Some(docs.into()), Some(range_map), description, None, None, String::new());
     let markup = process_markup(sema.db, Definition::Module(doc_owner), &markup, range_map, config);
     Some(HoverResult { markup, actions })
 }
@@ -370,12 +370,12 @@ pub(super) fn process_markup(
     db: &RootDatabase,
     def: Definition,
     markup: &Markup,
-    markup_range_map: Option,
+    markup_range_map: Option<DocsRangeMap>,
     config: &HoverConfig<'_>,
 ) -> Markup {
     let markup = markup.as_str();
     let markup = if config.links_in_hover {
-        rewrite_links(db, markup, def, markup_range_map.as_ref())
+        rewrite_links(db, markup, def, markup_range_map)
     } else {
         remove_links(markup)
     };
@@ -484,7 +484,7 @@ pub(super) fn definition(
     config: &HoverConfig<'_>,
     edition: Edition,
     display_target: DisplayTarget,
-) -> (Markup, Option) {
+) -> (Markup, Option<DocsRangeMap>) {
     let mod_path = definition_path(db, &def, edition);
     let label = match def {
         Definition::Trait(trait_) => trait_
@@ -520,7 +520,12 @@ pub(super) fn definition(
         }
         _ => def.label(db, display_target),
     };
-    let docs = def.docs_with_rangemap(db, famous_defs, display_target);
+    let (docs, range_map) =
+        if let Some((docs, doc_range)) = def.docs_with_rangemap(db, famous_defs, display_target) {
+            (Some(docs), doc_range)
+        } else {
+            (None, None)
+        };
     let value = || match def {
         Definition::Variant(it) => {
             if !it.parent_enum(db).is_data_carrying(db) {
@@ -837,7 +842,14 @@ pub(super) fn definition(
         }
     };
 
-    markup(docs, desc, extra.is_empty().not().then_some(extra), mod_path, subst_types)
+    markup(
+        docs.map(Into::into),
+        range_map,
+        desc,
+        extra.is_empty().not().then_some(extra),
+        mod_path,
+        subst_types,
+    )
 }
 
 #[derive(Debug)]
@@ -1112,12 +1124,13 @@ fn definition_path(db: &RootDatabase, &def: &Definition, edition: Edition) -> Op
 }
 
 fn markup(
-    docs: Option, Documentation<'_>>>,
+    docs: Option<String>,
+    range_map: Option<DocsRangeMap>,
     rust: String,
     extra: Option<String>,
     mod_path: Option<String>,
     subst_types: String,
-) -> (Markup, Option) {
+) -> (Markup, Option<DocsRangeMap>) {
     let mut buf = String::new();
 
     if let Some(mod_path) = mod_path
@@ -1138,21 +1151,10 @@ fn markup(
     if let Some(doc) = docs {
         format_to!(buf, "\n___\n\n");
         let offset = TextSize::new(buf.len() as u32);
-        let docs_str = match &doc {
-            Either::Left(docs) => docs.docs(),
-            Either::Right(docs) => docs.as_str(),
-        };
-        format_to!(buf, "{}", docs_str);
-        let range_map = match doc {
-            Either::Left(range_map) => {
-                let mut range_map = range_map.into_owned();
-                range_map.shift_by(offset);
-                Some(range_map)
-            }
-            Either::Right(_) => None,
-        };
+        let buf_range_map = range_map.map(|range_map| range_map.shift_docstring_line_range(offset));
+        format_to!(buf, "{}", doc);
 
-        (buf.into(), range_map)
+        (buf.into(), buf_range_map)
     } else {
         (buf.into(), None)
     }
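
The reworked `markup` helper above records the buffer length right before the docs are appended and shifts the accompanying range map by that offset. A minimal standalone sketch of that bookkeeping, under the assumption of plain `u32` offsets and a hypothetical `DocRange` standing in for the crate's `TextSize`/`DocsRangeMap`:

```rust
// Minimal sketch: the docs' ranges are relative to the docs string, so the
// shift amount must be the buffer length *before* the docs are appended.
// `DocRange` is a hypothetical stand-in for the crate's range-map entries.
#[derive(Debug, PartialEq)]
struct DocRange {
    start: u32,
    end: u32,
}

fn append_docs(buf: &mut String, docs: &str, ranges: &[DocRange]) -> Vec<DocRange> {
    buf.push_str("\n___\n\n");
    // Offset the docs will start at inside the final buffer.
    let offset = buf.len() as u32;
    buf.push_str(docs);
    // Re-base every docs-relative range onto `buf`.
    ranges.iter().map(|r| DocRange { start: r.start + offset, end: r.end + offset }).collect()
}

fn main() {
    let mut buf = String::from("fn foo()");
    // 6..9 covers "foo" inside the docs string below.
    let shifted = append_docs(&mut buf, "Does [foo] things.", &[DocRange { start: 6, end: 9 }]);
    assert_eq!(&buf[shifted[0].start as usize..shifted[0].end as usize], "foo");
}
```
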
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
index d474e50d3c2c9..21550d5e66658 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
@@ -90,7 +90,7 @@ pub(crate) fn inlay_hints(
     let sema = Semantics::new(db);
     let file_id = sema
         .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id));
+        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
     let file = sema.parse(file_id);
     let file = file.syntax();
 
@@ -143,7 +143,7 @@ pub(crate) fn inlay_hints_resolve(
     let sema = Semantics::new(db);
     let file_id = sema
         .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id));
+        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
     let file = sema.parse(file_id);
     let file = file.syntax();
 
diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs
index a633877adb4e1..857252832ffe1 100644
--- a/src/tools/rust-analyzer/crates/ide/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs
@@ -331,8 +331,7 @@ impl Analysis {
     pub fn parse(&self, file_id: FileId) -> Cancellable<SourceFile> {
         // FIXME edition
         self.with_db(|db| {
-            let editioned_file_id_wrapper =
-                EditionedFileId::current_edition_guess_origin(&self.db, file_id);
+            let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id);
 
             db.parse(editioned_file_id_wrapper).tree()
         })
@@ -361,7 +360,7 @@ impl Analysis {
     /// supported).
     pub fn matching_brace(&self, position: FilePosition) -> Cancellable<Option<TextSize>> {
         self.with_db(|db| {
-            let file_id = EditionedFileId::current_edition_guess_origin(&self.db, position.file_id);
+            let file_id = EditionedFileId::current_edition(&self.db, position.file_id);
             let parse = db.parse(file_id);
             let file = parse.tree();
             matching_brace::matching_brace(&file, position.offset)
@@ -422,7 +421,7 @@ impl Analysis {
     pub fn join_lines(&self, config: &JoinLinesConfig, frange: FileRange) -> Cancellable<TextEdit> {
         self.with_db(|db| {
             let editioned_file_id_wrapper =
-                EditionedFileId::current_edition_guess_origin(&self.db, frange.file_id);
+                EditionedFileId::current_edition(&self.db, frange.file_id);
             let parse = db.parse(editioned_file_id_wrapper);
             join_lines::join_lines(config, &parse.tree(), frange.range)
         })
@@ -463,8 +462,7 @@ impl Analysis {
     ) -> Cancellable<Vec<StructureNode>> {
         // FIXME: Edition
         self.with_db(|db| {
-            let editioned_file_id_wrapper =
-                EditionedFileId::current_edition_guess_origin(&self.db, file_id);
+            let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id);
             let source_file = db.parse(editioned_file_id_wrapper).tree();
             file_structure::file_structure(&source_file, config)
         })
@@ -495,8 +493,7 @@ impl Analysis {
     /// Returns the set of folding ranges.
     pub fn folding_ranges(&self, file_id: FileId) -> Cancellable<Vec<Fold>> {
         self.with_db(|db| {
-            let editioned_file_id_wrapper =
-                EditionedFileId::current_edition_guess_origin(&self.db, file_id);
+            let editioned_file_id_wrapper = EditionedFileId::current_edition(&self.db, file_id);
 
             folding_ranges::folding_ranges(&db.parse(editioned_file_id_wrapper).tree())
         })
diff --git a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
index 8e73ddf8bfc3f..b222ff3eec0be 100644
--- a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
@@ -54,8 +54,7 @@ pub struct NavigationTarget {
     // FIXME: Symbol
     pub container_name: Option<SmolStr>,
     pub description: Option<String>,
-    // FIXME: Use the database lifetime here.
-    pub docs: Option>,
+    pub docs: Option<Documentation>,
     /// In addition to a `name` field, a `NavigationTarget` may also be aliased
     /// In such cases we want a `NavigationTarget` to be accessible by its alias
     // FIXME: Symbol
@@ -164,7 +163,7 @@ impl NavigationTarget {
                             full_range,
                             SymbolKind::Module,
                         );
-                        res.docs = module.docs(db).map(Documentation::into_owned);
+                        res.docs = module.docs(db);
                         res.description = Some(
                             module.display(db, module.krate().to_display_target(db)).to_string(),
                         );
@@ -438,7 +437,7 @@ where
                 D::KIND,
             )
             .map(|mut res| {
-                res.docs = self.docs(db).map(Documentation::into_owned);
+                res.docs = self.docs(db);
                 res.description = hir::attach_db(db, || {
                     Some(self.display(db, self.krate(db).to_display_target(db)).to_string())
                 });
@@ -537,7 +536,7 @@ impl TryToNav for hir::ExternCrateDecl {
                     SymbolKind::Module,
                 );
 
-                res.docs = self.docs(db).map(Documentation::into_owned);
+                res.docs = self.docs(db);
                 res.description = Some(self.display(db, krate.to_display_target(db)).to_string());
                 res.container_name = container_name(db, *self, edition);
                 res
@@ -559,9 +558,10 @@ impl TryToNav for hir::Field {
             FieldSource::Named(it) => {
                 NavigationTarget::from_named(db, src.with_value(it), SymbolKind::Field).map(
                     |mut res| {
-                        res.docs = self.docs(db).map(Documentation::into_owned);
-                        res.description =
-                            Some(self.display(db, krate.to_display_target(db)).to_string());
+                        res.docs = self.docs(db);
+                        res.description = hir::attach_db(db, || {
+                            Some(self.display(db, krate.to_display_target(db)).to_string())
+                        });
                         res
                     },
                 )
@@ -600,7 +600,7 @@ impl TryToNav for hir::Macro {
                 self.kind(db).into(),
             )
             .map(|mut res| {
-                res.docs = self.docs(db).map(Documentation::into_owned);
+                res.docs = self.docs(db);
                 res
             }),
         )
@@ -939,7 +939,7 @@ pub(crate) fn orig_range_with_focus_r(
 ) -> UpmappingResult<(FileRange, Option<TextRange>)> {
     let Some(name) = focus_range else { return orig_range_r(db, hir_file, value) };
 
-    let call = || db.lookup_intern_macro_call(hir_file.macro_file().unwrap());
+    let call_kind = || db.lookup_intern_macro_call(hir_file.macro_file().unwrap()).kind;
 
     let def_range =
         || db.lookup_intern_macro_call(hir_file.macro_file().unwrap()).def.definition_range(db);
@@ -965,8 +965,7 @@ pub(crate) fn orig_range_with_focus_r(
                             // name lies outside the node, so instead point to the macro call which
                             // *should* contain the name
                             _ => {
-                                let call = call();
-                                let kind = call.kind;
+                                let kind = call_kind();
                                 let range = kind.clone().original_call_range_with_input(db);
                                 //If the focus range is in the attribute/derive body, we
                                 // need to point the call site to the entire body, if not, fall back
@@ -978,7 +977,7 @@ pub(crate) fn orig_range_with_focus_r(
                                 {
                                     range
                                 } else {
-                                    kind.original_call_range(db, call.krate)
+                                    kind.original_call_range(db)
                                 }
                             }
                         },
@@ -1007,14 +1006,11 @@ pub(crate) fn orig_range_with_focus_r(
                         },
                     ),
                     // node is in macro def, just show the focus
-                    _ => {
-                        let call = call();
-                        (
-                            // show the macro call
-                            (call.kind.original_call_range(db, call.krate), None),
-                            Some((focus_range, Some(focus_range))),
-                        )
-                    }
+                    _ => (
+                        // show the macro call
+                        (call_kind().original_call_range(db), None),
+                        Some((focus_range, Some(focus_range))),
+                    ),
                 }
             }
             // lost name? can't happen for single tokens
diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs
index c4dcd588d6934..a53a192997274 100644
--- a/src/tools/rust-analyzer/crates/ide/src/references.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/references.rs
@@ -1124,10 +1124,7 @@ pub(super) struct Foo$0 {
         check_with_scope(
             code,
             Some(&mut |db| {
-                SearchScope::single_file(EditionedFileId::current_edition_guess_origin(
-                    db,
-                    FileId::from_raw(2),
-                ))
+                SearchScope::single_file(EditionedFileId::current_edition(db, FileId::from_raw(2)))
             }),
             expect![[r#"
                 quux Function FileId(0) 19..35 26..30
diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs
index 4b475dac87b59..494701d97def1 100644
--- a/src/tools/rust-analyzer/crates/ide/src/runnables.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs
@@ -3,13 +3,17 @@ use std::{fmt, sync::OnceLock};
 use arrayvec::ArrayVec;
 use ast::HasName;
 use cfg::{CfgAtom, CfgExpr};
-use hir::{AsAssocItem, HasAttrs, HasCrate, HasSource, Semantics, Symbol, db::HirDatabase, sym};
+use hir::{
+    AsAssocItem, AttrsWithOwner, HasAttrs, HasCrate, HasSource, Semantics, Symbol, db::HirDatabase,
+    sym,
+};
 use ide_assists::utils::{has_test_related_attribute, test_related_attribute_syn};
 use ide_db::impl_empty_upmap_from_ra_fixture;
 use ide_db::{
     FilePosition, FxHashMap, FxIndexMap, FxIndexSet, RootDatabase, SymbolKind,
     base_db::RootQueryDb,
     defs::Definition,
+    documentation::docs_from_attrs,
     helpers::visit_file_defs,
     search::{FileReferenceNode, SearchScope},
 };
@@ -319,7 +323,7 @@ pub(crate) fn runnable_fn(
     def: hir::Function,
 ) -> Option<Runnable> {
     let edition = def.krate(sema.db).edition(sema.db);
-    let under_cfg_test = has_cfg_test(def.module(sema.db).attrs(sema.db).cfgs(sema.db));
+    let under_cfg_test = has_cfg_test(def.module(sema.db).attrs(sema.db));
     let kind = if !under_cfg_test && def.is_main(sema.db) {
         RunnableKind::Bin
     } else {
@@ -354,7 +358,7 @@ pub(crate) fn runnable_fn(
     let file_range = fn_source.syntax().original_file_range_with_macro_call_input(sema.db);
     let update_test = UpdateTest::find_snapshot_macro(sema, file_range);
 
-    let cfg = def.attrs(sema.db).cfgs(sema.db).cloned();
+    let cfg = def.attrs(sema.db).cfg();
     Some(Runnable { use_name_in_title: false, nav, kind, cfg, update_test })
 }
 
@@ -362,8 +366,8 @@ pub(crate) fn runnable_mod(
     sema: &Semantics<'_, RootDatabase>,
     def: hir::Module,
 ) -> Option<Runnable> {
-    let cfg = def.attrs(sema.db).cfgs(sema.db);
-    if !has_test_function_or_multiple_test_submodules(sema, &def, has_cfg_test(cfg)) {
+    if !has_test_function_or_multiple_test_submodules(sema, &def, has_cfg_test(def.attrs(sema.db)))
+    {
         return None;
     }
     let path = def
@@ -377,7 +381,8 @@ pub(crate) fn runnable_mod(
         })
         .join("::");
 
-    let cfg = cfg.cloned();
+    let attrs = def.attrs(sema.db);
+    let cfg = attrs.cfg();
     let nav = NavigationTarget::from_module_to_decl(sema.db, def).call_site();
 
     let module_source = sema.module_definition_node(def);
@@ -404,10 +409,10 @@ pub(crate) fn runnable_impl(
     let display_target = def.module(sema.db).krate().to_display_target(sema.db);
     let edition = display_target.edition;
     let attrs = def.attrs(sema.db);
-    if !has_runnable_doc_test(sema.db, &attrs) {
+    if !has_runnable_doc_test(&attrs) {
         return None;
     }
-    let cfg = attrs.cfgs(sema.db).cloned();
+    let cfg = attrs.cfg();
     let nav = def.try_to_nav(sema)?.call_site();
     let ty = def.self_ty(sema.db);
     let adt_name = ty.as_adt()?.name(sema.db);
@@ -437,16 +442,8 @@ pub(crate) fn runnable_impl(
     })
 }
 
-fn has_cfg_test(cfg: Option<&CfgExpr>) -> bool {
-    return cfg.is_some_and(has_cfg_test_impl);
-
-    fn has_cfg_test_impl(cfg: &CfgExpr) -> bool {
-        match cfg {
-            CfgExpr::Atom(CfgAtom::Flag(s)) => *s == sym::test,
-            CfgExpr::Any(cfgs) | CfgExpr::All(cfgs) => cfgs.iter().any(has_cfg_test_impl),
-            _ => false,
-        }
-    }
+fn has_cfg_test(attrs: AttrsWithOwner) -> bool {
+    attrs.cfgs().any(|cfg| matches!(&cfg, CfgExpr::Atom(CfgAtom::Flag(s)) if *s == sym::test))
 }
 
 /// Creates a test mod runnable for outline modules at the top of their definition.
@@ -456,8 +453,8 @@ fn runnable_mod_outline_definition(
 ) -> Option {
     def.as_source_file_id(sema.db)?;
 
-    let cfg = def.attrs(sema.db).cfgs(sema.db);
-    if !has_test_function_or_multiple_test_submodules(sema, &def, has_cfg_test(cfg)) {
+    if !has_test_function_or_multiple_test_submodules(sema, &def, has_cfg_test(def.attrs(sema.db)))
+    {
         return None;
     }
     let path = def
@@ -471,7 +468,8 @@ fn runnable_mod_outline_definition(
         })
         .join("::");
 
-    let cfg = cfg.cloned();
+    let attrs = def.attrs(sema.db);
+    let cfg = attrs.cfg();
 
     let mod_source = sema.module_definition_node(def);
     let mod_syntax = mod_source.file_syntax(sema.db);
@@ -510,7 +508,7 @@ fn module_def_doctest(sema: &Semantics<'_, RootDatabase>, def: Definition) -> Op
     let display_target = krate
         .unwrap_or_else(|| (*db.all_crates().last().expect("no crate graph present")).into())
         .to_display_target(db);
-    if !has_runnable_doc_test(db, &attrs) {
+    if !has_runnable_doc_test(&attrs) {
         return None;
     }
     let def_name = def.name(db)?;
@@ -556,7 +554,7 @@ fn module_def_doctest(sema: &Semantics<'_, RootDatabase>, def: Definition) -> Op
         use_name_in_title: false,
         nav,
         kind: RunnableKind::DocTest { test_id },
-        cfg: attrs.cfgs(db).cloned(),
+        cfg: attrs.cfg(),
         update_test: UpdateTest::default(),
     };
     Some(res)
@@ -573,15 +571,15 @@ impl TestAttr {
     }
 }
 
-fn has_runnable_doc_test(db: &RootDatabase, attrs: &hir::AttrsWithOwner) -> bool {
+fn has_runnable_doc_test(attrs: &hir::Attrs) -> bool {
     const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];
     const RUSTDOC_CODE_BLOCK_ATTRIBUTES_RUNNABLE: &[&str] =
         &["", "rust", "should_panic", "edition2015", "edition2018", "edition2021"];
 
-    attrs.hir_docs(db).is_some_and(|doc| {
+    docs_from_attrs(attrs).is_some_and(|doc| {
         let mut in_code_block = false;
 
-        for line in doc.docs().lines() {
+        for line in doc.lines() {
             if let Some(header) =
                 RUSTDOC_FENCES.into_iter().find_map(|fence| line.strip_prefix(fence))
             {
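
The `has_cfg_test` rewrite above replaces a recursive walk over a single `CfgExpr` with a flat check over every cfg expression attached to the item. A rough standalone sketch of the two shapes, using a simplified stand-in enum rather than the `cfg` crate's real types (the recursive version mirrors the deleted helper):

```rust
// Simplified stand-in for the `cfg` crate's expression type; only what this
// sketch needs (hypothetical, not the real API).
#[allow(dead_code)]
enum CfgExpr {
    Atom(String),
    Any(Vec<CfgExpr>),
    All(Vec<CfgExpr>),
}

// Shape of the deleted helper: recurse through `any`/`all` groups looking for
// a `test` atom anywhere inside one expression.
fn has_cfg_test_recursive(cfg: &CfgExpr) -> bool {
    match cfg {
        CfgExpr::Atom(s) => s == "test",
        CfgExpr::Any(cfgs) | CfgExpr::All(cfgs) => cfgs.iter().any(has_cfg_test_recursive),
    }
}

// Shape of the replacement: look at each cfg expression on the item and match
// only a top-level `test` atom, as in `attrs.cfgs().any(...)` above.
fn has_cfg_test_flat<'a>(cfgs: impl IntoIterator<Item = &'a CfgExpr>) -> bool {
    cfgs.into_iter().any(|cfg| matches!(cfg, CfgExpr::Atom(s) if s == "test"))
}

fn main() {
    let top_level = CfgExpr::Atom("test".into());
    let nested = CfgExpr::All(vec![CfgExpr::Atom("test".into()), CfgExpr::Atom("unix".into())]);

    assert!(has_cfg_test_recursive(&nested));
    assert!(has_cfg_test_flat([&top_level]));
    // In this simplified model, a nested atom is only found by the recursive form.
    assert!(!has_cfg_test_flat([&nested]));
}
```

In this simplified model a `test` atom nested inside `any`/`all` is only found by the recursive form; whether that distinction matters in practice depends on how `cfgs()` enumerates the item's attributes.
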
diff --git a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs
index a8fc57a431b4f..5f7e12cf53f84 100644
--- a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs
@@ -31,7 +31,7 @@ use crate::RootDatabase;
 /// edited.
 #[derive(Debug)]
 pub struct SignatureHelp {
-    pub doc: Option>,
+    pub doc: Option<Documentation>,
     pub signature: String,
     pub active_parameter: Option<usize>,
     parameters: Vec<TextRange>,
@@ -174,7 +174,7 @@ fn signature_help_for_call(
     let mut fn_params = None;
     match callable.kind() {
         hir::CallableKind::Function(func) => {
-            res.doc = func.docs(db).map(Documentation::into_owned);
+            res.doc = func.docs(db);
             format_to!(res.signature, "fn {}", func.name(db).display(db, edition));
 
             let generic_params = GenericDef::Function(func)
@@ -196,7 +196,7 @@ fn signature_help_for_call(
             });
         }
         hir::CallableKind::TupleStruct(strukt) => {
-            res.doc = strukt.docs(db).map(Documentation::into_owned);
+            res.doc = strukt.docs(db);
             format_to!(res.signature, "struct {}", strukt.name(db).display(db, edition));
 
             let generic_params = GenericDef::Adt(strukt.into())
@@ -209,7 +209,7 @@ fn signature_help_for_call(
             }
         }
         hir::CallableKind::TupleEnumVariant(variant) => {
-            res.doc = variant.docs(db).map(Documentation::into_owned);
+            res.doc = variant.docs(db);
             format_to!(
                 res.signature,
                 "enum {}",
@@ -314,33 +314,33 @@ fn signature_help_for_generics(
     let db = sema.db;
     match generics_def {
         hir::GenericDef::Function(it) => {
-            res.doc = it.docs(db).map(Documentation::into_owned);
+            res.doc = it.docs(db);
             format_to!(res.signature, "fn {}", it.name(db).display(db, edition));
         }
         hir::GenericDef::Adt(hir::Adt::Enum(it)) => {
-            res.doc = it.docs(db).map(Documentation::into_owned);
+            res.doc = it.docs(db);
             format_to!(res.signature, "enum {}", it.name(db).display(db, edition));
             if let Some(variant) = variant {
                 // In paths, generics of an enum can be specified *after* one of its variants.
                 // eg. `None::`
                 // We'll use the signature of the enum, but include the docs of the variant.
-                res.doc = variant.docs(db).map(Documentation::into_owned);
+                res.doc = variant.docs(db);
             }
         }
         hir::GenericDef::Adt(hir::Adt::Struct(it)) => {
-            res.doc = it.docs(db).map(Documentation::into_owned);
+            res.doc = it.docs(db);
             format_to!(res.signature, "struct {}", it.name(db).display(db, edition));
         }
         hir::GenericDef::Adt(hir::Adt::Union(it)) => {
-            res.doc = it.docs(db).map(Documentation::into_owned);
+            res.doc = it.docs(db);
             format_to!(res.signature, "union {}", it.name(db).display(db, edition));
         }
         hir::GenericDef::Trait(it) => {
-            res.doc = it.docs(db).map(Documentation::into_owned);
+            res.doc = it.docs(db);
             format_to!(res.signature, "trait {}", it.name(db).display(db, edition));
         }
         hir::GenericDef::TypeAlias(it) => {
-            res.doc = it.docs(db).map(Documentation::into_owned);
+            res.doc = it.docs(db);
             format_to!(res.signature, "type {}", it.name(db).display(db, edition));
         }
         // These don't have generic args that can be specified
@@ -495,7 +495,7 @@ fn signature_help_for_tuple_struct_pat(
     let fields: Vec<_> = if let PathResolution::Def(ModuleDef::Variant(variant)) = path_res {
         let en = variant.parent_enum(db);
 
-        res.doc = en.docs(db).map(Documentation::into_owned);
+        res.doc = en.docs(db);
         format_to!(
             res.signature,
             "enum {}::{} (",
@@ -512,7 +512,7 @@ fn signature_help_for_tuple_struct_pat(
 
         match adt {
             hir::Adt::Struct(it) => {
-                res.doc = it.docs(db).map(Documentation::into_owned);
+                res.doc = it.docs(db);
                 format_to!(res.signature, "struct {} (", it.name(db).display(db, edition));
                 it.fields(db)
             }
@@ -622,7 +622,7 @@ fn signature_help_for_record_<'db>(
         fields = variant.fields(db);
         let en = variant.parent_enum(db);
 
-        res.doc = en.docs(db).map(Documentation::into_owned);
+        res.doc = en.docs(db);
         format_to!(
             res.signature,
             "enum {}::{} {{ ",
@@ -639,12 +639,12 @@ fn signature_help_for_record_<'db>(
         match adt {
             hir::Adt::Struct(it) => {
                 fields = it.fields(db);
-                res.doc = it.docs(db).map(Documentation::into_owned);
+                res.doc = it.docs(db);
                 format_to!(res.signature, "struct {} {{ ", it.name(db).display(db, edition));
             }
             hir::Adt::Union(it) => {
                 fields = it.fields(db);
-                res.doc = it.docs(db).map(Documentation::into_owned);
+                res.doc = it.docs(db);
                 format_to!(res.signature, "union {} {{ ", it.name(db).display(db, edition));
             }
             _ => return None,
@@ -740,12 +740,12 @@ mod tests {
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
     ) -> (RootDatabase, FilePosition) {
         let mut database = RootDatabase::default();
-        let change_fixture = ChangeFixture::parse(ra_fixture);
+        let change_fixture = ChangeFixture::parse(&database, ra_fixture);
         database.apply_change(change_fixture.change);
         let (file_id, range_or_offset) =
             change_fixture.file_position.expect("expected a marker ($0)");
         let offset = range_or_offset.expect_offset();
-        let position = FilePosition { file_id: file_id.file_id(), offset };
+        let position = FilePosition { file_id: file_id.file_id(&database), offset };
         (database, position)
     }
 
diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
index ec8292968dbf7..e261928c413f4 100644
--- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
@@ -42,8 +42,7 @@ pub struct ReferenceData {
 
 #[derive(Debug)]
 pub struct TokenStaticData {
-    // FIXME: Make this have the lifetime of the database.
-    pub documentation: Option>,
+    pub documentation: Option<Documentation>,
     pub hover: Option,
     pub definition: Option,
     pub references: Vec,
@@ -110,7 +109,7 @@ fn documentation_for_definition(
     sema: &Semantics<'_, RootDatabase>,
     def: Definition,
     scope_node: &SyntaxNode,
-) -> Option> {
+) -> Option<Documentation> {
     let famous_defs = match &def {
         Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(scope_node)?.krate())),
         _ => None,
@@ -125,7 +124,6 @@ fn documentation_for_definition(
             })
             .to_display_target(sema.db),
     )
-    .map(Documentation::into_owned)
 }
 
 // FIXME: This is a weird function
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
index 782a73d20ca3a..66895cb0b053c 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
@@ -199,7 +199,7 @@ pub(crate) fn highlight(
     let sema = Semantics::new(db);
     let file_id = sema
         .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id));
+        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
 
     // Determine the root based on the given range.
     let (root, range_to_highlight) = {
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
index 597550b482cd2..75e46b8ebfdef 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
@@ -20,7 +20,7 @@ pub(crate) fn highlight_as_html_with_config(
     let sema = Semantics::new(db);
     let file_id = sema
         .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id));
+        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
     let file = sema.parse(file_id);
     let file = file.syntax();
     fn rainbowify(seed: u64) -> String {
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
index 26d2bb5e02884..7955f5ac0de99 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
@@ -1,13 +1,16 @@
 //! "Recursive" Syntax highlighting for code in doctests and fixtures.
 
-use hir::{EditionedFileId, HirFileId, InFile, Semantics};
+use std::mem;
+
+use either::Either;
+use hir::{EditionedFileId, HirFileId, InFile, Semantics, sym};
+use ide_db::range_mapper::RangeMapper;
 use ide_db::{
-    SymbolKind, defs::Definition, documentation::Documentation, range_mapper::RangeMapper,
-    rust_doc::is_rust_fence,
+    SymbolKind, defs::Definition, documentation::docs_with_rangemap, rust_doc::is_rust_fence,
 };
 use syntax::{
-    SyntaxNode, TextRange, TextSize,
-    ast::{self, IsString},
+    AstToken, NodeOrToken, SyntaxNode, TextRange, TextSize,
+    ast::{self, AstNode, IsString, QuoteOffsets},
 };
 
 use crate::{
@@ -93,79 +96,118 @@ pub(super) fn doc_comment(
         None => return,
     };
     let src_file_id: HirFileId = src_file_id.into();
-    let Some(docs) = attributes.hir_docs(sema.db) else { return };
 
     // Extract intra-doc links and emit highlights for them.
-    extract_definitions_from_docs(&Documentation::new_borrowed(docs.docs()))
-        .into_iter()
-        .filter_map(|(range, link, ns)| {
-            docs.find_ast_range(range)
-                .filter(|(mapping, _)| mapping.file_id == src_file_id)
-                .and_then(|(InFile { value: mapped_range, .. }, is_inner)| {
-                    Some(mapped_range)
-                        .zip(resolve_doc_path_for_def(sema.db, def, &link, ns, is_inner))
+    if let Some((docs, doc_mapping)) = docs_with_rangemap(sema.db, &attributes) {
+        extract_definitions_from_docs(&docs)
+            .into_iter()
+            .filter_map(|(range, link, ns)| {
+                doc_mapping
+                    .map(range)
+                    .filter(|(mapping, _)| mapping.file_id == src_file_id)
+                    .and_then(|(InFile { value: mapped_range, .. }, attr_id)| {
+                        Some(mapped_range).zip(resolve_doc_path_for_def(
+                            sema.db,
+                            def,
+                            &link,
+                            ns,
+                            attr_id.is_inner_attr(),
+                        ))
+                    })
+            })
+            .for_each(|(range, def)| {
+                hl.add(HlRange {
+                    range,
+                    highlight: module_def_to_hl_tag(def)
+                        | HlMod::Documentation
+                        | HlMod::Injected
+                        | HlMod::IntraDocLink,
+                    binding_hash: None,
                 })
-        })
-        .for_each(|(range, def)| {
-            hl.add(HlRange {
-                range,
-                highlight: module_def_to_hl_tag(def)
-                    | HlMod::Documentation
-                    | HlMod::Injected
-                    | HlMod::IntraDocLink,
-                binding_hash: None,
             })
-        });
+    }
 
     // Extract doc-test sources from the docs and calculate highlighting for them.
 
     let mut inj = RangeMapper::default();
     inj.add_unmapped("fn doctest() {\n");
 
+    let attrs_source_map = attributes.source_map(sema.db);
+
     let mut is_codeblock = false;
     let mut is_doctest = false;
 
-    let mut has_doctests = false;
-
-    let mut docs_offset = TextSize::new(0);
-    for mut line in docs.docs().split('\n') {
-        let mut line_docs_offset = docs_offset;
-        docs_offset += TextSize::of(line) + TextSize::of("\n");
-
-        match RUSTDOC_FENCES.into_iter().find_map(|fence| line.find(fence)) {
-            Some(idx) => {
-                is_codeblock = !is_codeblock;
-                // Check whether code is rust by inspecting fence guards
-                let guards = &line[idx + RUSTDOC_FENCE_LENGTH..];
-                let is_rust = is_rust_fence(guards);
-                is_doctest = is_codeblock && is_rust;
-                continue;
-            }
-            None if !is_doctest => continue,
-            None => (),
-        }
-
-        // lines marked with `#` should be ignored in output, we skip the `#` char
-        if line.starts_with('#') {
-            line_docs_offset += TextSize::of("#");
-            line = &line["#".len()..];
-        }
+    let mut new_comments = Vec::new();
+    let mut string;
 
-        let Some((InFile { file_id, value: mapped_range }, _)) =
-            docs.find_ast_range(TextRange::at(line_docs_offset, TextSize::of(line)))
-        else {
-            continue;
-        };
+    for attr in attributes.by_key(sym::doc).attrs() {
+        let InFile { file_id, value: src } = attrs_source_map.source_of(attr);
         if file_id != src_file_id {
             continue;
         }
+        let (line, range) = match &src {
+            Either::Left(it) => {
+                string = match find_doc_string_in_attr(attr, it) {
+                    Some(it) => it,
+                    None => continue,
+                };
+                let text = string.text();
+                let text_range = string.syntax().text_range();
+                match string.quote_offsets() {
+                    Some(QuoteOffsets { contents, .. }) => {
+                        (&text[contents - text_range.start()], contents)
+                    }
+                    None => (text, text_range),
+                }
+            }
+            Either::Right(comment) => {
+                let value = comment.prefix().len();
+                let range = comment.syntax().text_range();
+                (
+                    &comment.text()[value..],
+                    TextRange::new(range.start() + TextSize::try_from(value).unwrap(), range.end()),
+                )
+            }
+        };
+
+        let mut range_start = range.start();
+        for line in line.split('\n') {
+            let line_len = TextSize::from(line.len() as u32);
+            let prev_range_start = {
+                let next_range_start = range_start + line_len + TextSize::from(1);
+                mem::replace(&mut range_start, next_range_start)
+            };
+            let mut pos = TextSize::from(0);
+
+            match RUSTDOC_FENCES.into_iter().find_map(|fence| line.find(fence)) {
+                Some(idx) => {
+                    is_codeblock = !is_codeblock;
+                    // Check whether code is rust by inspecting fence guards
+                    let guards = &line[idx + RUSTDOC_FENCE_LENGTH..];
+                    let is_rust = is_rust_fence(guards);
+                    is_doctest = is_codeblock && is_rust;
+                    continue;
+                }
+                None if !is_doctest => continue,
+                None => (),
+            }
 
-        has_doctests = true;
-        inj.add(line, mapped_range);
-        inj.add_unmapped("\n");
+            // whitespace after comment is ignored
+            if let Some(ws) = line[pos.into()..].chars().next().filter(|c| c.is_whitespace()) {
+                pos += TextSize::of(ws);
+            }
+            // lines marked with `#` should be ignored in output, we skip the `#` char
+            if line[pos.into()..].starts_with('#') {
+                pos += TextSize::of('#');
+            }
+
+            new_comments.push(TextRange::at(prev_range_start, pos));
+            inj.add(&line[pos.into()..], TextRange::new(pos, line_len) + prev_range_start);
+            inj.add_unmapped("\n");
+        }
     }
 
-    if !has_doctests {
+    if new_comments.is_empty() {
         return; // no need to run an analysis on an empty file
     }
 
@@ -198,6 +240,37 @@ pub(super) fn doc_comment(
             }
         }
     }
+
+    for range in new_comments {
+        hl.add(HlRange {
+            range,
+            highlight: HlTag::Comment | HlMod::Documentation,
+            binding_hash: None,
+        });
+    }
+}
+
+fn find_doc_string_in_attr(attr: &hir::Attr, it: &ast::Attr) -> Option<ast::String> {
+    match it.expr() {
+        // #[doc = lit]
+        Some(ast::Expr::Literal(lit)) => match lit.kind() {
+            ast::LiteralKind::String(it) => Some(it),
+            _ => None,
+        },
+        // #[cfg_attr(..., doc = "", ...)]
+        None => {
+            // We gotta hunt the string token manually here
+            let text = attr.string_value()?.as_str();
+            // FIXME: We just pick the first string literal that has the same text as the doc attribute
+            // This means technically we might highlight the wrong one
+            it.syntax()
+                .descendants_with_tokens()
+                .filter_map(NodeOrToken::into_token)
+                .filter_map(ast::String::cast)
+                .find(|string| string.text().get(1..string.text().len() - 1) == Some(text))
+        }
+        _ => None,
+    }
 }
 
 fn module_def_to_hl_tag(def: Definition) -> HlTag {
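
The restored injection loop above walks the doc text line by line, toggling in and out of fenced blocks and roughly dropping the leading `#` of hidden doctest lines before handing the rest to the highlighter. A standalone sketch of just that fence bookkeeping, with the Rust-fence check reduced to the essentials (the real code uses `is_rust_fence` and also maps every kept line back to its source range):

```rust
// Minimal sketch of the fence bookkeeping in the restored loop: toggle
// `is_codeblock` on every ``` / ~~~ fence, treat the block as a doctest when
// the fence guard looks like Rust, and strip a leading `#` from hidden lines.
const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];

fn extract_doctest_lines(docs: &str) -> Vec<String> {
    let mut out = Vec::new();
    let (mut is_codeblock, mut is_doctest) = (false, false);
    for mut line in docs.lines() {
        if let Some(idx) = RUSTDOC_FENCES.iter().find_map(|fence| line.find(fence)) {
            is_codeblock = !is_codeblock;
            // Empty guard or `rust...` counts as Rust here; the real check is richer.
            let guards = line[idx + 3..].trim();
            is_doctest = is_codeblock && (guards.is_empty() || guards.starts_with("rust"));
            continue;
        }
        if !is_doctest {
            continue;
        }
        // Lines starting with `#` are hidden in rendered docs but still compiled.
        if let Some(rest) = line.strip_prefix('#') {
            line = rest;
        }
        out.push(line.to_owned());
    }
    out
}

fn main() {
    let docs = "Intro text\n```\nlet x = 1;\n# let hidden = 2;\n```\n```sh\nls\n```\n";
    assert_eq!(extract_doctest_lines(docs), vec!["let x = 1;", " let hidden = 2;"]);
}
```
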
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
index 53750ae0bac07..d00f279c82995 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
@@ -42,21 +42,21 @@
 
 
//! This is a module to test doc injection.
 //! ```
-//! fn test() {}
+//! fn test() {}
 //! ```
 
 //! Syntactic name ref highlighting testing
 //! ```rust
-//! extern crate self;
-//! extern crate other as otter;
-//! extern crate core;
-//! trait T { type Assoc; }
-//! fn f<Arg>() -> use<Arg> where (): T<Assoc = ()> {}
+//! extern crate self;
+//! extern crate other as otter;
+//! extern crate core;
+//! trait T { type Assoc; }
+//! fn f<Arg>() -> use<Arg> where (): T<Assoc = ()> {}
 //! ```
 mod outline_module;
 
 /// ```
-/// let _ = "early doctests should not go boom";
+/// let _ = "early doctests should not go boom";
 /// ```
 struct Foo {
     bar: bool,
@@ -65,15 +65,15 @@
 /// This is an impl of [`Foo`] with a code block.
 ///
 /// ```
-/// fn foo() {
+/// fn foo() {
 ///
-/// }
+/// }
 /// ```
 impl Foo {
     /// ```
-    /// let _ = "Call me
+    /// let _ = "Call me
     //    KILLER WHALE
-    ///     Ishmael.";
+    ///     Ishmael.";
     /// ```
     pub const bar: bool = true;
 
@@ -82,8 +82,8 @@
     /// # Examples
     ///
     /// ```
-    /// # #![allow(unused_mut)]
-    /// let mut foo: Foo = Foo::new();
+    /// # #![allow(unused_mut)]
+    /// let mut foo: Foo = Foo::new();
     /// ```
     pub const fn new() -> Foo {
         Foo { bar: true }
@@ -94,38 +94,38 @@
     /// # Examples
     ///
     /// ```
-    /// use x::y;
+    /// use x::y;
     ///
-    /// let foo = Foo::new();
+    /// let foo = Foo::new();
     ///
-    /// // calls bar on foo
-    /// assert!(foo.bar());
+    /// // calls bar on foo
+    /// assert!(foo.bar());
     ///
-    /// let bar = foo.bar || Foo::bar;
+    /// let bar = foo.bar || Foo::bar;
     ///
-    /// /* multi-line
-    ///        comment */
+    /// /* multi-line
+    ///        comment */
     ///
-    /// let multi_line_string = "Foo
-    ///   bar\n
-    ///          ";
+    /// let multi_line_string = "Foo
+    ///   bar\n
+    ///          ";
     ///
     /// ```
     ///
     /// ```rust,no_run
-    /// let foobar = Foo::new().bar();
+    /// let foobar = Foo::new().bar();
     /// ```
     ///
     /// ~~~rust,no_run
-    /// // code block with tilde.
-    /// let foobar = Foo::new().bar();
+    /// // code block with tilde.
+    /// let foobar = Foo::new().bar();
     /// ~~~
     ///
     /// ```
-    /// // functions
-    /// fn foo<T, const X: usize>(arg: i32) {
-    ///     let x: T = X;
-    /// }
+    /// // functions
+    /// fn foo<T, const X: usize>(arg: i32) {
+    ///     let x: T = X;
+    /// }
     /// ```
     ///
     /// ```sh
@@ -150,8 +150,8 @@
 }
 
 /// ```
-/// macro_rules! noop { ($expr:expr) => { $expr }}
-/// noop!(1);
+/// macro_rules! noop { ($expr:expr) => { $expr }}
+/// noop!(1);
 /// ```
 macro_rules! noop {
     ($expr:expr) => {
@@ -160,18 +160,18 @@
 }
 
 /// ```rust
-/// let _ = example(&[1, 2, 3]);
+/// let _ = example(&[1, 2, 3]);
 /// ```
 ///
 /// ```
-/// loop {}
+/// loop {}
 #[cfg_attr(not(feature = "false"), doc = "loop {}")]
 #[doc = "loop {}"]
 /// ```
 ///
 #[cfg_attr(feature = "alloc", doc = "```rust")]
 #[cfg_attr(not(feature = "alloc"), doc = "```ignore")]
-/// let _ = example(&alloc::vec![1, 2, 3]);
+/// let _ = example(&alloc::vec![1, 2, 3]);
 /// ```
 pub fn mix_and_match() {}
 
@@ -187,7 +187,7 @@
 /**
     Really, I don't get it
     ```rust
-    let _ = example(&[1, 2, 3]);
+    let _ = example(&[1, 2, 3]);
     ```
     [`block_comments`] tests these without indentation
 */
diff --git a/src/tools/rust-analyzer/crates/ide/src/typing.rs b/src/tools/rust-analyzer/crates/ide/src/typing.rs
index 0381865fed457..ed55ac5bf04b0 100644
--- a/src/tools/rust-analyzer/crates/ide/src/typing.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/typing.rs
@@ -75,10 +75,7 @@ pub(crate) fn on_char_typed(
     // FIXME: We are hitting the database here, if we are unlucky this call might block momentarily
     // causing the editor to feel sluggish!
     let edition = Edition::CURRENT_FIXME;
-    let editioned_file_id_wrapper = EditionedFileId::from_span_guess_origin(
-        db,
-        span::EditionedFileId::new(position.file_id, edition),
-    );
+    let editioned_file_id_wrapper = EditionedFileId::new(db, position.file_id, edition);
     let file = &db.parse(editioned_file_id_wrapper);
     let char_matches_position =
         file.tree().syntax().text().char_at(position.offset) == Some(char_typed);
diff --git a/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs b/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs
index 76a2802d294c0..fdc583a15cc71 100644
--- a/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs
@@ -51,7 +51,7 @@ use ide_db::text_edit::TextEdit;
 // ![On Enter](https://user-images.githubusercontent.com/48062697/113065578-04c21800-91b1-11eb-82b8-22b8c481e645.gif)
 pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option {
     let editioned_file_id_wrapper =
-        ide_db::base_db::EditionedFileId::current_edition_guess_origin(db, position.file_id);
+        ide_db::base_db::EditionedFileId::current_edition(db, position.file_id);
     let parse = db.parse(editioned_file_id_wrapper);
     let file = parse.tree();
     let token = file.syntax().token_at_offset(position.offset).left_biased()?;
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs b/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs
index c9a2f31696f45..2cd751463bdb8 100644
--- a/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs
@@ -12,6 +12,6 @@ pub(crate) fn view_item_tree(db: &RootDatabase, file_id: FileId) -> String {
     let sema = Semantics::new(db);
     let file_id = sema
         .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(db, file_id));
+        .unwrap_or_else(|| EditionedFileId::current_edition(db, file_id));
     db.file_item_tree(file_id.into()).pretty_print(db, file_id.edition(db))
 }
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
index 54ad9603ba037..de24bc09ff0fa 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -145,9 +145,7 @@ impl flags::AnalysisStats {
                     if !source_root.is_library || self.with_deps {
                         let length = db.file_text(file_id).text(db).lines().count();
                         let item_stats = db
-                            .file_item_tree(
-                                EditionedFileId::current_edition_guess_origin(db, file_id).into(),
-                            )
+                            .file_item_tree(EditionedFileId::current_edition(db, file_id).into())
                             .item_tree_stats()
                             .into();
 
@@ -157,9 +155,7 @@ impl flags::AnalysisStats {
                     } else {
                         let length = db.file_text(file_id).text(db).lines().count();
                         let item_stats = db
-                            .file_item_tree(
-                                EditionedFileId::current_edition_guess_origin(db, file_id).into(),
-                            )
+                            .file_item_tree(EditionedFileId::current_edition(db, file_id).into())
                             .item_tree_stats()
                             .into();
 
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
index 92bb2c1ce4fa1..37f83f6dee678 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
@@ -514,12 +514,12 @@ mod test {
 
     fn position(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (AnalysisHost, FilePosition) {
         let mut host = AnalysisHost::default();
-        let change_fixture = ChangeFixture::parse(ra_fixture);
+        let change_fixture = ChangeFixture::parse(host.raw_database(), ra_fixture);
         host.raw_database_mut().apply_change(change_fixture.change);
         let (file_id, range_or_offset) =
             change_fixture.file_position.expect("expected a marker (<|>)");
         let offset = range_or_offset.expect_offset();
-        let position = FilePosition { file_id: file_id.file_id(), offset };
+        let position = FilePosition { file_id: file_id.file_id(host.raw_database()), offset };
         (host, position)
     }
 
@@ -870,7 +870,7 @@ pub mod example_mod {
         let s = "/// foo\nfn bar() {}";
 
         let mut host = AnalysisHost::default();
-        let change_fixture = ChangeFixture::parse(s);
+        let change_fixture = ChangeFixture::parse(host.raw_database(), s);
         host.raw_database_mut().apply_change(change_fixture.change);
 
         let analysis = host.analysis();
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
index cc2ab0f07ca0a..e3e3a143de03a 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
@@ -73,7 +73,7 @@ impl flags::Search {
                 let sr = db.source_root(root).source_root(db);
                 for file_id in sr.iter() {
                     for debug_info in match_finder.debug_where_text_equal(
-                        EditionedFileId::current_edition_guess_origin(db, file_id),
+                        EditionedFileId::current_edition(db, file_id),
                         debug_snippet,
                     ) {
                         println!("{debug_info:#?}");
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs
index 2cb0fe9eefadf..0362e13b88b7b 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs
@@ -141,7 +141,7 @@ fn all_unresolved_references(
 ) -> Vec {
     let file_id = sema
         .attach_first_edition(file_id)
-        .unwrap_or_else(|| EditionedFileId::current_edition_guess_origin(sema.db, file_id));
+        .unwrap_or_else(|| EditionedFileId::current_edition(sema.db, file_id));
     let file = sema.parse(file_id);
     let root = file.syntax();
 
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
index 5a42cbd933f99..04b20033062eb 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -119,7 +119,7 @@ pub(crate) fn diagnostic_severity(severity: Severity) -> lsp_types::DiagnosticSe
     }
 }
 
-pub(crate) fn documentation(documentation: Documentation<'_>) -> lsp_types::Documentation {
+pub(crate) fn documentation(documentation: Documentation) -> lsp_types::Documentation {
     let value = format_docs(&documentation);
     let markup_content = lsp_types::MarkupContent { kind: lsp_types::MarkupKind::Markdown, value };
     lsp_types::Documentation::MarkupContent(markup_content)
@@ -1970,7 +1970,7 @@ pub(crate) fn markup_content(
         ide::HoverDocFormat::Markdown => lsp_types::MarkupKind::Markdown,
         ide::HoverDocFormat::PlainText => lsp_types::MarkupKind::PlainText,
     };
-    let value = format_docs(&Documentation::new_owned(markup.into()));
+    let value = format_docs(&Documentation::new(markup.into()));
     lsp_types::MarkupContent { kind, value }
 }
 
diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs
index 2d1955d1f651e..4e525be3fe3c8 100644
--- a/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs
@@ -1,6 +1,6 @@
 //! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
 
-use std::{collections::VecDeque, fmt, hash::Hash};
+use std::{fmt, hash::Hash};
 
 use intern::Symbol;
 use rustc_hash::{FxHashMap, FxHashSet};
@@ -102,34 +102,26 @@ where
     SpanData: Copy + fmt::Debug,
     SpanMap: SpanMapper>,
 {
-    let mut c =
-        Converter::new(node, map, Default::default(), Default::default(), span, mode, |_, _| {
-            (true, Vec::new())
-        });
+    let mut c = Converter::new(node, map, Default::default(), Default::default(), span, mode);
     convert_tokens(&mut c)
 }
 
 /// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the
 /// subtree's spans. Additionally using the append and remove parameters, the additional tokens can
 /// be injected or hidden from the output.
-pub fn syntax_node_to_token_tree_modified(
+pub fn syntax_node_to_token_tree_modified(
     node: &SyntaxNode,
     map: SpanMap,
     append: FxHashMap>>>,
     remove: FxHashSet,
     call_site: SpanData,
     mode: DocCommentDesugarMode,
-    on_enter: OnEvent,
 ) -> tt::TopSubtree>
 where
     SpanMap: SpanMapper>,
     SpanData: Copy + fmt::Debug,
-    OnEvent: FnMut(
-        &mut PreorderWithTokens,
-        &WalkEvent,
-    ) -> (bool, Vec>>),
 {
-    let mut c = Converter::new(node, map, append, remove, call_site, mode, on_enter);
+    let mut c = Converter::new(node, map, append, remove, call_site, mode);
     convert_tokens(&mut c)
 }
 
@@ -632,9 +624,9 @@ where
     }
 }
 
-struct Converter {
+struct Converter {
     current: Option,
-    current_leaves: VecDeque>,
+    current_leaves: Vec>,
     preorder: PreorderWithTokens,
     range: TextRange,
     punct_offset: Option<(SyntaxToken, TextSize)>,
@@ -644,13 +636,9 @@ struct Converter {
     remove: FxHashSet,
     call_site: S,
     mode: DocCommentDesugarMode,
-    on_event: OnEvent,
 }
 
-impl Converter
-where
-    OnEvent: FnMut(&mut PreorderWithTokens, &WalkEvent) -> (bool, Vec>),
-{
+impl Converter {
     fn new(
         node: &SyntaxNode,
         map: SpanMap,
@@ -658,9 +646,8 @@ where
         remove: FxHashSet,
         call_site: S,
         mode: DocCommentDesugarMode,
-        on_enter: OnEvent,
     ) -> Self {
-        let mut converter = Converter {
+        let mut this = Converter {
             current: None,
             preorder: node.preorder_with_tokens(),
             range: node.text_range(),
@@ -669,21 +656,16 @@ where
             append,
             remove,
             call_site,
-            current_leaves: VecDeque::new(),
+            current_leaves: vec![],
             mode,
-            on_event: on_enter,
         };
-        converter.current = converter.next_token();
-        converter
+        let first = this.next_token();
+        this.current = first;
+        this
     }
 
     fn next_token(&mut self) -> Option {
         while let Some(ev) = self.preorder.next() {
-            let (keep_event, insert_leaves) = (self.on_event)(&mut self.preorder, &ev);
-            self.current_leaves.extend(insert_leaves);
-            if !keep_event {
-                continue;
-            }
             match ev {
                 WalkEvent::Enter(token) => {
                     if self.remove.contains(&token) {
@@ -693,9 +675,10 @@ where
                             }
                             node => {
                                 self.preorder.skip_subtree();
-                                if let Some(v) = self.append.remove(&node) {
+                                if let Some(mut v) = self.append.remove(&node) {
+                                    v.reverse();
                                     self.current_leaves.extend(v);
-                                    continue;
+                                    return None;
                                 }
                             }
                         }
@@ -704,9 +687,10 @@ where
                     }
                 }
                 WalkEvent::Leave(ele) => {
-                    if let Some(v) = self.append.remove(&ele) {
+                    if let Some(mut v) = self.append.remove(&ele) {
+                        v.reverse();
                         self.current_leaves.extend(v);
-                        continue;
+                        return None;
                     }
                 }
             }
@@ -731,8 +715,8 @@ impl SynToken {
     }
 }
 
-impl SrcToken, S> for SynToken {
-    fn kind(&self, _ctx: &Converter) -> SyntaxKind {
+impl SrcToken, S> for SynToken {
+    fn kind(&self, _ctx: &Converter) -> SyntaxKind {
         match self {
             SynToken::Ordinary(token) => token.kind(),
             SynToken::Punct { token, offset: i } => {
@@ -744,14 +728,14 @@ impl SrcToken, S> for SynTok
             }
         }
     }
-    fn to_char(&self, _ctx: &Converter) -> Option {
+    fn to_char(&self, _ctx: &Converter) -> Option {
         match self {
             SynToken::Ordinary(_) => None,
             SynToken::Punct { token: it, offset: i } => it.text().chars().nth(*i),
             SynToken::Leaf(_) => None,
         }
     }
-    fn to_text(&self, _ctx: &Converter) -> SmolStr {
+    fn to_text(&self, _ctx: &Converter) -> SmolStr {
         match self {
             SynToken::Ordinary(token) | SynToken::Punct { token, offset: _ } => token.text().into(),
             SynToken::Leaf(_) => {
@@ -768,11 +752,10 @@ impl SrcToken, S> for SynTok
     }
 }
 
-impl TokenConverter for Converter
+impl TokenConverter for Converter
 where
     S: Copy,
     SpanMap: SpanMapper,
-    OnEvent: FnMut(&mut PreorderWithTokens, &WalkEvent) -> (bool, Vec>),
 {
     type Token = SynToken;
     fn convert_doc_comment(
@@ -798,7 +781,10 @@ where
             ));
         }
 
-        if let Some(leaf) = self.current_leaves.pop_front() {
+        if let Some(leaf) = self.current_leaves.pop() {
+            if self.current_leaves.is_empty() {
+                self.current = self.next_token();
+            }
             return Some((SynToken::Leaf(leaf), TextRange::empty(TextSize::new(0))));
         }
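
The `Converter` change above swaps the pending-leaves buffer from a `VecDeque` drained with `pop_front` to a plain `Vec` whose batches are reversed on insertion and drained with `pop`. A tiny standalone sketch confirming the two drain orders match:

```rust
use std::collections::VecDeque;

// The two draining strategies seen before/after this hunk yield the same
// order: pushing a reversed batch onto a Vec and popping from the back is
// equivalent to pushing the batch onto a VecDeque and popping from the front.
fn main() {
    let batch = ["a", "b", "c"];

    let mut deque: VecDeque<&str> = VecDeque::new();
    deque.extend(batch); // old shape: extend as-is, drain with pop_front
    let via_deque: Vec<&str> = std::iter::from_fn(|| deque.pop_front()).collect();

    let mut stack: Vec<&str> = Vec::new();
    let mut reversed = batch;
    reversed.reverse(); // new shape: reverse the batch first...
    stack.extend(reversed);
    let via_vec: Vec<&str> = std::iter::from_fn(|| stack.pop()).collect(); // ...then pop from the back

    assert_eq!(via_deque, via_vec);
    assert_eq!(via_deque, ["a", "b", "c"]);
}
```
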
 
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast.rs b/src/tools/rust-analyzer/crates/syntax/src/ast.rs
index 5d67fd4491755..aea99a4389b9b 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast.rs
@@ -26,8 +26,7 @@ pub use self::{
     generated::{nodes::*, tokens::*},
     node_ext::{
         AttrKind, FieldKind, Macro, NameLike, NameOrNameRef, PathSegmentKind, SelfParamKind,
-        SlicePatComponents, StructKind, TokenTreeChildren, TypeBoundKind, TypeOrConstParam,
-        VisibilityKind,
+        SlicePatComponents, StructKind, TypeBoundKind, TypeOrConstParam, VisibilityKind,
     },
     operators::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp},
     token_ext::{
@@ -36,7 +35,6 @@ pub use self::{
     traits::{
         AttrDocCommentIter, DocCommentIter, HasArgList, HasAttrs, HasDocComments, HasGenericArgs,
         HasGenericParams, HasLoopBody, HasModuleItem, HasName, HasTypeBounds, HasVisibility,
-        attrs_including_inner,
     },
 };
 
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
index 901d17bb14911..af741d100f680 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
@@ -10,7 +10,7 @@ use parser::SyntaxKind;
 use rowan::{GreenNodeData, GreenTokenData};
 
 use crate::{
-    NodeOrToken, SmolStr, SyntaxElement, SyntaxElementChildren, SyntaxToken, T, TokenText,
+    NodeOrToken, SmolStr, SyntaxElement, SyntaxToken, T, TokenText,
     ast::{
         self, AstNode, AstToken, HasAttrs, HasGenericArgs, HasGenericParams, HasName,
         HasTypeBounds, SyntaxNode, support,
@@ -1114,39 +1114,3 @@ impl ast::OrPat {
             .filter(|it| it.kind() == T![|])
     }
 }
-
-/// An iterator over the elements in an [`ast::TokenTree`].
-///
-/// Does not yield trivia or the delimiters.
-#[derive(Clone)]
-pub struct TokenTreeChildren {
-    iter: SyntaxElementChildren,
-}
-
-impl TokenTreeChildren {
-    #[inline]
-    pub fn new(tt: &ast::TokenTree) -> Self {
-        let mut iter = tt.syntax.children_with_tokens();
-        iter.next(); // Bump the opening delimiter.
-        Self { iter }
-    }
-}
-
-impl Iterator for TokenTreeChildren {
-    type Item = NodeOrToken<ast::TokenTree, SyntaxToken>;
-
-    #[inline]
-    fn next(&mut self) -> Option<Self::Item> {
-        self.iter.find_map(|item| match item {
-            NodeOrToken::Node(node) => ast::TokenTree::cast(node).map(NodeOrToken::Node),
-            NodeOrToken::Token(token) => {
-                let kind = token.kind();
-                (!matches!(
-                    kind,
-                    SyntaxKind::WHITESPACE | SyntaxKind::COMMENT | T![')'] | T![']'] | T!['}']
-                ))
-                .then_some(NodeOrToken::Token(token))
-            }
-        })
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
index 83ab87c1c687e..e1a9f3ac03418 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
@@ -40,8 +40,8 @@ impl ast::Comment {
     }
 
     /// Returns the textual content of a doc comment node as a single string with prefix and suffix
-    /// removed, plus the offset of the returned string from the beginning of the comment.
-    pub fn doc_comment(&self) -> Option<(&str, TextSize)> {
+    /// removed.
+    pub fn doc_comment(&self) -> Option<&str> {
         let kind = self.kind();
         match kind {
             CommentKind { shape, doc: Some(_) } => {
@@ -52,7 +52,7 @@ impl ast::Comment {
                 } else {
                     text
                 };
-                Some((text, TextSize::of(prefix)))
+                Some(text)
             }
             _ => None,
         }
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
index 2f4109a2c9760..5290f32dd27db 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
@@ -4,9 +4,8 @@
 use either::Either;
 
 use crate::{
-    SyntaxElement, SyntaxNode, SyntaxToken, T,
+    SyntaxElement, SyntaxToken, T,
     ast::{self, AstChildren, AstNode, AstToken, support},
-    match_ast,
     syntax_node::SyntaxElementChildren,
 };
 
@@ -77,44 +76,34 @@ pub trait HasAttrs: AstNode {
         self.attrs().filter_map(|x| x.as_simple_atom()).any(|x| x == atom)
     }
 
-    /// This may return the same node as called with (with `SourceFile`). The caller has the responsibility
-    /// to avoid duplicate attributes.
-    fn inner_attributes_node(&self) -> Option<SyntaxNode> {
-        let syntax = self.syntax();
-        Some(match_ast! {
-            match syntax {
-                // A `SourceFile` contains the inner attributes of itself.
-                ast::SourceFile(_) => syntax.clone(),
-                ast::ExternBlock(it) => it.extern_item_list()?.syntax().clone(),
-                ast::Fn(it) => it.body()?.stmt_list()?.syntax().clone(),
-                ast::MatchExpr(it) => it.match_arm_list()?.syntax().clone(),
-                ast::Impl(it) => it.assoc_item_list()?.syntax().clone(),
-                ast::Trait(it) => it.assoc_item_list()?.syntax().clone(),
-                ast::Module(it) => it.item_list()?.syntax().clone(),
-                ast::BlockExpr(it) => {
-                    if !it.may_carry_attributes() {
-                        return None;
-                    }
-                    syntax.clone()
-                },
-                _ => return None,
-            }
-        })
+    /// Returns all attributes of this node, including inner attributes that may not be directly under this node
+    /// but under a child.
+    fn attrs_including_inner(self) -> impl Iterator<Item = ast::Attr>
+    where
+        Self: Sized,
+    {
+        let inner_attrs_node = if let Some(it) =
+            support::child::(self.syntax()).and_then(|it| it.stmt_list())
+        {
+            Some(it.syntax)
+        } else if let Some(it) = support::child::(self.syntax()) {
+            Some(it.syntax)
+        } else if let Some(it) = support::child::(self.syntax()) {
+            Some(it.syntax)
+        } else if let Some(it) = support::child::(self.syntax()) {
+            Some(it.syntax)
+        } else if let Some(it) = support::child::(self.syntax()) {
+            Some(it.syntax)
+        } else if let Some(it) = support::child::(self.syntax()) {
+            Some(it.syntax)
+        } else {
+            None
+        };
+
+        self.attrs().chain(inner_attrs_node.into_iter().flat_map(|it| support::children(&it)))
     }
 }
 
-/// Returns all attributes of this node, including inner attributes that may not be directly under this node
-/// but under a child.
-pub fn attrs_including_inner(owner: &dyn HasAttrs) -> impl Iterator<Item = ast::Attr> + Clone {
-    owner.attrs().filter(|attr| attr.kind().is_outer()).chain(
-        owner
-            .inner_attributes_node()
-            .into_iter()
-            .flat_map(|node| support::children::<ast::Attr>(&node))
-            .filter(|attr| attr.kind().is_inner()),
-    )
-}
-
 pub trait HasDocComments: HasAttrs {
     fn doc_comments(&self) -> DocCommentIter {
         DocCommentIter { iter: self.syntax().children_with_tokens() }
@@ -129,7 +118,7 @@ impl DocCommentIter {
     #[cfg(test)]
     pub fn doc_comment_text(self) -> Option<String> {
         let docs = itertools::Itertools::join(
-            &mut self.filter_map(|comment| comment.doc_comment().map(|it| it.0.to_owned())),
+            &mut self.filter_map(|comment| comment.doc_comment().map(ToOwned::to_owned)),
             "\n",
         );
         if docs.is_empty() { None } else { Some(docs) }
@@ -162,7 +151,7 @@ impl AttrDocCommentIter {
 impl Iterator for AttrDocCommentIter {
     type Item = Either<ast::Attr, ast::Comment>;
     fn next(&mut self) -> Option {
-        self.iter.find_map(|el| match el {
+        self.iter.by_ref().find_map(|el| match el {
             SyntaxElement::Node(node) => ast::Attr::cast(node).map(Either::Left),
             SyntaxElement::Token(tok) => {
                 ast::Comment::cast(tok).filter(ast::Comment::is_doc).map(Either::Right)
diff --git a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs
index 2b05add55216d..aefe81f83e294 100644
--- a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs
@@ -5,7 +5,7 @@ use base_db::target::TargetData;
 use base_db::{
     Crate, CrateDisplayName, CrateGraphBuilder, CrateName, CrateOrigin, CrateWorkspaceData,
     DependencyBuilder, Env, FileChange, FileSet, FxIndexMap, LangCrateOrigin, SourceDatabase,
-    SourceRoot, Version, VfsPath,
+    SourceRoot, Version, VfsPath, salsa,
 };
 use cfg::CfgOptions;
 use hir_expand::{
@@ -37,11 +37,10 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
     ) -> (Self, EditionedFileId) {
         let mut db = Self::default();
-        let fixture = ChangeFixture::parse(ra_fixture);
+        let fixture = ChangeFixture::parse(&db, ra_fixture);
         fixture.change.apply(&mut db);
         assert_eq!(fixture.files.len(), 1, "Multiple file found in the fixture");
-        let file = EditionedFileId::from_span_guess_origin(&db, fixture.files[0]);
-        (db, file)
+        (db, fixture.files[0])
     }
 
     #[track_caller]
@@ -49,21 +48,16 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
     ) -> (Self, Vec<EditionedFileId>) {
         let mut db = Self::default();
-        let fixture = ChangeFixture::parse(ra_fixture);
+        let fixture = ChangeFixture::parse(&db, ra_fixture);
         fixture.change.apply(&mut db);
         assert!(fixture.file_position.is_none());
-        let files = fixture
-            .files
-            .into_iter()
-            .map(|file| EditionedFileId::from_span_guess_origin(&db, file))
-            .collect();
-        (db, files)
+        (db, fixture.files)
     }
 
     #[track_caller]
     fn with_files(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> Self {
         let mut db = Self::default();
-        let fixture = ChangeFixture::parse(ra_fixture);
+        let fixture = ChangeFixture::parse(&db, ra_fixture);
         fixture.change.apply(&mut db);
         assert!(fixture.file_position.is_none());
         db
@@ -75,8 +69,12 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
         proc_macros: Vec<(String, ProcMacro)>,
     ) -> Self {
         let mut db = Self::default();
-        let fixture =
-            ChangeFixture::parse_with_proc_macros(ra_fixture, MiniCore::RAW_SOURCE, proc_macros);
+        let fixture = ChangeFixture::parse_with_proc_macros(
+            &db,
+            ra_fixture,
+            MiniCore::RAW_SOURCE,
+            proc_macros,
+        );
         fixture.change.apply(&mut db);
         assert!(fixture.file_position.is_none());
         db
@@ -101,13 +99,12 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
     ) -> (Self, EditionedFileId, RangeOrOffset) {
         let mut db = Self::default();
-        let fixture = ChangeFixture::parse(ra_fixture);
+        let fixture = ChangeFixture::parse(&db, ra_fixture);
         fixture.change.apply(&mut db);
 
         let (file_id, range_or_offset) = fixture
             .file_position
             .expect("Could not find file position in fixture. Did you forget to add an `$0`?");
-        let file_id = EditionedFileId::from_span_guess_origin(&db, file_id);
         (db, file_id, range_or_offset)
     }
 
@@ -119,9 +116,9 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
 impl WithFixture for DB {}
 
 pub struct ChangeFixture {
-    pub file_position: Option<(span::EditionedFileId, RangeOrOffset)>,
+    pub file_position: Option<(EditionedFileId, RangeOrOffset)>,
     pub file_lines: Vec,
-    pub files: Vec<span::EditionedFileId>,
+    pub files: Vec<EditionedFileId>,
     pub change: ChangeWithProcMacros,
     pub sysroot_files: Vec,
 }
@@ -129,11 +126,15 @@ pub struct ChangeFixture {
 const SOURCE_ROOT_PREFIX: &str = "/";
 
 impl ChangeFixture {
-    pub fn parse(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> ChangeFixture {
-        Self::parse_with_proc_macros(ra_fixture, MiniCore::RAW_SOURCE, Vec::new())
+    pub fn parse(
+        db: &dyn salsa::Database,
+        #[rust_analyzer::rust_fixture] ra_fixture: &str,
+    ) -> ChangeFixture {
+        Self::parse_with_proc_macros(db, ra_fixture, MiniCore::RAW_SOURCE, Vec::new())
     }
 
     pub fn parse_with_proc_macros(
+        db: &dyn salsa::Database,
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
         minicore_raw: &str,
         mut proc_macro_defs: Vec<(String, ProcMacro)>,
@@ -201,7 +202,7 @@ impl ChangeFixture {
             let meta = FileMeta::from_fixture(entry, current_source_root_kind);
             if let Some(range_or_offset) = range_or_offset {
                 file_position =
-                    Some((span::EditionedFileId::new(file_id, meta.edition), range_or_offset));
+                    Some((EditionedFileId::new(db, file_id, meta.edition), range_or_offset));
             }
 
             assert!(meta.path.starts_with(SOURCE_ROOT_PREFIX));
@@ -258,7 +259,7 @@ impl ChangeFixture {
             source_change.change_file(file_id, Some(text));
             let path = VfsPath::new_virtual_path(meta.path);
             file_set.insert(file_id, path);
-            files.push(span::EditionedFileId::new(file_id, meta.edition));
+            files.push(EditionedFileId::new(db, file_id, meta.edition));
             file_id = FileId::from_raw(file_id.index() + 1);
         }
 

From 40e36a0687b40d9e1790bc6fa20ed7320ad2daac Mon Sep 17 00:00:00 2001
From: Shoyu Vanilla 
Date: Wed, 22 Oct 2025 01:58:02 +0900
Subject: [PATCH 52/76] Bump rustc deps

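The bump to ra-ap-rustc 0.137 mostly adapts to `rustc_type_ir`'s new
`BoundVarIndexKind`, which splits a bound variable's index into a
binder-relative de Bruijn index and a dedicated canonical form, plus the new
`new_canonical_bound` constructors used by the canonicalizer. Below is a
minimal sketch of the resulting match shape, using simplified stand-in types
rather than the real `ra-ap-rustc_type_ir` definitions:

```rust
// Simplified stand-ins for illustration only; the real types live in
// `ra-ap-rustc_type_ir` and carry more data.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct DebruijnIndex(u32);

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct BoundVar(u32);

// Index of a bound variable: either relative to an enclosing binder, or a
// canonical variable introduced during canonicalization.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum BoundVarIndexKind {
    Bound(DebruijnIndex),
    Canonical,
}

#[derive(Clone, Copy, Debug)]
enum TyKind {
    Bound(BoundVarIndexKind, BoundVar),
    // other variants elided
}

// Mirrors the two-arm matches added in display.rs and fold.rs: code that used
// to match `TyKind::Bound(debruijn, var)` now distinguishes the index kinds.
fn describe(ty: TyKind) -> String {
    match ty {
        TyKind::Bound(BoundVarIndexKind::Bound(debruijn), var) => {
            format!("?{}.{}", debruijn.0, var.0)
        }
        TyKind::Bound(BoundVarIndexKind::Canonical, var) => format!("?c.{}", var.0),
    }
}

fn main() {
    let bound = TyKind::Bound(BoundVarIndexKind::Bound(DebruijnIndex(0)), BoundVar(1));
    let canonical = TyKind::Bound(BoundVarIndexKind::Canonical, BoundVar(2));
    assert_eq!(describe(bound), "?0.1");
    assert_eq!(describe(canonical), "?c.2");
}
```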
---
 src/tools/rust-analyzer/Cargo.lock            |  44 +++----
 src/tools/rust-analyzer/Cargo.toml            |  16 +--
 .../crates/hir-ty/src/display.rs              |  17 ++-
 .../rust-analyzer/crates/hir-ty/src/lib.rs    |  12 +-
 .../rust-analyzer/crates/hir-ty/src/lower.rs  |  16 +--
 .../crates/hir-ty/src/next_solver/consts.rs   |  50 ++++----
 .../crates/hir-ty/src/next_solver/fold.rs     |  19 ++-
 .../infer/canonical/canonicalizer.rs          |  51 +++-----
 .../hir-ty/src/next_solver/infer/select.rs    |   8 +-
 .../crates/hir-ty/src/next_solver/interner.rs |  25 ++--
 .../crates/hir-ty/src/next_solver/opaques.rs  |  79 +------------
 .../crates/hir-ty/src/next_solver/region.rs   |  30 ++++-
 .../crates/hir-ty/src/next_solver/ty.rs       | 109 ++++++++----------
 13 files changed, 220 insertions(+), 256 deletions(-)

diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock
index ea8d1a781dccb..b557b10e5c77f 100644
--- a/src/tools/rust-analyzer/Cargo.lock
+++ b/src/tools/rust-analyzer/Cargo.lock
@@ -1824,9 +1824,9 @@ dependencies = [
 
 [[package]]
 name = "ra-ap-rustc_abi"
-version = "0.133.0"
+version = "0.137.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c063a7fef3c49d03837ee9a5d988aad83630c3460b03b32355c279d3fafa7d07"
+checksum = "a4ce5c9ea794353e02beae390c4674f74ffb23a2ad9de763469fdcef5c1026ef"
 dependencies = [
  "bitflags 2.9.4",
  "ra-ap-rustc_hashes",
@@ -1836,24 +1836,24 @@ dependencies = [
 
 [[package]]
 name = "ra-ap-rustc_ast_ir"
-version = "0.133.0"
+version = "0.137.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a210dbd77e794b33ff17d2d15750dee44eeabd1330685d69a6bad272d515892a"
+checksum = "1696b77af9bbfe1fcc7a09c907561061c6ef4c8bd6d5f1675b927bc62d349103"
 
 [[package]]
 name = "ra-ap-rustc_hashes"
-version = "0.133.0"
+version = "0.137.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dea031ea45bb92cd346ed222b35c812e355f304183096ee91bb437b3813c6348"
+checksum = "c055d8b0d8a592d8cf9547495189f52c1ee5c691d28df1628253a816214e8521"
 dependencies = [
  "rustc-stable-hash",
 ]
 
 [[package]]
 name = "ra-ap-rustc_index"
-version = "0.133.0"
+version = "0.137.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "db0114f842b20cba9beb2d9002ca31ae706b47f28ba2d6a49cbf9fd65fa72b9d"
+checksum = "a08a03e3d4a452144b68f48130eda3a2894d4d79e99ddb44bdb4e0ab8c384e10"
 dependencies = [
  "ra-ap-rustc_index_macros",
  "smallvec",
@@ -1861,9 +1861,9 @@ dependencies = [
 
 [[package]]
 name = "ra-ap-rustc_index_macros"
-version = "0.133.0"
+version = "0.137.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bc738a5bb06fb3893725fbeb3640ff1822bb2aae3f416c4a49f0a706ba89d1cc"
+checksum = "a1e0446b4d65a8ce19d8fd12826c4bf2365ffa4b8fe0ee94daf5968fe36e920c"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1872,9 +1872,9 @@ dependencies = [
 
 [[package]]
 name = "ra-ap-rustc_lexer"
-version = "0.133.0"
+version = "0.137.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "31c35b3d812cfc101d3f534640c13f24c0ec50ee2249685e4c20b2868609c9ee"
+checksum = "ac80365383a3c749f38af567fdcfaeff3fa6ea5df3846852abbce73e943921b9"
 dependencies = [
  "memchr",
  "unicode-properties",
@@ -1883,9 +1883,9 @@ dependencies = [
 
 [[package]]
 name = "ra-ap-rustc_next_trait_solver"
-version = "0.133.0"
+version = "0.137.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e7b0fa6fb8e0717ebd0836f8de4a6efc954fca1a8652980fd2584dbe448c7d95"
+checksum = "a39b419d2d6f7fdec7e0981b7fb7d5beb5dda7140064f1199704ec9dadbb6f73"
 dependencies = [
  "derive-where",
  "ra-ap-rustc_index",
@@ -1896,9 +1896,9 @@ dependencies = [
 
 [[package]]
 name = "ra-ap-rustc_parse_format"
-version = "0.133.0"
+version = "0.137.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "33d01bad23470cc749ef607476890aabcc8993ca3ef87d4241d0f6a08c6f9402"
+checksum = "b743b0c8f795842e41b1720bbc5af6e896129fb9acf04e9785774bfb0dc5947c"
 dependencies = [
  "ra-ap-rustc_lexer",
  "rustc-literal-escaper 0.0.5",
@@ -1906,9 +1906,9 @@ dependencies = [
 
 [[package]]
 name = "ra-ap-rustc_pattern_analysis"
-version = "0.133.0"
+version = "0.137.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a181cf7943dc16e888046584d6172be95818811b25d695dbacbb4dd71973cc3"
+checksum = "cf944dce80137195528f89a576f70153c2060a6f8ca49c3fa9f55f9da14ab937"
 dependencies = [
  "ra-ap-rustc_index",
  "rustc-hash 2.1.1",
@@ -1919,9 +1919,9 @@ dependencies = [
 
 [[package]]
 name = "ra-ap-rustc_type_ir"
-version = "0.133.0"
+version = "0.137.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "87c99f33be18d9e50cefef5442822da1f0b416e9a17a483879a9704e08a6a6e6"
+checksum = "1bfe2722b20bc889a9d7711bd3a1f4f7b082940491241615aa643c17e0deffec"
 dependencies = [
  "arrayvec",
  "bitflags 2.9.4",
@@ -1939,9 +1939,9 @@ dependencies = [
 
 [[package]]
 name = "ra-ap-rustc_type_ir_macros"
-version = "0.133.0"
+version = "0.137.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "77b162d65e058abfc058e6b67ae68156cc282fbd78da148c1a7ec77b4230661e"
+checksum = "6fad1527df26aaa77367393fae86f42818b33e02b3737a19f3846d1c7671e7f9"
 dependencies = [
  "proc-macro2",
  "quote",
diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml
index 8a108974681a1..ecb2686a2277e 100644
--- a/src/tools/rust-analyzer/Cargo.toml
+++ b/src/tools/rust-analyzer/Cargo.toml
@@ -86,14 +86,14 @@ vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
 vfs = { path = "./crates/vfs", version = "0.0.0" }
 edition = { path = "./crates/edition", version = "0.0.0" }
 
-ra-ap-rustc_lexer = { version = "0.133", default-features = false }
-ra-ap-rustc_parse_format = { version = "0.133", default-features = false }
-ra-ap-rustc_index = { version = "0.133", default-features = false }
-ra-ap-rustc_abi = { version = "0.133", default-features = false }
-ra-ap-rustc_pattern_analysis = { version = "0.133", default-features = false }
-ra-ap-rustc_ast_ir = { version = "0.133", default-features = false }
-ra-ap-rustc_type_ir = { version = "0.133", default-features = false }
-ra-ap-rustc_next_trait_solver = { version = "0.133", default-features = false }
+ra-ap-rustc_lexer = { version = "0.137", default-features = false }
+ra-ap-rustc_parse_format = { version = "0.137", default-features = false }
+ra-ap-rustc_index = { version = "0.137", default-features = false }
+ra-ap-rustc_abi = { version = "0.137", default-features = false }
+ra-ap-rustc_pattern_analysis = { version = "0.137", default-features = false }
+ra-ap-rustc_ast_ir = { version = "0.137", default-features = false }
+ra-ap-rustc_type_ir = { version = "0.137", default-features = false }
+ra-ap-rustc_next_trait_solver = { version = "0.137", default-features = false }
 
 # local crates that aren't published to crates.io. These should not have versions.
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
index 2b92408f0f6b8..e807ce62e8cfd 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
@@ -38,7 +38,7 @@ use rustc_apfloat::{
 use rustc_ast_ir::FloatTy;
 use rustc_hash::FxHashSet;
 use rustc_type_ir::{
-    AliasTyKind, CoroutineArgsParts, RegionKind, Upcast,
+    AliasTyKind, BoundVarIndexKind, CoroutineArgsParts, RegionKind, Upcast,
     inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike, Term as _, Ty as _, Tys as _},
 };
 use smallvec::SmallVec;
@@ -682,9 +682,12 @@ impl<'db> HirDisplay<'db> for Const<'db> {
     fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result<(), HirDisplayError> {
         match self.kind() {
             ConstKind::Placeholder(_) => write!(f, ""),
-            ConstKind::Bound(db, bound_const) => {
+            ConstKind::Bound(BoundVarIndexKind::Bound(db), bound_const) => {
                 write!(f, "?{}.{}", db.as_u32(), bound_const.var.as_u32())
             }
+            ConstKind::Bound(BoundVarIndexKind::Canonical, bound_const) => {
+                write!(f, "?c.{}", bound_const.var.as_u32())
+            }
             ConstKind::Infer(..) => write!(f, "#c#"),
             ConstKind::Param(param) => {
                 let generics = generics(f.db, param.id.parent());
@@ -1525,9 +1528,12 @@ impl<'db> HirDisplay<'db> for Ty<'db> {
                     }
                 }
             }
-            TyKind::Bound(debruijn, ty) => {
+            TyKind::Bound(BoundVarIndexKind::Bound(debruijn), ty) => {
                 write!(f, "?{}.{}", debruijn.as_usize(), ty.var.as_usize())?
             }
+            TyKind::Bound(BoundVarIndexKind::Canonical, ty) => {
+                write!(f, "?c.{}", ty.var.as_usize())?
+            }
             TyKind::Dynamic(bounds, region) => {
                 // We want to put auto traits after principal traits, regardless of their written order.
                 let mut bounds_to_display = SmallVec::<[_; 4]>::new();
@@ -1955,9 +1961,12 @@ impl<'db> HirDisplay<'db> for Region<'db> {
                 write!(f, "{}", param_data.name.display(f.db, f.edition()))?;
                 Ok(())
             }
-            RegionKind::ReBound(db, idx) => {
+            RegionKind::ReBound(BoundVarIndexKind::Bound(db), idx) => {
                 write!(f, "?{}.{}", db.as_u32(), idx.var.as_u32())
             }
+            RegionKind::ReBound(BoundVarIndexKind::Canonical, idx) => {
+                write!(f, "?c.{}", idx.var.as_u32())
+            }
             RegionKind::ReVar(_) => write!(f, "_"),
             RegionKind::ReStatic => write!(f, "'static"),
             RegionKind::ReError(..) => {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
index 536c81ab03b2c..094a3e5326e95 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
@@ -61,7 +61,7 @@ use la_arena::Idx;
 use mir::{MirEvalError, VTableMap};
 use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet};
 use rustc_type_ir::{
-    TypeSuperVisitable, TypeVisitableExt, UpcastFrom,
+    BoundVarIndexKind, TypeSuperVisitable, TypeVisitableExt, UpcastFrom,
     inherent::{IntoKind, SliceLike, Ty as _},
 };
 use syntax::ast::{ConstArg, make};
@@ -405,7 +405,7 @@ where
                     ))
                 }
                 TyKind::Infer(_) => error(),
-                TyKind::Bound(index, _) if index > self.binder => error(),
+                TyKind::Bound(BoundVarIndexKind::Bound(index), _) if index > self.binder => error(),
                 _ => t.try_super_fold_with(self),
             }
         }
@@ -432,7 +432,9 @@ where
                     Ok(Const::new_bound(self.interner, self.binder, BoundConst { var }))
                 }
                 ConstKind::Infer(_) => error(),
-                ConstKind::Bound(index, _) if index > self.binder => error(),
+                ConstKind::Bound(BoundVarIndexKind::Bound(index), _) if index > self.binder => {
+                    error()
+                }
                 _ => ct.try_super_fold_with(self),
             }
         }
@@ -454,7 +456,9 @@ where
                     ))
                 }
                 RegionKind::ReVar(_) => error(),
-                RegionKind::ReBound(index, _) if index > self.binder => error(),
+                RegionKind::ReBound(BoundVarIndexKind::Bound(index), _) if index > self.binder => {
+                    error()
+                }
                 _ => Ok(region),
             }
         }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
index 42f7290962bd4..6f7ca4829d52c 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
@@ -42,8 +42,8 @@ use rustc_ast_ir::Mutability;
 use rustc_hash::FxHashSet;
 use rustc_pattern_analysis::Captures;
 use rustc_type_ir::{
-    AliasTyKind, ConstKind, DebruijnIndex, ExistentialPredicate, ExistentialProjection,
-    ExistentialTraitRef, FnSig, OutlivesPredicate,
+    AliasTyKind, BoundVarIndexKind, ConstKind, DebruijnIndex, ExistentialPredicate,
+    ExistentialProjection, ExistentialTraitRef, FnSig, OutlivesPredicate,
     TyKind::{self},
     TypeVisitableExt,
     inherent::{GenericArg as _, GenericArgs as _, IntoKind as _, Region as _, SliceLike, Ty as _},
@@ -858,11 +858,13 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
         if let Some(bounds) = bounds {
             let region = match lifetime {
                 Some(it) => match it.kind() {
-                    rustc_type_ir::RegionKind::ReBound(db, var) => Region::new_bound(
-                        self.interner,
-                        db.shifted_out_to_binder(DebruijnIndex::from_u32(2)),
-                        var,
-                    ),
+                    rustc_type_ir::RegionKind::ReBound(BoundVarIndexKind::Bound(db), var) => {
+                        Region::new_bound(
+                            self.interner,
+                            db.shifted_out_to_binder(DebruijnIndex::from_u32(2)),
+                            var,
+                        )
+                    }
                     _ => it,
                 },
                 None => Region::new_static(self.interner),
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/consts.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/consts.rs
index c28af948bfc83..926dbdc03d037 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/consts.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/consts.rs
@@ -6,8 +6,9 @@ use hir_def::ConstParamId;
 use macros::{TypeFoldable, TypeVisitable};
 use rustc_ast_ir::visit::VisitorResult;
 use rustc_type_ir::{
-    BoundVar, DebruijnIndex, FlagComputation, Flags, TypeFoldable, TypeSuperFoldable,
-    TypeSuperVisitable, TypeVisitable, TypeVisitableExt, WithCachedTypeInfo,
+    BoundVar, BoundVarIndexKind, ConstVid, DebruijnIndex, FlagComputation, Flags, InferConst,
+    TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitableExt,
+    WithCachedTypeInfo,
     inherent::{IntoKind, ParamEnv as _, PlaceholderLike, SliceLike},
     relate::Relate,
 };
@@ -49,11 +50,11 @@ impl<'db> Const<'db> {
     }
 
     pub fn error(interner: DbInterner<'db>) -> Self {
-        Const::new(interner, rustc_type_ir::ConstKind::Error(ErrorGuaranteed))
+        Const::new(interner, ConstKind::Error(ErrorGuaranteed))
     }
 
     pub fn new_param(interner: DbInterner<'db>, param: ParamConst) -> Self {
-        Const::new(interner, rustc_type_ir::ConstKind::Param(param))
+        Const::new(interner, ConstKind::Param(param))
     }
 
     pub fn new_placeholder(interner: DbInterner<'db>, placeholder: PlaceholderConst) -> Self {
@@ -61,7 +62,7 @@ impl<'db> Const<'db> {
     }
 
     pub fn new_bound(interner: DbInterner<'db>, index: DebruijnIndex, bound: BoundConst) -> Self {
-        Const::new(interner, ConstKind::Bound(index, bound))
+        Const::new(interner, ConstKind::Bound(BoundVarIndexKind::Bound(index), bound))
     }
 
     pub fn new_valtree(
@@ -340,28 +341,34 @@ impl<'db> Flags for Const<'db> {
 }
 
 impl<'db> rustc_type_ir::inherent::Const<DbInterner<'db>> for Const<'db> {
-    fn new_infer(interner: DbInterner<'db>, var: rustc_type_ir::InferConst) -> Self {
+    fn new_infer(interner: DbInterner<'db>, var: InferConst) -> Self {
         Const::new(interner, ConstKind::Infer(var))
     }
 
-    fn new_var(interner: DbInterner<'db>, var: rustc_type_ir::ConstVid) -> Self {
-        Const::new(interner, ConstKind::Infer(rustc_type_ir::InferConst::Var(var)))
+    fn new_var(interner: DbInterner<'db>, var: ConstVid) -> Self {
+        Const::new(interner, ConstKind::Infer(InferConst::Var(var)))
     }
 
-    fn new_bound(
-        interner: DbInterner<'db>,
-        debruijn: rustc_type_ir::DebruijnIndex,
-        var: BoundConst,
-    ) -> Self {
-        Const::new(interner, ConstKind::Bound(debruijn, var))
+    fn new_bound(interner: DbInterner<'db>, debruijn: DebruijnIndex, var: BoundConst) -> Self {
+        Const::new(interner, ConstKind::Bound(BoundVarIndexKind::Bound(debruijn), var))
+    }
+
+    fn new_anon_bound(interner: DbInterner<'db>, debruijn: DebruijnIndex, var: BoundVar) -> Self {
+        Const::new(
+            interner,
+            ConstKind::Bound(BoundVarIndexKind::Bound(debruijn), BoundConst { var }),
+        )
     }
 
-    fn new_anon_bound(
+    fn new_canonical_bound(interner: DbInterner<'db>, var: BoundVar) -> Self {
+        Const::new(interner, ConstKind::Bound(BoundVarIndexKind::Canonical, BoundConst { var }))
+    }
+
+    fn new_placeholder(
         interner: DbInterner<'db>,
-        debruijn: rustc_type_ir::DebruijnIndex,
-        var: rustc_type_ir::BoundVar,
+        param: <DbInterner<'db> as rustc_type_ir::Interner>::PlaceholderConst,
     ) -> Self {
-        Const::new(interner, ConstKind::Bound(debruijn, BoundConst { var }))
+        Const::new(interner, ConstKind::Placeholder(param))
     }
 
     fn new_unevaluated(
@@ -378,13 +385,6 @@ impl<'db> rustc_type_ir::inherent::Const> for Const<'db> {
     fn new_error(interner: DbInterner<'db>, guar: ErrorGuaranteed) -> Self {
         Const::new(interner, ConstKind::Error(guar))
     }
-
-    fn new_placeholder(
-        interner: DbInterner<'db>,
-        param: <DbInterner<'db> as rustc_type_ir::Interner>::PlaceholderConst,
-    ) -> Self {
-        Const::new(interner, ConstKind::Placeholder(param))
-    }
 }
 
 #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fold.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fold.rs
index 588d42857493d..f776b6ecfc438 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fold.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/fold.rs
@@ -1,8 +1,8 @@
 //! Fold impls for the next-trait-solver.
 
 use rustc_type_ir::{
-    DebruijnIndex, RegionKind, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitableExt,
-    inherent::IntoKind,
+    BoundVarIndexKind, DebruijnIndex, RegionKind, TypeFoldable, TypeFolder, TypeSuperFoldable,
+    TypeVisitableExt, inherent::IntoKind,
 };
 
 use crate::next_solver::BoundConst;
@@ -79,7 +79,9 @@ where
 
     fn fold_ty(&mut self, t: Ty<'db>) -> Ty<'db> {
         match t.kind() {
-            TyKind::Bound(debruijn, bound_ty) if debruijn == self.current_index => {
+            TyKind::Bound(BoundVarIndexKind::Bound(debruijn), bound_ty)
+                if debruijn == self.current_index =>
+            {
                 let ty = self.delegate.replace_ty(bound_ty);
                 debug_assert!(!ty.has_vars_bound_above(DebruijnIndex::ZERO));
                 rustc_type_ir::shift_vars(self.interner, ty, self.current_index.as_u32())
@@ -96,9 +98,12 @@ where
 
     fn fold_region(&mut self, r: Region<'db>) -> Region<'db> {
         match r.kind() {
-            RegionKind::ReBound(debruijn, br) if debruijn == self.current_index => {
+            RegionKind::ReBound(BoundVarIndexKind::Bound(debruijn), br)
+                if debruijn == self.current_index =>
+            {
                 let region = self.delegate.replace_region(br);
-                if let RegionKind::ReBound(debruijn1, br) = region.kind() {
+                if let RegionKind::ReBound(BoundVarIndexKind::Bound(debruijn1), br) = region.kind()
+                {
                     // If the callback returns a bound region,
                     // that region should always use the INNERMOST
                     // debruijn index. Then we adjust it to the
@@ -115,7 +120,9 @@ where
 
     fn fold_const(&mut self, ct: Const<'db>) -> Const<'db> {
         match ct.kind() {
-            ConstKind::Bound(debruijn, bound_const) if debruijn == self.current_index => {
+            ConstKind::Bound(BoundVarIndexKind::Bound(debruijn), bound_const)
+                if debruijn == self.current_index =>
+            {
                 let ct = self.delegate.replace_const(bound_const);
                 debug_assert!(!ct.has_vars_bound_above(DebruijnIndex::ZERO));
                 rustc_type_ir::shift_vars(self.interner, ct, self.current_index.as_u32())
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs
index e6a818fdf3bc3..7995545b0eed5 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs
@@ -8,19 +8,18 @@
 use rustc_hash::FxHashMap;
 use rustc_index::Idx;
 use rustc_type_ir::InferTy::{self, FloatVar, IntVar, TyVar};
-use rustc_type_ir::inherent::{Const as _, IntoKind as _, SliceLike, Ty as _};
+use rustc_type_ir::inherent::{Const as _, IntoKind as _, Region as _, SliceLike, Ty as _};
 use rustc_type_ir::{
-    BoundVar, CanonicalQueryInput, DebruijnIndex, Flags, InferConst, RegionKind, TyVid, TypeFlags,
-    TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitableExt, UniverseIndex,
+    BoundVar, BoundVarIndexKind, CanonicalQueryInput, DebruijnIndex, Flags, InferConst, RegionKind,
+    TyVid, TypeFlags, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitableExt, UniverseIndex,
 };
 use smallvec::SmallVec;
 use tracing::debug;
 
 use crate::next_solver::infer::InferCtxt;
 use crate::next_solver::{
-    Binder, BoundConst, BoundRegion, BoundRegionKind, BoundTy, Canonical, CanonicalVarKind,
-    CanonicalVars, Const, ConstKind, DbInterner, GenericArg, ParamEnvAnd, Placeholder, Region, Ty,
-    TyKind,
+    Binder, Canonical, CanonicalVarKind, CanonicalVars, Const, ConstKind, DbInterner, GenericArg,
+    ParamEnvAnd, Placeholder, Region, Ty, TyKind,
 };
 
 /// When we canonicalize a value to form a query, we wind up replacing
@@ -345,12 +344,9 @@ impl<'cx, 'db> TypeFolder> for Canonicalizer<'cx, 'db> {
 
     fn fold_region(&mut self, r: Region<'db>) -> Region<'db> {
         match r.kind() {
-            RegionKind::ReBound(index, ..) => {
-                if index >= self.binder_index {
-                    panic!("escaping late-bound region during canonicalization");
-                } else {
-                    r
-                }
+            RegionKind::ReBound(BoundVarIndexKind::Bound(..), ..) => r,
+            RegionKind::ReBound(BoundVarIndexKind::Canonical, ..) => {
+                panic!("canonicalized bound var found during canonicalization");
             }
 
             RegionKind::ReStatic
@@ -427,12 +423,9 @@ impl<'cx, 'db> TypeFolder> for Canonicalizer<'cx, 'db> {
                 self.canonicalize_ty_var(CanonicalVarKind::PlaceholderTy(placeholder), t)
             }
 
-            TyKind::Bound(debruijn, _) => {
-                if debruijn >= self.binder_index {
-                    panic!("escaping bound type during canonicalization")
-                } else {
-                    t
-                }
+            TyKind::Bound(BoundVarIndexKind::Bound(..), _) => t,
+            TyKind::Bound(BoundVarIndexKind::Canonical, ..) => {
+                panic!("canonicalized bound var found during canonicalization");
             }
 
             TyKind::Closure(..)
@@ -503,12 +496,11 @@ impl<'cx, 'db> TypeFolder> for Canonicalizer<'cx, 'db> {
             ConstKind::Infer(InferConst::Fresh(_)) => {
                 panic!("encountered a fresh const during canonicalization")
             }
-            ConstKind::Bound(debruijn, _) => {
-                if debruijn >= self.binder_index {
-                    panic!("escaping bound const during canonicalization")
-                } else {
-                    return ct;
-                }
+            ConstKind::Bound(BoundVarIndexKind::Bound(..), _) => {
+                return ct;
+            }
+            ConstKind::Bound(BoundVarIndexKind::Canonical, ..) => {
+                panic!("canonicalized bound var found during canonicalization");
             }
             ConstKind::Placeholder(placeholder) => {
                 return self
@@ -758,8 +750,7 @@ impl<'cx, 'db> Canonicalizer<'cx, 'db> {
         r: Region<'db>,
     ) -> Region<'db> {
         let var = self.canonical_var(info, r.into());
-        let br = BoundRegion { var, kind: BoundRegionKind::Anon };
-        Region::new_bound(self.cx(), self.binder_index, br)
+        Region::new_canonical_bound(self.cx(), var)
     }
 
     /// Given a type variable `ty_var` of the given kind, first check
@@ -769,11 +760,7 @@ impl<'cx, 'db> Canonicalizer<'cx, 'db> {
     fn canonicalize_ty_var(&mut self, info: CanonicalVarKind<'db>, ty_var: Ty<'db>) -> Ty<'db> {
         debug_assert_eq!(ty_var, self.infcx.shallow_resolve(ty_var));
         let var = self.canonical_var(info, ty_var.into());
-        Ty::new_bound(
-            self.tcx,
-            self.binder_index,
-            BoundTy { kind: crate::next_solver::BoundTyKind::Anon, var },
-        )
+        Ty::new_canonical_bound(self.cx(), var)
     }
 
     /// Given a type variable `const_var` of the given kind, first check
@@ -787,6 +774,6 @@ impl<'cx, 'db> Canonicalizer<'cx, 'db> {
     ) -> Const<'db> {
         debug_assert_eq!(const_var, self.infcx.shallow_resolve_const(const_var));
         let var = self.canonical_var(info, const_var.into());
-        Const::new_bound(self.tcx, self.binder_index, BoundConst { var })
+        Const::new_canonical_bound(self.cx(), var)
     }
 }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/select.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/select.rs
index d2f584b38cf47..52ad410df6be7 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/select.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/select.rs
@@ -353,7 +353,9 @@ fn candidate_should_be_dropped_in_favor_of<'db>(
         // Prefer dyn candidates over non-dyn candidates. This is necessary to
         // handle the unsoundness between `impl Any for T` and `dyn Any: Any`.
         (
-            CandidateSource::Impl(_) | CandidateSource::ParamEnv(_) | CandidateSource::AliasBound,
+            CandidateSource::Impl(_)
+            | CandidateSource::ParamEnv(_)
+            | CandidateSource::AliasBound(_),
             CandidateSource::BuiltinImpl(BuiltinImplSource::Object { .. }),
         ) => true,
 
@@ -399,7 +401,9 @@ fn to_selection<'db>(cand: InspectCandidate<'_, 'db>) -> Option>
                 })
             }
             CandidateSource::BuiltinImpl(builtin) => ImplSource::Builtin(builtin, nested),
-            CandidateSource::ParamEnv(_) | CandidateSource::AliasBound => ImplSource::Param(nested),
+            CandidateSource::ParamEnv(_) | CandidateSource::AliasBound(_) => {
+                ImplSource::Param(nested)
+            }
             CandidateSource::CoherenceUnknowable => {
                 panic!("didn't expect to select an unknowable candidate")
             }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs
index ce8b76837a3c7..a509fd893d3c7 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs
@@ -18,8 +18,8 @@ use rustc_hash::FxHashSet;
 use rustc_index::bit_set::DenseBitSet;
 use rustc_type_ir::{
     AliasTermKind, AliasTyKind, BoundVar, CollectAndApply, DebruijnIndex, EarlyBinder,
-    FlagComputation, Flags, GenericArgKind, ImplPolarity, InferTy, TraitRef, TypeVisitableExt,
-    UniverseIndex, Upcast, Variance,
+    FlagComputation, Flags, GenericArgKind, ImplPolarity, InferTy, Interner, TraitRef,
+    TypeVisitableExt, UniverseIndex, Upcast, Variance,
     elaborate::elaborate,
     error::TypeError,
     inherent::{self, GenericsOf, IntoKind, SliceLike as _, Span as _, Ty as _},
@@ -33,8 +33,8 @@ use crate::{
     method_resolution::{ALL_FLOAT_FPS, ALL_INT_FPS, TyFingerprint},
     next_solver::{
         AdtIdWrapper, BoundConst, CallableIdWrapper, CanonicalVarKind, ClosureIdWrapper,
-        CoroutineIdWrapper, Ctor, FnSig, FxIndexMap, ImplIdWrapper, RegionAssumptions,
-        SolverContext, SolverDefIds, TraitIdWrapper, TypeAliasIdWrapper,
+        CoroutineIdWrapper, Ctor, FnSig, FxIndexMap, ImplIdWrapper, OpaqueTypeKey,
+        RegionAssumptions, SolverContext, SolverDefIds, TraitIdWrapper, TypeAliasIdWrapper,
         util::{ContainsTypeErrors, explicit_item_bounds, for_trait_impls},
     },
 };
@@ -850,7 +850,7 @@ macro_rules! as_lang_item {
     }};
 }
 
-impl<'db> rustc_type_ir::Interner for DbInterner<'db> {
+impl<'db> Interner for DbInterner<'db> {
     type DefId = SolverDefId;
     type LocalDefId = SolverDefId;
     type LocalDefIds = SolverDefIds<'db>;
@@ -877,9 +877,9 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> {
 
     fn mk_predefined_opaques_in_body(
         self,
-        data: rustc_type_ir::solve::PredefinedOpaquesData<Self>,
+        data: &[(OpaqueTypeKey<'db>, Self::Ty)],
     ) -> Self::PredefinedOpaques {
-        PredefinedOpaques::new(self, data)
+        PredefinedOpaques::new_from_iter(self, data.iter().cloned())
     }
 
     type CanonicalVarKinds = CanonicalVars<'db>;
@@ -997,8 +997,8 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> {
         from_entry(&f())
     }
 
-    fn evaluation_is_concurrent(&self) -> bool {
-        false
+    fn assert_evaluation_is_concurrent(&self) {
+        panic!("evaluation shouldn't be concurrent yet")
     }
 
     fn expand_abstract_consts>(self, _: T) -> T {
@@ -1953,6 +1953,13 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> {
             Self,
         >(self, canonical_goal)
     }
+
+    fn is_sizedness_trait(self, def_id: Self::TraitId) -> bool {
+        matches!(
+            self.as_trait_lang_item(def_id),
+            Some(SolverTraitLangItem::Sized | SolverTraitLangItem::MetaSized)
+        )
+    }
 }
 
 impl<'db> DbInterner<'db> {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/opaques.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/opaques.rs
index 8714c95f27d8d..e8f5be2eb5988 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/opaques.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/opaques.rs
@@ -1,84 +1,17 @@
 //! Things related to opaques in the next-trait-solver.
 
 use rustc_ast_ir::try_visit;
+use rustc_type_ir::inherent::SliceLike;
 
-use crate::next_solver::SolverDefId;
-
-use super::{DbInterner, interned_vec_nolifetime_salsa};
+use super::{DbInterner, SolverDefId, Ty, interned_vec_db, interned_vec_nolifetime_salsa};
 
 pub type OpaqueTypeKey<'db> = rustc_type_ir::OpaqueTypeKey>;
-pub type PredefinedOpaquesData<'db> = rustc_type_ir::solve::PredefinedOpaquesData<DbInterner<'db>>;
-pub type ExternalConstraintsData<'db> =
-    rustc_type_ir::solve::ExternalConstraintsData<DbInterner<'db>>;
-
-#[salsa::interned(constructor = new_, debug)]
-pub struct PredefinedOpaques<'db> {
-    #[returns(ref)]
-    kind_: rustc_type_ir::solve::PredefinedOpaquesData<DbInterner<'db>>,
-}
 
-impl<'db> PredefinedOpaques<'db> {
-    pub fn new(interner: DbInterner<'db>, data: PredefinedOpaquesData<'db>) -> Self {
-        PredefinedOpaques::new_(interner.db(), data)
-    }
+type PredefinedOpaque<'db> = (OpaqueTypeKey<'db>, Ty<'db>);
+interned_vec_db!(PredefinedOpaques, PredefinedOpaque);
 
-    pub fn inner(&self) -> &PredefinedOpaquesData<'db> {
-        crate::with_attached_db(|db| {
-            let inner = self.kind_(db);
-            // SAFETY: ¯\_(ツ)_/¯
-            unsafe { std::mem::transmute(inner) }
-        })
-    }
-}
-
-impl<'db> rustc_type_ir::TypeVisitable<DbInterner<'db>> for PredefinedOpaques<'db> {
-    fn visit_with>>(
-        &self,
-        visitor: &mut V,
-    ) -> V::Result {
-        self.opaque_types.visit_with(visitor)
-    }
-}
-
-impl<'db> rustc_type_ir::TypeFoldable<DbInterner<'db>> for PredefinedOpaques<'db> {
-    fn try_fold_with>>(
-        self,
-        folder: &mut F,
-    ) -> Result {
-        Ok(PredefinedOpaques::new(
-            folder.cx(),
-            PredefinedOpaquesData {
-                opaque_types: self
-                    .opaque_types
-                    .iter()
-                    .cloned()
-                    .map(|opaque| opaque.try_fold_with(folder))
-                    .collect::>()?,
-            },
-        ))
-    }
-    fn fold_with>>(self, folder: &mut F) -> Self {
-        PredefinedOpaques::new(
-            folder.cx(),
-            PredefinedOpaquesData {
-                opaque_types: self
-                    .opaque_types
-                    .iter()
-                    .cloned()
-                    .map(|opaque| opaque.fold_with(folder))
-                    .collect(),
-            },
-        )
-    }
-}
-
-impl<'db> std::ops::Deref for PredefinedOpaques<'db> {
-    type Target = PredefinedOpaquesData<'db>;
-
-    fn deref(&self) -> &Self::Target {
-        self.inner()
-    }
-}
+pub type ExternalConstraintsData<'db> =
+    rustc_type_ir::solve::ExternalConstraintsData<DbInterner<'db>>;
 
 interned_vec_nolifetime_salsa!(SolverDefIds, SolverDefId);
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/region.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/region.rs
index a3cfa65eb3734..b5f0e6de2910d 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/region.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/region.rs
@@ -3,7 +3,8 @@
 use hir_def::LifetimeParamId;
 use intern::Symbol;
 use rustc_type_ir::{
-    BoundVar, DebruijnIndex, Flags, INNERMOST, RegionVid, TypeFlags, TypeFoldable, TypeVisitable,
+    BoundVar, BoundVarIndexKind, DebruijnIndex, Flags, INNERMOST, RegionVid, TypeFlags,
+    TypeFoldable, TypeVisitable,
     inherent::{IntoKind, PlaceholderLike, SliceLike},
     relate::Relate,
 };
@@ -67,7 +68,7 @@ impl<'db> Region<'db> {
         index: DebruijnIndex,
         bound: BoundRegion,
     ) -> Region<'db> {
-        Region::new(interner, RegionKind::ReBound(index, bound))
+        Region::new(interner, RegionKind::ReBound(BoundVarIndexKind::Bound(index), bound))
     }
 
     pub fn is_placeholder(&self) -> bool {
@@ -116,7 +117,11 @@ impl<'db> Region<'db> {
             RegionKind::ReStatic => {
                 flags |= TypeFlags::HAS_FREE_REGIONS;
             }
-            RegionKind::ReBound(..) => {
+            RegionKind::ReBound(BoundVarIndexKind::Canonical, ..) => {
+                flags |= TypeFlags::HAS_RE_BOUND;
+                flags |= TypeFlags::HAS_CANONICAL_BOUND;
+            }
+            RegionKind::ReBound(BoundVarIndexKind::Bound(..), ..) => {
                 flags |= TypeFlags::HAS_RE_BOUND;
             }
             RegionKind::ReErased => {
@@ -293,7 +298,7 @@ impl<'db> Flags for Region<'db> {
 
     fn outer_exclusive_binder(&self) -> rustc_type_ir::DebruijnIndex {
         match &self.inner() {
-            RegionKind::ReBound(debruijn, _) => debruijn.shifted_in(1),
+            RegionKind::ReBound(BoundVarIndexKind::Bound(debruijn), _) => debruijn.shifted_in(1),
             _ => INNERMOST,
         }
     }
@@ -305,7 +310,7 @@ impl<'db> rustc_type_ir::inherent::Region> for Region<'db> {
         debruijn: rustc_type_ir::DebruijnIndex,
         var: BoundRegion,
     ) -> Self {
-        Region::new(interner, RegionKind::ReBound(debruijn, var))
+        Region::new(interner, RegionKind::ReBound(BoundVarIndexKind::Bound(debruijn), var))
     }
 
     fn new_anon_bound(
@@ -315,7 +320,20 @@ impl<'db> rustc_type_ir::inherent::Region> for Region<'db> {
     ) -> Self {
         Region::new(
             interner,
-            RegionKind::ReBound(debruijn, BoundRegion { var, kind: BoundRegionKind::Anon }),
+            RegionKind::ReBound(
+                BoundVarIndexKind::Bound(debruijn),
+                BoundRegion { var, kind: BoundRegionKind::Anon },
+            ),
+        )
+    }
+
+    fn new_canonical_bound(interner: DbInterner<'db>, var: rustc_type_ir::BoundVar) -> Self {
+        Region::new(
+            interner,
+            RegionKind::ReBound(
+                BoundVarIndexKind::Canonical,
+                BoundRegion { var, kind: BoundRegionKind::Anon },
+            ),
         )
     }
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs
index 7cf23b82f63d9..3abbd28657467 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs
@@ -11,9 +11,10 @@ use hir_def::{TraitId, type_ref::Rawness};
 use rustc_abi::{Float, Integer, Size};
 use rustc_ast_ir::{Mutability, try_visit, visit::VisitorResult};
 use rustc_type_ir::{
-    AliasTyKind, BoundVar, ClosureKind, FlagComputation, Flags, FloatTy, FloatVid, InferTy, IntTy,
-    IntVid, Interner, TyVid, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable,
-    TypeVisitableExt, TypeVisitor, UintTy, Upcast, WithCachedTypeInfo,
+    AliasTyKind, BoundVar, BoundVarIndexKind, ClosureKind, DebruijnIndex, FlagComputation, Flags,
+    FloatTy, FloatVid, InferTy, IntTy, IntVid, Interner, TyVid, TypeFoldable, TypeSuperFoldable,
+    TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor, UintTy, Upcast,
+    WithCachedTypeInfo,
     inherent::{
         AdtDef as _, BoundExistentialPredicates, BoundVarLike, Const as _, GenericArgs as _,
         IntoKind, ParamLike, PlaceholderLike, Safety as _, SliceLike, Ty as _,
@@ -27,7 +28,7 @@ use crate::{
     ImplTraitId,
     db::HirDatabase,
     next_solver::{
-        AdtDef, Binder, CallableIdWrapper, Clause, ClauseKind, ClosureIdWrapper, Const,
+        AdtDef, AliasTy, Binder, CallableIdWrapper, Clause, ClauseKind, ClosureIdWrapper, Const,
         CoroutineIdWrapper, FnSig, GenericArg, PolyFnSig, Region, TraitRef, TypeAliasIdWrapper,
         abi::Safety,
         interner::InternedWrapperNoDebug,
@@ -895,27 +896,28 @@ impl<'db> rustc_type_ir::inherent::Ty> for Ty<'db> {
         Ty::new(interner, TyKind::Placeholder(param))
     }
 
-    fn new_bound(
-        interner: DbInterner<'db>,
-        debruijn: rustc_type_ir::DebruijnIndex,
-        var: BoundTy,
-    ) -> Self {
-        Ty::new(interner, TyKind::Bound(debruijn, var))
+    fn new_bound(interner: DbInterner<'db>, debruijn: DebruijnIndex, var: BoundTy) -> Self {
+        Ty::new(interner, TyKind::Bound(BoundVarIndexKind::Bound(debruijn), var))
     }
 
-    fn new_anon_bound(
-        interner: DbInterner<'db>,
-        debruijn: rustc_type_ir::DebruijnIndex,
-        var: BoundVar,
-    ) -> Self {
-        Ty::new(interner, TyKind::Bound(debruijn, BoundTy { var, kind: BoundTyKind::Anon }))
+    fn new_anon_bound(interner: DbInterner<'db>, debruijn: DebruijnIndex, var: BoundVar) -> Self {
+        Ty::new(
+            interner,
+            TyKind::Bound(
+                BoundVarIndexKind::Bound(debruijn),
+                BoundTy { var, kind: BoundTyKind::Anon },
+            ),
+        )
     }
 
-    fn new_alias(
-        interner: DbInterner<'db>,
-        kind: rustc_type_ir::AliasTyKind,
-        alias_ty: rustc_type_ir::AliasTy<DbInterner<'db>>,
-    ) -> Self {
+    fn new_canonical_bound(interner: DbInterner<'db>, var: BoundVar) -> Self {
+        Ty::new(
+            interner,
+            TyKind::Bound(BoundVarIndexKind::Canonical, BoundTy { var, kind: BoundTyKind::Anon }),
+        )
+    }
+
+    fn new_alias(interner: DbInterner<'db>, kind: AliasTyKind, alias_ty: AliasTy<'db>) -> Self {
         Ty::new(interner, TyKind::Alias(kind, alias_ty))
     }
 
@@ -925,7 +927,7 @@ impl<'db> rustc_type_ir::inherent::Ty> for Ty<'db> {
 
     fn new_adt(
         interner: DbInterner<'db>,
-        adt_def: <DbInterner<'db> as rustc_type_ir::Interner>::AdtDef,
+        adt_def: <DbInterner<'db> as Interner>::AdtDef,
         args: GenericArgs<'db>,
     ) -> Self {
         Ty::new(interner, TyKind::Adt(adt_def, args))
@@ -937,8 +939,8 @@ impl<'db> rustc_type_ir::inherent::Ty> for Ty<'db> {
 
     fn new_dynamic(
         interner: DbInterner<'db>,
-        preds: <DbInterner<'db> as rustc_type_ir::Interner>::BoundExistentialPredicates,
-        region: <DbInterner<'db> as rustc_type_ir::Interner>::Region,
+        preds: <DbInterner<'db> as Interner>::BoundExistentialPredicates,
+        region: <DbInterner<'db> as Interner>::Region,
     ) -> Self {
         Ty::new(interner, TyKind::Dynamic(preds, region))
     }
@@ -946,7 +948,7 @@ impl<'db> rustc_type_ir::inherent::Ty> for Ty<'db> {
     fn new_coroutine(
         interner: DbInterner<'db>,
         def_id: CoroutineIdWrapper,
-        args: <DbInterner<'db> as rustc_type_ir::Interner>::GenericArgs,
+        args: <DbInterner<'db> as Interner>::GenericArgs,
     ) -> Self {
         Ty::new(interner, TyKind::Coroutine(def_id, args))
     }
@@ -954,7 +956,7 @@ impl<'db> rustc_type_ir::inherent::Ty> for Ty<'db> {
     fn new_coroutine_closure(
         interner: DbInterner<'db>,
         def_id: CoroutineIdWrapper,
-        args: <DbInterner<'db> as rustc_type_ir::Interner>::GenericArgs,
+        args: <DbInterner<'db> as Interner>::GenericArgs,
     ) -> Self {
         Ty::new(interner, TyKind::CoroutineClosure(def_id, args))
     }
@@ -962,7 +964,7 @@ impl<'db> rustc_type_ir::inherent::Ty> for Ty<'db> {
     fn new_closure(
         interner: DbInterner<'db>,
         def_id: ClosureIdWrapper,
-        args: <DbInterner<'db> as rustc_type_ir::Interner>::GenericArgs,
+        args: <DbInterner<'db> as Interner>::GenericArgs,
     ) -> Self {
         Ty::new(interner, TyKind::Closure(def_id, args))
     }
@@ -970,7 +972,7 @@ impl<'db> rustc_type_ir::inherent::Ty> for Ty<'db> {
     fn new_coroutine_witness(
         interner: DbInterner<'db>,
         def_id: CoroutineIdWrapper,
-        args: <DbInterner<'db> as rustc_type_ir::Interner>::GenericArgs,
+        args: <DbInterner<'db> as Interner>::GenericArgs,
     ) -> Self {
         Ty::new(interner, TyKind::CoroutineWitness(def_id, args))
     }
@@ -978,7 +980,7 @@ impl<'db> rustc_type_ir::inherent::Ty> for Ty<'db> {
     fn new_coroutine_witness_for_coroutine(
         interner: DbInterner<'db>,
         def_id: CoroutineIdWrapper,
-        coroutine_args: <DbInterner<'db> as rustc_type_ir::Interner>::GenericArgs,
+        coroutine_args: <DbInterner<'db> as Interner>::GenericArgs,
     ) -> Self {
         // HACK: Coroutine witness types are lifetime erased, so they
         // never reference any lifetime args from the coroutine. We erase
@@ -1006,7 +1008,7 @@ impl<'db> rustc_type_ir::inherent::Ty> for Ty<'db> {
 
     fn new_ref(
         interner: DbInterner<'db>,
-        region: <DbInterner<'db> as rustc_type_ir::Interner>::Region,
+        region: <DbInterner<'db> as Interner>::Region,
         ty: Self,
         mutbl: rustc_ast_ir::Mutability,
     ) -> Self {
@@ -1016,7 +1018,7 @@ impl<'db> rustc_type_ir::inherent::Ty> for Ty<'db> {
     fn new_array_with_const_len(
         interner: DbInterner<'db>,
         ty: Self,
-        len: <DbInterner<'db> as rustc_type_ir::Interner>::Const,
+        len: <DbInterner<'db> as Interner>::Const,
     ) -> Self {
         Ty::new(interner, TyKind::Array(ty, len))
     }
@@ -1025,10 +1027,7 @@ impl<'db> rustc_type_ir::inherent::Ty> for Ty<'db> {
         Ty::new(interner, TyKind::Slice(ty))
     }
 
-    fn new_tup(
-        interner: DbInterner<'db>,
-        tys: &[<DbInterner<'db> as rustc_type_ir::Interner>::Ty],
-    ) -> Self {
+    fn new_tup(interner: DbInterner<'db>, tys: &[ as Interner>::Ty]) -> Self {
         Ty::new(interner, TyKind::Tuple(Tys::new_from_iter(interner, tys.iter().cloned())))
     }
 
@@ -1043,7 +1042,7 @@ impl<'db> rustc_type_ir::inherent::Ty<DbInterner<'db>> for Ty<'db> {
     fn new_fn_def(
         interner: DbInterner<'db>,
         def_id: CallableIdWrapper,
-        args: <DbInterner<'db> as rustc_type_ir::Interner>::GenericArgs,
+        args: <DbInterner<'db> as Interner>::GenericArgs,
     ) -> Self {
         Ty::new(interner, TyKind::FnDef(def_id, args))
     }
@@ -1059,12 +1058,19 @@ impl<'db> rustc_type_ir::inherent::Ty<DbInterner<'db>> for Ty<'db> {
     fn new_pat(
         interner: DbInterner<'db>,
         ty: Self,
-        pat: <DbInterner<'db> as rustc_type_ir::Interner>::Pat,
+        pat: <DbInterner<'db> as Interner>::Pat,
     ) -> Self {
         Ty::new(interner, TyKind::Pat(ty, pat))
     }
 
-    fn tuple_fields(self) -> <DbInterner<'db> as rustc_type_ir::Interner>::Tys {
+    fn new_unsafe_binder(
+        interner: DbInterner<'db>,
+        ty: rustc_type_ir::Binder<DbInterner<'db>, <DbInterner<'db> as Interner>::Ty>,
+    ) -> Self {
+        Ty::new(interner, TyKind::UnsafeBinder(ty.into()))
+    }
+
+    fn tuple_fields(self) -> <DbInterner<'db> as Interner>::Tys {
         match self.kind() {
             TyKind::Tuple(args) => args,
             _ => panic!("tuple_fields called on non-tuple: {self:?}"),
@@ -1111,10 +1117,11 @@ impl<'db> rustc_type_ir::inherent::Ty<DbInterner<'db>> for Ty<'db> {
         }
     }
 
-    fn discriminant_ty(
-        self,
-        interner: DbInterner<'db>,
-    ) -> <DbInterner<'db> as rustc_type_ir::Interner>::Ty {
+    fn has_unsafe_fields(self) -> bool {
+        false
+    }
+
+    fn discriminant_ty(self, interner: DbInterner<'db>) -> <DbInterner<'db> as Interner>::Ty {
         match self.kind() {
             TyKind::Adt(adt, _) if adt.is_enum() => adt.repr().discr_type().to_ty(interner),
             TyKind::Coroutine(_, args) => args.as_coroutine().discr_ty(interner),
@@ -1168,20 +1175,6 @@ impl<'db> rustc_type_ir::inherent::Ty<DbInterner<'db>> for Ty<'db> {
             TyKind::UnsafeBinder(..) => unimplemented!(),
         }
     }
-
-    fn new_unsafe_binder(
-        interner: DbInterner<'db>,
-        ty: rustc_type_ir::Binder<
-            DbInterner<'db>,
-            <DbInterner<'db> as rustc_type_ir::Interner>::Ty,
-        >,
-    ) -> Self {
-        Ty::new(interner, TyKind::UnsafeBinder(ty.into()))
-    }
-
-    fn has_unsafe_fields(self) -> bool {
-        false
-    }
 }
 
 interned_vec_db!(Tys, Ty);
@@ -1193,14 +1186,14 @@ impl<'db> Tys<'db> {
 }
 
 impl<'db> rustc_type_ir::inherent::Tys<DbInterner<'db>> for Tys<'db> {
-    fn inputs(self) -> <DbInterner<'db> as rustc_type_ir::Interner>::FnInputTys {
+    fn inputs(self) -> <DbInterner<'db> as Interner>::FnInputTys {
         Tys::new_from_iter(
             DbInterner::conjure(),
             self.as_slice().split_last().unwrap().1.iter().copied(),
         )
     }
 
-    fn output(self) -> <DbInterner<'db> as rustc_type_ir::Interner>::Ty {
+    fn output(self) -> <DbInterner<'db> as Interner>::Ty {
         *self.as_slice().split_last().unwrap().0
     }
 }

From 9610f01c52f398de62fcfb14b626c63a4c54cad9 Mon Sep 17 00:00:00 2001
From: Shoyu Vanilla 
Date: Wed, 22 Oct 2025 02:22:55 +0900
Subject: [PATCH 53/76] Port `CanonicalInstantiator` from rustc

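As a rough orientation (not part of the ported code), the folder boils down
to: every variable that is bound at the canonical level is looked up by index
in `var_values`. Below is a deliberately simplified, self-contained analogue;
`SimpleTy` and `instantiate` are invented names for illustration and are not
the rustc_type_ir API.

```rust
#[derive(Clone, Debug, PartialEq)]
enum SimpleTy {
    // Plays the role of TyKind::Bound(BoundVarIndexKind::Canonical, _).
    CanonicalBound(usize),
    // Some structure the folder has to recurse into.
    Ref(Box<SimpleTy>),
    Int,
}

fn instantiate(ty: &SimpleTy, var_values: &[SimpleTy]) -> SimpleTy {
    match ty {
        // A canonical bound var is replaced by the value at the same index.
        SimpleTy::CanonicalBound(i) => var_values[*i].clone(),
        // Everything else is folded structurally.
        SimpleTy::Ref(inner) => SimpleTy::Ref(Box::new(instantiate(inner, var_values))),
        SimpleTy::Int => SimpleTy::Int,
    }
}

fn main() {
    let var_values = [SimpleTy::Int];
    let ty = SimpleTy::Ref(Box::new(SimpleTy::CanonicalBound(0)));
    assert_eq!(instantiate(&ty, &var_values), SimpleTy::Ref(Box::new(SimpleTy::Int)));
}
```

The real implementation additionally caches folded types and short-circuits on
`TypeFlags::HAS_CANONICAL_BOUND`, which is purely a performance concern.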
---
 .../infer/canonical/instantiate.rs            | 85 +++++++++++++++++--
 1 file changed, 80 insertions(+), 5 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs
index 64287fe47261d..6360291071540 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs
@@ -6,13 +6,15 @@
 //!
 //! [c]: https://rust-lang.github.io/chalk/book/canonical_queries/canonicalization.html
 
-use crate::next_solver::BoundConst;
 use crate::next_solver::{
-    BoundRegion, BoundTy, Canonical, CanonicalVarValues, DbInterner, fold::FnMutDelegate,
+    BoundConst, BoundRegion, BoundTy, Canonical, CanonicalVarValues, Clauses, Const, ConstKind,
+    DbInterner, GenericArg, Predicate, Region, RegionKind, Ty, TyKind, fold::FnMutDelegate,
 };
+use rustc_hash::FxHashMap;
 use rustc_type_ir::{
-    GenericArgKind, TypeFoldable,
-    inherent::{IntoKind, SliceLike},
+    BoundVarIndexKind, GenericArgKind, TypeFlags, TypeFoldable, TypeFolder, TypeSuperFoldable,
+    TypeVisitableExt,
+    inherent::{GenericArg as _, IntoKind, SliceLike},
 };
 
 pub trait CanonicalExt<'db, V> {
@@ -91,6 +93,79 @@ where
             },
         };
 
-        tcx.replace_escaping_bound_vars_uncached(value, delegate)
+        let value = tcx.replace_escaping_bound_vars_uncached(value, delegate);
+        value.fold_with(&mut CanonicalInstantiator {
+            tcx,
+            var_values: var_values.var_values.as_slice(),
+            cache: Default::default(),
+        })
+    }
+}
+
+/// Replaces the bound vars in a canonical binder with var values.
+struct CanonicalInstantiator<'db, 'a> {
+    tcx: DbInterner<'db>,
+
+    // The values that the bound vars are being instantiated with.
+    var_values: &'a [GenericArg<'db>],
+
+    // Because we use `BoundVarIndexKind::Canonical`, we can cache
+    // based only on the entire ty, not worrying about a `DebruijnIndex`
+    cache: FxHashMap<Ty<'db>, Ty<'db>>,
+}
+
+impl<'db, 'a> TypeFolder<DbInterner<'db>> for CanonicalInstantiator<'db, 'a> {
+    fn cx(&self) -> DbInterner<'db> {
+        self.tcx
+    }
+
+    fn fold_ty(&mut self, t: Ty<'db>) -> Ty<'db> {
+        match t.kind() {
+            TyKind::Bound(BoundVarIndexKind::Canonical, bound_ty) => {
+                self.var_values[bound_ty.var.as_usize()].expect_ty()
+            }
+            _ => {
+                if !t.has_type_flags(TypeFlags::HAS_CANONICAL_BOUND) {
+                    t
+                } else if let Some(&t) = self.cache.get(&t) {
+                    t
+                } else {
+                    let res = t.super_fold_with(self);
+                    assert!(self.cache.insert(t, res).is_none());
+                    res
+                }
+            }
+        }
+    }
+
+    fn fold_region(&mut self, r: Region<'db>) -> Region<'db> {
+        match r.kind() {
+            RegionKind::ReBound(BoundVarIndexKind::Canonical, br) => {
+                self.var_values[br.var.as_usize()].expect_region()
+            }
+            _ => r,
+        }
+    }
+
+    fn fold_const(&mut self, ct: Const<'db>) -> Const<'db> {
+        match ct.kind() {
+            ConstKind::Bound(BoundVarIndexKind::Canonical, bound_const) => {
+                self.var_values[bound_const.var.as_usize()].expect_const()
+            }
+            _ => ct.super_fold_with(self),
+        }
+    }
+
+    fn fold_predicate(&mut self, p: Predicate<'db>) -> Predicate<'db> {
+        if p.has_type_flags(TypeFlags::HAS_CANONICAL_BOUND) { p.super_fold_with(self) } else { p }
+    }
+
+    fn fold_clauses(&mut self, c: Clauses<'db>) -> Clauses<'db> {
+        if !c.has_type_flags(TypeFlags::HAS_CANONICAL_BOUND) {
+            return c;
+        }
+
+        // FIXME: We might need cache here for perf like rustc
+        c.super_fold_with(self)
     }
 }

From c00dfa3a11409a7bc117f08cc39f6a6400067d47 Mon Sep 17 00:00:00 2001
From: A4-Tacks 
Date: Thu, 18 Sep 2025 13:26:16 +0800
Subject: [PATCH 54/76] Also complete fields under the method-parenthesis heuristic

Parentheses after the cursor are already detected heuristically for method calls, but fields were not offered as completions in that position

Example
---
```rust
struct Foo { far: i32 }
impl Foo {
    fn foo(&self) {}
}
fn foo() -> (i32, i32) {
    let foo = Foo { far: 4 };
    foo.f$0
    (2, 3)
}
```

**Before this PR**:

```text
me foo()  fn(&self)
...
```

**After this PR**:

```text
fd far          i32
me foo()  fn(&self)
...
```
---
 .../ide-completion/src/completions/dot.rs     |  85 +++++++---
 .../ide-completion/src/completions/postfix.rs |   2 +-
 .../crates/ide-completion/src/context.rs      |   4 +-
 .../ide-completion/src/context/analysis.rs    |  82 +++++----
 .../crates/ide-completion/src/render.rs       |   3 +-
 .../ide-completion/src/render/function.rs     |   4 +-
 .../ide-completion/src/tests/expression.rs    | 157 ++++++++++++++++++
 7 files changed, 274 insertions(+), 63 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs
index 72b245ccafd90..511b59385702d 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs
@@ -25,9 +25,7 @@ pub(crate) fn complete_dot(
         _ => return,
     };
 
-    let is_field_access = matches!(dot_access.kind, DotAccessKind::Field { .. });
-    let is_method_access_with_parens =
-        matches!(dot_access.kind, DotAccessKind::Method { has_parens: true });
+    let has_parens = matches!(dot_access.kind, DotAccessKind::Method);
     let traits_in_scope = ctx.traits_in_scope();
 
     // Suggest .await syntax for types that implement Future trait
@@ -48,7 +46,7 @@ pub(crate) fn complete_dot(
                 DotAccessKind::Field { receiver_is_ambiguous_float_literal: _ } => {
                     DotAccessKind::Field { receiver_is_ambiguous_float_literal: false }
                 }
-                it @ DotAccessKind::Method { .. } => *it,
+                it @ DotAccessKind::Method => *it,
             };
             let dot_access = DotAccess {
                 receiver: dot_access.receiver.clone(),
@@ -67,8 +65,7 @@ pub(crate) fn complete_dot(
                     acc.add_field(ctx, &dot_access, Some(await_str.clone()), field, &ty)
                 },
                 |acc, field, ty| acc.add_tuple_field(ctx, Some(await_str.clone()), field, &ty),
-                is_field_access,
-                is_method_access_with_parens,
+                has_parens,
             );
             complete_methods(ctx, &future_output, &traits_in_scope, |func| {
                 acc.add_method(ctx, &dot_access, func, Some(await_str.clone()), None)
@@ -82,8 +79,7 @@ pub(crate) fn complete_dot(
         receiver_ty,
         |acc, field, ty| acc.add_field(ctx, dot_access, None, field, &ty),
         |acc, field, ty| acc.add_tuple_field(ctx, None, field, &ty),
-        is_field_access,
-        is_method_access_with_parens,
+        has_parens,
     );
     complete_methods(ctx, receiver_ty, &traits_in_scope, |func| {
         acc.add_method(ctx, dot_access, func, None, None)
@@ -112,7 +108,7 @@ pub(crate) fn complete_dot(
                 DotAccessKind::Field { receiver_is_ambiguous_float_literal: _ } => {
                     DotAccessKind::Field { receiver_is_ambiguous_float_literal: false }
                 }
-                it @ DotAccessKind::Method { .. } => *it,
+                it @ DotAccessKind::Method => *it,
             };
             let dot_access = DotAccess {
                 receiver: dot_access.receiver.clone(),
@@ -173,7 +169,6 @@ pub(crate) fn complete_undotted_self(
             )
         },
         |acc, field, ty| acc.add_tuple_field(ctx, Some(SmolStr::new_static("self")), field, &ty),
-        true,
         false,
     );
     complete_methods(ctx, &ty, &ctx.traits_in_scope(), |func| {
@@ -182,7 +177,7 @@ pub(crate) fn complete_undotted_self(
             &DotAccess {
                 receiver: None,
                 receiver_ty: None,
-                kind: DotAccessKind::Method { has_parens: false },
+                kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal: false },
                 ctx: DotAccessExprCtx {
                     in_block_expr: expr_ctx.in_block_expr,
                     in_breakable: expr_ctx.in_breakable,
@@ -201,15 +196,13 @@ fn complete_fields(
     receiver: &hir::Type<'_>,
     mut named_field: impl FnMut(&mut Completions, hir::Field, hir::Type<'_>),
     mut tuple_index: impl FnMut(&mut Completions, usize, hir::Type<'_>),
-    is_field_access: bool,
-    is_method_access_with_parens: bool,
+    has_parens: bool,
 ) {
     let mut seen_names = FxHashSet::default();
     for receiver in receiver.autoderef(ctx.db) {
         for (field, ty) in receiver.fields(ctx.db) {
             if seen_names.insert(field.name(ctx.db))
-                && (is_field_access
-                    || (is_method_access_with_parens && (ty.is_fn() || ty.is_closure())))
+                && (!has_parens || ty.is_fn() || ty.is_closure())
             {
                 named_field(acc, field, ty);
             }
@@ -218,8 +211,7 @@ fn complete_fields(
             // Tuples are always the last type in a deref chain, so just check if the name is
             // already seen without inserting into the hashset.
             if !seen_names.contains(&hir::Name::new_tuple_field(i))
-                && (is_field_access
-                    || (is_method_access_with_parens && (ty.is_fn() || ty.is_closure())))
+                && (!has_parens || ty.is_fn() || ty.is_closure())
             {
                 // Tuple fields are always public (tuple struct fields are handled above).
                 tuple_index(acc, i, ty);
@@ -1364,18 +1356,71 @@ fn foo() {
             r#"
 struct Foo { baz: fn() }
 impl Foo {
-    fn bar<T>(self, t: T): T { t }
+    fn bar<T>(self, t: T) -> T { t }
 }
 
 fn baz() {
     let foo = Foo{ baz: || {} };
-    foo.ba$0::<>;
+    foo.ba$0;
 }
 "#,
             expect![[r#"
-                me bar(…) fn(self, T)
+                fd baz                fn()
+                me bar(…) fn(self, T) -> T
             "#]],
         );
+
+        check_edit(
+            "baz",
+            r#"
+struct Foo { baz: fn() }
+impl Foo {
+    fn bar<T>(self, t: T) -> T { t }
+}
+
+fn baz() {
+    let foo = Foo{ baz: || {} };
+    foo.ba$0;
+}
+"#,
+            r#"
+struct Foo { baz: fn() }
+impl Foo {
+    fn bar(self, t: T) -> T { t }
+}
+
+fn baz() {
+    let foo = Foo{ baz: || {} };
+    (foo.baz)();
+}
+"#,
+        );
+
+        check_edit(
+            "bar",
+            r#"
+struct Foo { baz: fn() }
+impl Foo {
+    fn bar<T>(self, t: T) -> T { t }
+}
+
+fn baz() {
+    let foo = Foo{ baz: || {} };
+    foo.ba$0;
+}
+"#,
+            r#"
+struct Foo { baz: fn() }
+impl Foo {
+    fn bar<T>(self, t: T) -> T { t }
+}
+
+fn baz() {
+    let foo = Foo{ baz: || {} };
+    foo.bar(${1:t})$0;
+}
+"#,
+        );
     }
 
     #[test]
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs
index 4474d6181c209..70761534cd2c4 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs
@@ -43,7 +43,7 @@ pub(crate) fn complete_postfix(
                 DotAccessKind::Field { receiver_is_ambiguous_float_literal } => {
                     receiver_is_ambiguous_float_literal
                 }
-                DotAccessKind::Method { .. } => false,
+                DotAccessKind::Method => false,
             },
         ),
         _ => return,
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
index c95b83ef8a027..b245c0d9831ce 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
@@ -405,9 +405,7 @@ pub(crate) enum DotAccessKind {
         /// like `0.$0`
         receiver_is_ambiguous_float_literal: bool,
     },
-    Method {
-        has_parens: bool,
-    },
+    Method,
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
index f0a03dedfe881..c01b544ff6ef8 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
@@ -891,44 +891,53 @@ fn classify_name_ref<'db>(
         return Some(make_res(kind));
     }
 
+    let field_expr_handle = |recviver, node| {
+        let receiver = find_opt_node_in_file(original_file, recviver);
+        let receiver_is_ambiguous_float_literal = match &receiver {
+            Some(ast::Expr::Literal(l)) => matches! {
+                l.kind(),
+                ast::LiteralKind::FloatNumber { .. } if l.syntax().last_token().is_some_and(|it| it.text().ends_with('.'))
+            },
+            _ => false,
+        };
+
+        let receiver_is_part_of_indivisible_expression = match &receiver {
+            Some(ast::Expr::IfExpr(_)) => {
+                let next_token_kind =
+                    next_non_trivia_token(name_ref.syntax().clone()).map(|t| t.kind());
+                next_token_kind == Some(SyntaxKind::ELSE_KW)
+            }
+            _ => false,
+        };
+        if receiver_is_part_of_indivisible_expression {
+            return None;
+        }
+
+        let mut receiver_ty = receiver.as_ref().and_then(|it| sema.type_of_expr(it));
+        if receiver_is_ambiguous_float_literal {
+            // `123.|` is parsed as a float but should actually be an integer.
+            always!(receiver_ty.as_ref().is_none_or(|receiver_ty| receiver_ty.original.is_float()));
+            receiver_ty =
+                Some(TypeInfo { original: hir::BuiltinType::i32().ty(sema.db), adjusted: None });
+        }
+
+        let kind = NameRefKind::DotAccess(DotAccess {
+            receiver_ty,
+            kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal },
+            receiver,
+            ctx: DotAccessExprCtx {
+                in_block_expr: is_in_block(node),
+                in_breakable: is_in_breakable(node).unzip().0,
+            },
+        });
+        Some(make_res(kind))
+    };
+
     let segment = match_ast! {
         match parent {
             ast::PathSegment(segment) => segment,
             ast::FieldExpr(field) => {
-                let receiver = find_opt_node_in_file(original_file, field.expr());
-                let receiver_is_ambiguous_float_literal = match &receiver {
-                    Some(ast::Expr::Literal(l)) => matches! {
-                        l.kind(),
-                        ast::LiteralKind::FloatNumber { .. } if l.syntax().last_token().is_some_and(|it| it.text().ends_with('.'))
-                    },
-                    _ => false,
-                };
-
-                let receiver_is_part_of_indivisible_expression = match &receiver {
-                    Some(ast::Expr::IfExpr(_)) => {
-                        let next_token_kind = next_non_trivia_token(name_ref.syntax().clone()).map(|t| t.kind());
-                        next_token_kind == Some(SyntaxKind::ELSE_KW)
-                    },
-                    _ => false
-                };
-                if receiver_is_part_of_indivisible_expression {
-                    return None;
-                }
-
-                let mut receiver_ty = receiver.as_ref().and_then(|it| sema.type_of_expr(it));
-                if receiver_is_ambiguous_float_literal {
-                    // `123.|` is parsed as a float but should actually be an integer.
-                    always!(receiver_ty.as_ref().is_none_or(|receiver_ty| receiver_ty.original.is_float()));
-                    receiver_ty = Some(TypeInfo { original: hir::BuiltinType::i32().ty(sema.db), adjusted: None });
-                }
-
-                let kind = NameRefKind::DotAccess(DotAccess {
-                    receiver_ty,
-                    kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal },
-                    receiver,
-                    ctx: DotAccessExprCtx { in_block_expr: is_in_block(field.syntax()), in_breakable: is_in_breakable(field.syntax()).unzip().0 }
-                });
-                return Some(make_res(kind));
+                return field_expr_handle(field.expr(), field.syntax());
             },
             ast::ExternCrate(_) => {
                 let kind = NameRefKind::ExternCrate;
@@ -937,9 +946,12 @@ fn classify_name_ref<'db>(
             ast::MethodCallExpr(method) => {
                 let receiver = find_opt_node_in_file(original_file, method.receiver());
                 let has_parens = has_parens(&method);
+                if !has_parens && let Some(res) = field_expr_handle(method.receiver(), method.syntax()) {
+                    return Some(res)
+                }
                 let kind = NameRefKind::DotAccess(DotAccess {
                     receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
-                    kind: DotAccessKind::Method { has_parens },
+                    kind: DotAccessKind::Method,
                     receiver,
                     ctx: DotAccessExprCtx { in_block_expr: is_in_block(method.syntax()), in_breakable: is_in_breakable(method.syntax()).unzip().0 }
                 });
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
index 77a2a3a3a9a02..bc5589a64550b 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
@@ -170,8 +170,7 @@ pub(crate) fn render_field(
             builder.insert(receiver.syntax().text_range().start(), "(".to_owned());
             builder.insert(ctx.source_range().end(), ")".to_owned());
 
-            let is_parens_needed =
-                !matches!(dot_access.kind, DotAccessKind::Method { has_parens: true });
+            let is_parens_needed = !matches!(dot_access.kind, DotAccessKind::Method);
 
             if is_parens_needed {
                 builder.insert(ctx.source_range().end(), "()".to_owned());
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs
index c466019f991f7..3235323b3a590 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs
@@ -93,8 +93,8 @@ fn render(
             has_call_parens,
             ..
         }) => (false, has_call_parens, ctx.completion.config.snippet_cap),
-        FuncKind::Method(&DotAccess { kind: DotAccessKind::Method { has_parens }, .. }, _) => {
-            (true, has_parens, ctx.completion.config.snippet_cap)
+        FuncKind::Method(&DotAccess { kind: DotAccessKind::Method, .. }, _) => {
+            (true, true, ctx.completion.config.snippet_cap)
         }
         FuncKind::Method(DotAccess { kind: DotAccessKind::Field { .. }, .. }, _) => {
             (true, false, ctx.completion.config.snippet_cap)
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
index f75fa7943ba60..09af635f01ca1 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
@@ -2936,6 +2936,43 @@ fn foo() {
     );
 }
 
+#[test]
+fn ambiguous_float_literal_in_ambiguous_method_call() {
+    check(
+        r#"
+#![rustc_coherence_is_core]
+
+impl i32 {
+    pub fn int_method(self) {}
+}
+impl f64 {
+    pub fn float_method(self) {}
+}
+
+fn foo() -> (i32, i32) {
+    1.$0
+    (2, 3)
+}
+    "#,
+        expect![[r#"
+            me int_method() fn(self)
+            sn box    Box::new(expr)
+            sn call   function(expr)
+            sn const        const {}
+            sn dbg        dbg!(expr)
+            sn dbgr      dbg!(&expr)
+            sn deref           *expr
+            sn let               let
+            sn letm          let mut
+            sn match   match expr {}
+            sn ref             &expr
+            sn refm        &mut expr
+            sn return    return expr
+            sn unsafe      unsafe {}
+        "#]],
+    );
+}
+
 #[test]
 fn let_in_condition() {
     check_edit("let", r#"fn f() { if $0 {} }"#, r#"fn f() { if let $1 = $0 {} }"#);
@@ -3113,6 +3150,126 @@ fn let_in_previous_line_of_ambiguous_expr() {
     );
 }
 
+#[test]
+fn field_in_previous_line_of_ambiguous_expr() {
+    check(
+        r#"
+        struct Foo { field: i32 }
+        impl Foo {
+            fn method(&self) {}
+        }
+        fn foo() -> (i32, i32) {
+            let foo = Foo { field: 4 };
+            foo.$0
+            (2, 3)
+        }"#,
+        expect![[r#"
+            fd field           i32
+            me method()  fn(&self)
+            sn box  Box::new(expr)
+            sn call function(expr)
+            sn const      const {}
+            sn dbg      dbg!(expr)
+            sn dbgr    dbg!(&expr)
+            sn deref         *expr
+            sn let             let
+            sn letm        let mut
+            sn match match expr {}
+            sn ref           &expr
+            sn refm      &mut expr
+            sn return  return expr
+            sn unsafe    unsafe {}
+        "#]],
+    );
+
+    check(
+        r#"
+        struct Foo { field: i32 }
+        impl Foo {
+            fn method(&self) {}
+        }
+        fn foo() -> (i32, i32) {
+            let foo = Foo { field: 4 };
+            foo.a$0
+            (2, 3)
+        }"#,
+        expect![[r#"
+            fd field           i32
+            me method()  fn(&self)
+            sn box  Box::new(expr)
+            sn call function(expr)
+            sn const      const {}
+            sn dbg      dbg!(expr)
+            sn dbgr    dbg!(&expr)
+            sn deref         *expr
+            sn let             let
+            sn letm        let mut
+            sn match match expr {}
+            sn ref           &expr
+            sn refm      &mut expr
+            sn return  return expr
+            sn unsafe    unsafe {}
+        "#]],
+    );
+}
+
+#[test]
+fn fn_field_in_previous_line_of_ambiguous_expr() {
+    check(
+        r#"
+        struct Foo { field: fn() }
+        impl Foo {
+            fn method(&self) {}
+        }
+        fn foo() -> (i32, i32) {
+            let foo = Foo { field: || () };
+            foo.$0
+            (2, 3)
+        }"#,
+        expect![[r#"
+            fd field          fn()
+            me method()  fn(&self)
+            sn box  Box::new(expr)
+            sn call function(expr)
+            sn const      const {}
+            sn dbg      dbg!(expr)
+            sn dbgr    dbg!(&expr)
+            sn deref         *expr
+            sn let             let
+            sn letm        let mut
+            sn match match expr {}
+            sn ref           &expr
+            sn refm      &mut expr
+            sn return  return expr
+            sn unsafe    unsafe {}
+        "#]],
+    );
+
+    check_edit(
+        "field",
+        r#"
+        struct Foo { field: fn() }
+        impl Foo {
+            fn method(&self) {}
+        }
+        fn foo() -> (i32, i32) {
+            let foo = Foo { field: || () };
+            foo.a$0
+            (2, 3)
+        }"#,
+        r#"
+        struct Foo { field: fn() }
+        impl Foo {
+            fn method(&self) {}
+        }
+        fn foo() -> (i32, i32) {
+            let foo = Foo { field: || () };
+            (foo.field)()
+            (2, 3)
+        }"#,
+    );
+}
+
 #[test]
 fn private_inherent_and_public_trait() {
     check(

From 68cb0a9b3fb7e632640b5337d3dd39d304ea727c Mon Sep 17 00:00:00 2001
From: daladim 
Date: Thu, 23 Oct 2025 16:35:49 +0200
Subject: [PATCH 55/76] Added the "negation" semantic token

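As a rough illustration (not part of the patch itself), only the prefix `!`
changes classification with this commit: it is now reported as `negation`,
while `&&` and `||` keep the `logical` semantic token.

```rust
fn demo(a: bool, b: bool) -> bool {
    // `!a`: the `!` token is now tagged `negation`.
    // `&&` and `||`: still tagged `logical`.
    !a && (a || b)
}
```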
---
 .../crates/ide/src/syntax_highlighting/highlight.rs          | 2 +-
 .../rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs | 5 ++++-
 .../crates/rust-analyzer/src/lsp/semantic_tokens.rs          | 1 +
 .../rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs   | 1 +
 4 files changed, 7 insertions(+), 2 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
index d73575fb9549a..829d1279a839b 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
@@ -137,7 +137,7 @@ fn punctuation(
         }
         (T![!], MACRO_RULES) => HlPunct::MacroBang.into(),
         (T![!], NEVER_TYPE) => HlTag::BuiltinType.into(),
-        (T![!], PREFIX_EXPR) => HlOperator::Logical.into(),
+        (T![!], PREFIX_EXPR) => HlOperator::Negation.into(),
         (T![*], PTR_TYPE) => HlTag::Keyword.into(),
         (T![*], PREFIX_EXPR) => {
             let h = HlTag::Operator(HlOperator::Other).into();
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs
index 4b8762640c743..456a612987418 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs
@@ -124,8 +124,10 @@ pub enum HlOperator {
     Bitwise,
     /// +, -, *, /, +=, -=, *=, /=
     Arithmetic,
-    /// &&, ||, !
+    /// &&, ||
     Logical,
+    /// !
+    Negation,
     /// >, <, ==, >=, <=, !=
     Comparison,
     /// Other operators
@@ -194,6 +196,7 @@ impl HlTag {
                 HlOperator::Arithmetic => "arithmetic",
                 HlOperator::Logical => "logical",
                 HlOperator::Comparison => "comparison",
+                HlOperator::Negation => "negation",
                 HlOperator::Other => "operator",
             },
             HlTag::StringLiteral => "string_literal",
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/semantic_tokens.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/semantic_tokens.rs
index 3c21e19925257..828118a0866d9 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/semantic_tokens.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/semantic_tokens.rs
@@ -91,6 +91,7 @@ define_semantic_token_types![
         (LIFETIME, "lifetime"),
         (LOGICAL, "logical") => OPERATOR,
         (MACRO_BANG, "macroBang") => MACRO,
+        (NEGATION, "negation") => OPERATOR,
         (PARENTHESIS, "parenthesis"),
         (PROC_MACRO, "procMacro") => MACRO,
         (PUNCTUATION, "punctuation"),
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
index cd384ca713ec5..2d2dacbe2e150 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -840,6 +840,7 @@ fn semantic_token_type_and_modifiers(
             HlOperator::Bitwise => types::BITWISE,
             HlOperator::Arithmetic => types::ARITHMETIC,
             HlOperator::Logical => types::LOGICAL,
+            HlOperator::Negation => types::NEGATION,
             HlOperator::Comparison => types::COMPARISON,
             HlOperator::Other => types::OPERATOR,
         },

From 1ef688810f938a5ea1f4cec78e5727d6abcedb3e Mon Sep 17 00:00:00 2001
From: daladim 
Date: Thu, 23 Oct 2025 16:40:40 +0200
Subject: [PATCH 56/76] Updated unit tests

---
 .../src/syntax_highlighting/test_data/highlight_general.html    | 2 +-
 .../src/syntax_highlighting/test_data/highlight_operators.html  | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
index d99b29cfb8fa6..d058191aef722 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
@@ -148,7 +148,7 @@
     let baz = (-42,);
     let baz = -baz.0;
 
-    let _ = !true;
+    let _ = !true;
 
     'foo: loop {
         break 'foo;
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html
index 9c42401ed0775..cceb159c9dd41 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html
@@ -41,7 +41,7 @@
 .unresolved_reference    { color: #FC5555; text-decoration: wavy underline; }
 
 
fn main() {
-    1 + 1 - 1 * 1 / 1 % 1 | 1 & 1 ! 1 ^ 1 >> 1 << 1;
+    1 + 1 - 1 * 1 / 1 % 1 | 1 & 1 ! 1 ^ 1 >> 1 << 1;
     let mut a = 0;
     a += 1;
     a -= 1;

From 03b8682865d8db2180096b7ca8d917cbb4c6bcbf Mon Sep 17 00:00:00 2001
From: A4-Tacks 
Date: Fri, 24 Oct 2025 01:06:27 +0800
Subject: [PATCH 57/76] Fix some typos

---
 .../crates/ide-completion/src/context/analysis.rs             | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
index c01b544ff6ef8..d39bff1577f3c 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
@@ -891,8 +891,8 @@ fn classify_name_ref<'db>(
         return Some(make_res(kind));
     }
 
-    let field_expr_handle = |recviver, node| {
-        let receiver = find_opt_node_in_file(original_file, recviver);
+    let field_expr_handle = |receiver, node| {
+        let receiver = find_opt_node_in_file(original_file, receiver);
         let receiver_is_ambiguous_float_literal = match &receiver {
             Some(ast::Expr::Literal(l)) => matches! {
                 l.kind(),

From c1ecea6d7ee9ea3e00f88e09b61a26da1eb7a52a Mon Sep 17 00:00:00 2001
From: Chayim Refael Friedman 
Date: Thu, 23 Oct 2025 20:48:51 +0300
Subject: [PATCH 58/76] Implement `Interner::impl_specializes()`

Using specialization logic ported from rustc.
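For context, a minimal sketch of the relationship `specializes()` has to
decide, assuming a nightly toolchain with `min_specialization`; the trait and
impls below are invented for illustration. The concrete impl specializes the
blanket impl, so `impl_specializes(concrete_impl, blanket_impl)` should be
true.

```rust
#![feature(min_specialization)]

trait Describe {
    fn describe(&self) -> &'static str;
}

// Parent impl: applies to every `T`; `default` marks the item as specializable.
impl<T> Describe for T {
    default fn describe(&self) -> &'static str {
        "something"
    }
}

// Specializing impl: strictly narrower than the parent, so it wins for `u32`.
impl Describe for u32 {
    fn describe(&self) -> &'static str {
        "a u32"
    }
}

fn main() {
    assert_eq!(0u32.describe(), "a u32");
    assert_eq!('x'.describe(), "something");
}
```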
---
 .../rust-analyzer/crates/hir-ty/src/lib.rs    |   1 +
 .../crates/hir-ty/src/mir/eval/tests.rs       |   7 +-
 .../crates/hir-ty/src/next_solver/def_id.rs   |   2 +-
 .../crates/hir-ty/src/next_solver/interner.rs |  10 +-
 .../crates/hir-ty/src/specialization.rs       | 150 ++++++++++++++++++
 .../crates/intern/src/symbol/symbols.rs       |   2 +
 6 files changed, 163 insertions(+), 9 deletions(-)
 create mode 100644 src/tools/rust-analyzer/crates/hir-ty/src/specialization.rs

diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
index 536c81ab03b2c..96dd48b53aa82 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
@@ -27,6 +27,7 @@ mod infer;
 mod inhabitedness;
 mod lower;
 pub mod next_solver;
+mod specialization;
 mod target_feature;
 mod utils;
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs
index 4eb4aa91598e3..f242115afeff6 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs
@@ -636,16 +636,13 @@ fn main() {
     );
 }
 
-#[ignore = "
-FIXME(next-solver):
-This does not work currently because I replaced homemade selection with selection by the trait solver;
-This will work once we implement `Interner::impl_specializes()` properly.
-"]
 #[test]
 fn specialization_array_clone() {
     check_pass(
         r#"
 //- minicore: copy, derive, slice, index, coerce_unsized, panic
+#![feature(min_specialization)]
+
 impl<T: Clone, const N: usize> Clone for [T; N] {
     #[inline]
     fn clone(&self) -> Self {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/def_id.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/def_id.rs
index 928e1321e7388..0ff0b086a0877 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/def_id.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/def_id.rs
@@ -211,7 +211,7 @@ macro_rules! declare_id_wrapper {
 
         impl std::fmt::Debug for $name {
             fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-                std::fmt::Debug::fmt(&self.0, f)
+                std::fmt::Debug::fmt(&SolverDefId::from(self.0), f)
             }
         }
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs
index 42f1d926d7db3..43b47398f96db 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs
@@ -1922,10 +1922,14 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> {
 
     fn impl_specializes(
         self,
-        _specializing_impl_def_id: Self::ImplId,
-        _parent_impl_def_id: Self::ImplId,
+        specializing_impl_def_id: Self::ImplId,
+        parent_impl_def_id: Self::ImplId,
     ) -> bool {
-        false
+        crate::specialization::specializes(
+            self.db,
+            specializing_impl_def_id.0,
+            parent_impl_def_id.0,
+        )
     }
 
     fn next_trait_solver_globally(self) -> bool {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/specialization.rs b/src/tools/rust-analyzer/crates/hir-ty/src/specialization.rs
new file mode 100644
index 0000000000000..611947b96b713
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/specialization.rs
@@ -0,0 +1,150 @@
+//! Impl specialization related things
+
+use hir_def::{ImplId, nameres::crate_def_map};
+use intern::sym;
+use tracing::debug;
+
+use crate::{
+    db::HirDatabase,
+    next_solver::{
+        DbInterner, TypingMode,
+        infer::{
+            DbInternerInferExt,
+            traits::{Obligation, ObligationCause},
+        },
+        obligation_ctxt::ObligationCtxt,
+    },
+};
+
+// rustc does not have cycle handling for the `specializes` query, meaning a cycle is a bug,
+// and indeed I was unable to cause cycles even with erroneous code. However, in r-a we can
+// create a cycle if there is an error in the impl's where clauses. I believe well-formed code
+// cannot create a cycle, but a cycle handler is required nevertheless.
+fn specializes_cycle(
+    _db: &dyn HirDatabase,
+    _specializing_impl_def_id: ImplId,
+    _parent_impl_def_id: ImplId,
+) -> bool {
+    false
+}
+
+/// Is `specializing_impl_def_id` a specialization of `parent_impl_def_id`?
+///
+/// For every type that could apply to `specializing_impl_def_id`, we prove that
+/// the `parent_impl_def_id` also applies (i.e. it has a valid impl header and
+/// its where-clauses hold).
+///
+/// For the purposes of const traits, we also check that the specializing
+/// impl is not more restrictive than the parent impl. That is, if the
+/// `parent_impl_def_id` is a const impl (conditionally based off of some `[const]`
+/// bounds), then `specializing_impl_def_id` must also be const for the same
+/// set of types.
+#[salsa::tracked(cycle_result = specializes_cycle)]
+pub(crate) fn specializes(
+    db: &dyn HirDatabase,
+    specializing_impl_def_id: ImplId,
+    parent_impl_def_id: ImplId,
+) -> bool {
+    let module = specializing_impl_def_id.loc(db).container;
+
+    // We check that the specializing impl comes from a crate that has specialization enabled.
+    //
+    // We don't really care if the specialized impl (the parent) is in a crate that has
+    // specialization enabled, since it's not being specialized.
+    //
+    // rustc also checks whether the specializing impl comes from a macro marked
+    // `#[allow_internal_unstable(specialization)]`, but `#[allow_internal_unstable]`
+    // is an internal feature, std is not using it for specialization nor is likely to
+    // ever use it, and we don't have the span information necessary to replicate that.
+    let def_map = crate_def_map(db, module.krate());
+    if !def_map.is_unstable_feature_enabled(&sym::specialization)
+        && !def_map.is_unstable_feature_enabled(&sym::min_specialization)
+    {
+        return false;
+    }
+
+    let interner = DbInterner::new_with(db, Some(module.krate()), module.containing_block());
+
+    let specializing_impl_signature = db.impl_signature(specializing_impl_def_id);
+    let parent_impl_signature = db.impl_signature(parent_impl_def_id);
+
+    // We determine whether there's a subset relationship by:
+    //
+    // - replacing bound vars with placeholders in impl1,
+    // - assuming the where clauses for impl1,
+    // - instantiating impl2 with fresh inference variables,
+    // - unifying,
+    // - attempting to prove the where clauses for impl2
+    //
+    // The last three steps are encapsulated in `fulfill_implication`.
+    //
+    // See RFC 1210 for more details and justification.
+
+    // Currently we do not allow e.g., a negative impl to specialize a positive one
+    if specializing_impl_signature.is_negative() != parent_impl_signature.is_negative() {
+        return false;
+    }
+
+    // create a parameter environment corresponding to an identity instantiation of the specializing impl,
+    // i.e. the most generic instantiation of the specializing impl.
+    let param_env = db.trait_environment(specializing_impl_def_id.into()).env;
+
+    // Create an infcx, taking the predicates of the specializing impl as assumptions:
+    let infcx = interner.infer_ctxt().build(TypingMode::non_body_analysis());
+
+    let specializing_impl_trait_ref =
+        db.impl_trait(specializing_impl_def_id).unwrap().instantiate_identity();
+    let cause = &ObligationCause::dummy();
+    debug!(
+        "fulfill_implication({:?}, trait_ref={:?} |- {:?} applies)",
+        param_env, specializing_impl_trait_ref, parent_impl_def_id
+    );
+
+    // Attempt to prove that the parent impl applies, given all of the above.
+
+    let mut ocx = ObligationCtxt::new(&infcx);
+
+    let parent_args = infcx.fresh_args_for_item(parent_impl_def_id.into());
+    let parent_impl_trait_ref = db
+        .impl_trait(parent_impl_def_id)
+        .expect("expected source impl to be a trait impl")
+        .instantiate(interner, parent_args);
+
+    // do the impls unify? If not, no specialization.
+    let Ok(()) = ocx.eq(cause, param_env, specializing_impl_trait_ref, parent_impl_trait_ref)
+    else {
+        return false;
+    };
+
+    // Now check that the source trait ref satisfies all the where clauses of the target impl.
+    // This is not just for correctness; we also need this to constrain any params that may
+    // only be referenced via projection predicates.
+    if let Some(predicates) =
+        db.generic_predicates(parent_impl_def_id.into()).instantiate(interner, parent_args)
+    {
+        ocx.register_obligations(
+            predicates
+                .map(|predicate| Obligation::new(interner, cause.clone(), param_env, predicate)),
+        );
+    }
+
+    let errors = ocx.evaluate_obligations_error_on_ambiguity();
+    if !errors.is_empty() {
+        // no dice!
+        debug!(
+            "fulfill_implication: for impls on {:?} and {:?}, \
+                 could not fulfill: {:?} given {:?}",
+            specializing_impl_trait_ref, parent_impl_trait_ref, errors, param_env
+        );
+        return false;
+    }
+
+    // FIXME: Check impl constness (when we implement const impls).
+
+    debug!(
+        "fulfill_implication: an impl for {:?} specializes {:?}",
+        specializing_impl_trait_ref, parent_impl_trait_ref
+    );
+
+    true
+}
diff --git a/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs b/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs
index 920bdd9568fcf..756377fe56f71 100644
--- a/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs
+++ b/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs
@@ -517,4 +517,6 @@ define_symbols! {
     precision,
     width,
     never_type_fallback,
+    specialization,
+    min_specialization,
 }

From 0ab44184a42a27970f6d0611af98a389f9b34a5f Mon Sep 17 00:00:00 2001
From: A4-Tacks 
Date: Wed, 15 Oct 2025 18:48:31 +0800
Subject: [PATCH 59/76] Make add_braces applicable to assignments

```rust
fn foo() {
    let x =$0 n + 100;
}
```
->
```rust
fn foo() {
    let x = {
        n + 100
    };
}
```
---
 .../ide-assists/src/handlers/add_braces.rs    | 62 ++++++++++++++-----
 1 file changed, 46 insertions(+), 16 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_braces.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_braces.rs
index d855fb771846a..f5bbe8dda8c5d 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_braces.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_braces.rs
@@ -1,7 +1,8 @@
 use either::Either;
 use syntax::{
-    AstNode,
+    AstNode, T,
     ast::{self, edit::AstNodeEdit, syntax_factory::SyntaxFactory},
+    match_ast,
 };
 
 use crate::{AssistContext, AssistId, Assists};
@@ -37,6 +38,7 @@ pub(crate) fn add_braces(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
         match expr_type {
             ParentType::ClosureExpr => "Add braces to this closure body",
             ParentType::MatchArmExpr => "Add braces to this match arm expression",
+            ParentType::Assignment => "Add braces to this assignment expression",
         },
         expr.syntax().text_range(),
         |builder| {
@@ -57,29 +59,38 @@ pub(crate) fn add_braces(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
 enum ParentType {
     MatchArmExpr,
     ClosureExpr,
+    Assignment,
 }
 
 fn get_replacement_node(ctx: &AssistContext<'_>) -> Option<(ParentType, ast::Expr)> {
-    let node = ctx.find_node_at_offset::<Either<ast::MatchArm, ast::ClosureExpr>>()?;
-    if let Either::Left(match_arm) = &node {
+    let node = ctx.find_node_at_offset::<Either<ast::MatchArm, ast::ClosureExpr>>();
+    let (parent_type, body) = if let Some(eq_token) = ctx.find_token_syntax_at_offset(T![=]) {
+        let parent = eq_token.parent()?;
+        let body = match_ast! {
+            match parent {
+                ast::LetStmt(it) => it.initializer()?,
+                ast::LetExpr(it) => it.expr()?,
+                ast::Static(it) => it.body()?,
+                ast::Const(it) => it.body()?,
+                _ => return None,
+            }
+        };
+        (ParentType::Assignment, body)
+    } else if let Some(Either::Left(match_arm)) = &node {
         let match_arm_expr = match_arm.expr()?;
-
-        if matches!(match_arm_expr, ast::Expr::BlockExpr(_)) {
-            return None;
-        }
-
-        return Some((ParentType::MatchArmExpr, match_arm_expr));
-    } else if let Either::Right(closure_expr) = &node {
+        (ParentType::MatchArmExpr, match_arm_expr)
+    } else if let Some(Either::Right(closure_expr)) = &node {
         let body = closure_expr.body()?;
+        (ParentType::ClosureExpr, body)
+    } else {
+        return None;
+    };
 
-        if matches!(body, ast::Expr::BlockExpr(_)) {
-            return None;
-        }
-
-        return Some((ParentType::ClosureExpr, body));
+    if matches!(body, ast::Expr::BlockExpr(_)) {
+        return None;
     }
 
-    None
+    Some((parent_type, body))
 }
 
 #[cfg(test)]
@@ -134,6 +145,25 @@ fn foo() {
         );
     }
 
+    #[test]
+    fn suggest_add_braces_for_assignment() {
+        check_assist(
+            add_braces,
+            r#"
+fn foo() {
+    let x =$0 n + 100;
+}
+"#,
+            r#"
+fn foo() {
+    let x = {
+        n + 100
+    };
+}
+"#,
+        );
+    }
+
     #[test]
     fn no_assist_for_closures_with_braces() {
         check_assist_not_applicable(

From 71851588e9075c623c5fd55540d21127e43c64ec Mon Sep 17 00:00:00 2001
From: Johannes Altmanninger 
Date: Thu, 23 Oct 2025 19:57:56 +0200
Subject: [PATCH 60/76] Fix rustfmt for files that use 2024-edition syntax

"cargo fmt" works fine but "rustfmt" fails to format some files.

	$ rustfmt crates/ide-db/src/search.rs
	error: let chains are only allowed in Rust 2024 or later
	   --> /home/johannes/git/rust-analyzer/crates/ide-db/src/search.rs:298:12
	    |
	298 |         if let &Definition::Module(module) = self
	    |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

I guess I could work around this by setting my format command to
"cargo fmt -- $filename" instead of "rustfmt $filename".

But it'd be nice if this worked OOTB. Make it so by specifying the
edition in rustfmt.toml. We already specify the edition in several
other places.

changelog internal
---
 .../crates/syntax/src/ast/generated/nodes.rs  | 926 +++---------------
 .../crates/syntax/src/ast/generated/tokens.rs |  62 +-
 src/tools/rust-analyzer/rustfmt.toml          |   1 +
 3 files changed, 167 insertions(+), 822 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
index d60196d492fc3..6c1dcf336ac50 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
@@ -2,9 +2,9 @@
 
 #![allow(non_snake_case)]
 use crate::{
-    ast::{self, support, AstChildren, AstNode},
     SyntaxKind::{self, *},
     SyntaxNode, SyntaxToken, T,
+    ast::{self, AstChildren, AstNode, support},
 };
 use std::{fmt, hash};
 pub struct Abi {
@@ -2262,11 +2262,7 @@ impl AstNode for Abi {
     fn can_cast(kind: SyntaxKind) -> bool { kind == ABI }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -2298,11 +2294,7 @@ impl AstNode for ArgList {
     fn can_cast(kind: SyntaxKind) -> bool { kind == ARG_LIST }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -2334,11 +2326,7 @@ impl AstNode for ArrayExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == ARRAY_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -2370,11 +2358,7 @@ impl AstNode for ArrayType {
     fn can_cast(kind: SyntaxKind) -> bool { kind == ARRAY_TYPE }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -2406,11 +2390,7 @@ impl AstNode for AsmClobberAbi {
     fn can_cast(kind: SyntaxKind) -> bool { kind == ASM_CLOBBER_ABI }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -2442,11 +2422,7 @@ impl AstNode for AsmConst {
     fn can_cast(kind: SyntaxKind) -> bool { kind == ASM_CONST }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -2478,11 +2454,7 @@ impl AstNode for AsmDirSpec {
     fn can_cast(kind: SyntaxKind) -> bool { kind == ASM_DIR_SPEC }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -2514,11 +2486,7 @@ impl AstNode for AsmExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == ASM_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -2550,11 +2518,7 @@ impl AstNode for AsmLabel {
     fn can_cast(kind: SyntaxKind) -> bool { kind == ASM_LABEL }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -2586,11 +2550,7 @@ impl AstNode for AsmOperandExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == ASM_OPERAND_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -2622,11 +2582,7 @@ impl AstNode for AsmOperandNamed {
     fn can_cast(kind: SyntaxKind) -> bool { kind == ASM_OPERAND_NAMED }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -2658,11 +2614,7 @@ impl AstNode for AsmOption {
     fn can_cast(kind: SyntaxKind) -> bool { kind == ASM_OPTION }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -2694,11 +2646,7 @@ impl AstNode for AsmOptions {
     fn can_cast(kind: SyntaxKind) -> bool { kind == ASM_OPTIONS }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -2730,11 +2678,7 @@ impl AstNode for AsmRegOperand {
     fn can_cast(kind: SyntaxKind) -> bool { kind == ASM_REG_OPERAND }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -2766,11 +2710,7 @@ impl AstNode for AsmRegSpec {
     fn can_cast(kind: SyntaxKind) -> bool { kind == ASM_REG_SPEC }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -2802,11 +2742,7 @@ impl AstNode for AsmSym {
     fn can_cast(kind: SyntaxKind) -> bool { kind == ASM_SYM }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -2838,11 +2774,7 @@ impl AstNode for AssocItemList {
     fn can_cast(kind: SyntaxKind) -> bool { kind == ASSOC_ITEM_LIST }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -2874,11 +2806,7 @@ impl AstNode for AssocTypeArg {
     fn can_cast(kind: SyntaxKind) -> bool { kind == ASSOC_TYPE_ARG }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -2910,11 +2838,7 @@ impl AstNode for Attr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == ATTR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -2946,11 +2870,7 @@ impl AstNode for AwaitExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == AWAIT_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -2982,11 +2902,7 @@ impl AstNode for BecomeExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == BECOME_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3018,11 +2934,7 @@ impl AstNode for BinExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == BIN_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3054,11 +2966,7 @@ impl AstNode for BlockExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == BLOCK_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3090,11 +2998,7 @@ impl AstNode for BoxPat {
     fn can_cast(kind: SyntaxKind) -> bool { kind == BOX_PAT }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3126,11 +3030,7 @@ impl AstNode for BreakExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == BREAK_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3162,11 +3062,7 @@ impl AstNode for CallExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == CALL_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3198,11 +3094,7 @@ impl AstNode for CastExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == CAST_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3234,11 +3126,7 @@ impl AstNode for ClosureExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == CLOSURE_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3270,11 +3158,7 @@ impl AstNode for Const {
     fn can_cast(kind: SyntaxKind) -> bool { kind == CONST }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3306,11 +3190,7 @@ impl AstNode for ConstArg {
     fn can_cast(kind: SyntaxKind) -> bool { kind == CONST_ARG }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3342,11 +3222,7 @@ impl AstNode for ConstBlockPat {
     fn can_cast(kind: SyntaxKind) -> bool { kind == CONST_BLOCK_PAT }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3378,11 +3254,7 @@ impl AstNode for ConstParam {
     fn can_cast(kind: SyntaxKind) -> bool { kind == CONST_PARAM }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3414,11 +3286,7 @@ impl AstNode for ContinueExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == CONTINUE_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3450,11 +3318,7 @@ impl AstNode for DynTraitType {
     fn can_cast(kind: SyntaxKind) -> bool { kind == DYN_TRAIT_TYPE }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3486,11 +3350,7 @@ impl AstNode for Enum {
     fn can_cast(kind: SyntaxKind) -> bool { kind == ENUM }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3522,11 +3382,7 @@ impl AstNode for ExprStmt {
     fn can_cast(kind: SyntaxKind) -> bool { kind == EXPR_STMT }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3558,11 +3414,7 @@ impl AstNode for ExternBlock {
     fn can_cast(kind: SyntaxKind) -> bool { kind == EXTERN_BLOCK }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3594,11 +3446,7 @@ impl AstNode for ExternCrate {
     fn can_cast(kind: SyntaxKind) -> bool { kind == EXTERN_CRATE }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3630,11 +3478,7 @@ impl AstNode for ExternItemList {
     fn can_cast(kind: SyntaxKind) -> bool { kind == EXTERN_ITEM_LIST }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3666,11 +3510,7 @@ impl AstNode for FieldExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == FIELD_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3702,11 +3542,7 @@ impl AstNode for Fn {
     fn can_cast(kind: SyntaxKind) -> bool { kind == FN }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3738,11 +3574,7 @@ impl AstNode for FnPtrType {
     fn can_cast(kind: SyntaxKind) -> bool { kind == FN_PTR_TYPE }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3774,11 +3606,7 @@ impl AstNode for ForBinder {
     fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_BINDER }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3810,11 +3638,7 @@ impl AstNode for ForExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3846,11 +3670,7 @@ impl AstNode for ForType {
     fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_TYPE }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3882,11 +3702,7 @@ impl AstNode for FormatArgsArg {
     fn can_cast(kind: SyntaxKind) -> bool { kind == FORMAT_ARGS_ARG }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3918,11 +3734,7 @@ impl AstNode for FormatArgsExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == FORMAT_ARGS_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3954,11 +3766,7 @@ impl AstNode for GenericArgList {
     fn can_cast(kind: SyntaxKind) -> bool { kind == GENERIC_ARG_LIST }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -3990,11 +3798,7 @@ impl AstNode for GenericParamList {
     fn can_cast(kind: SyntaxKind) -> bool { kind == GENERIC_PARAM_LIST }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4026,11 +3830,7 @@ impl AstNode for IdentPat {
     fn can_cast(kind: SyntaxKind) -> bool { kind == IDENT_PAT }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4062,11 +3862,7 @@ impl AstNode for IfExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == IF_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4098,11 +3894,7 @@ impl AstNode for Impl {
     fn can_cast(kind: SyntaxKind) -> bool { kind == IMPL }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4134,11 +3926,7 @@ impl AstNode for ImplTraitType {
     fn can_cast(kind: SyntaxKind) -> bool { kind == IMPL_TRAIT_TYPE }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4170,11 +3958,7 @@ impl AstNode for IndexExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == INDEX_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4206,11 +3990,7 @@ impl AstNode for InferType {
     fn can_cast(kind: SyntaxKind) -> bool { kind == INFER_TYPE }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4242,11 +4022,7 @@ impl AstNode for ItemList {
     fn can_cast(kind: SyntaxKind) -> bool { kind == ITEM_LIST }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4278,11 +4054,7 @@ impl AstNode for Label {
     fn can_cast(kind: SyntaxKind) -> bool { kind == LABEL }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4314,11 +4086,7 @@ impl AstNode for LetElse {
     fn can_cast(kind: SyntaxKind) -> bool { kind == LET_ELSE }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4350,11 +4118,7 @@ impl AstNode for LetExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == LET_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4386,11 +4150,7 @@ impl AstNode for LetStmt {
     fn can_cast(kind: SyntaxKind) -> bool { kind == LET_STMT }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4422,11 +4182,7 @@ impl AstNode for Lifetime {
     fn can_cast(kind: SyntaxKind) -> bool { kind == LIFETIME }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4458,11 +4214,7 @@ impl AstNode for LifetimeArg {
     fn can_cast(kind: SyntaxKind) -> bool { kind == LIFETIME_ARG }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4494,11 +4246,7 @@ impl AstNode for LifetimeParam {
     fn can_cast(kind: SyntaxKind) -> bool { kind == LIFETIME_PARAM }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4530,11 +4278,7 @@ impl AstNode for Literal {
     fn can_cast(kind: SyntaxKind) -> bool { kind == LITERAL }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4566,11 +4310,7 @@ impl AstNode for LiteralPat {
     fn can_cast(kind: SyntaxKind) -> bool { kind == LITERAL_PAT }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4602,11 +4342,7 @@ impl AstNode for LoopExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == LOOP_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4638,11 +4374,7 @@ impl AstNode for MacroCall {
     fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_CALL }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4674,11 +4406,7 @@ impl AstNode for MacroDef {
     fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_DEF }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4710,11 +4438,7 @@ impl AstNode for MacroExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4746,11 +4470,7 @@ impl AstNode for MacroItems {
     fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_ITEMS }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4782,11 +4502,7 @@ impl AstNode for MacroPat {
     fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_PAT }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4818,11 +4534,7 @@ impl AstNode for MacroRules {
     fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_RULES }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4854,11 +4566,7 @@ impl AstNode for MacroStmts {
     fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_STMTS }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4890,11 +4598,7 @@ impl AstNode for MacroType {
     fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_TYPE }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4926,11 +4630,7 @@ impl AstNode for MatchArm {
     fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_ARM }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4962,11 +4662,7 @@ impl AstNode for MatchArmList {
     fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_ARM_LIST }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -4998,11 +4694,7 @@ impl AstNode for MatchExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5034,11 +4726,7 @@ impl AstNode for MatchGuard {
     fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_GUARD }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5070,11 +4758,7 @@ impl AstNode for Meta {
     fn can_cast(kind: SyntaxKind) -> bool { kind == META }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5106,11 +4790,7 @@ impl AstNode for MethodCallExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == METHOD_CALL_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5142,11 +4822,7 @@ impl AstNode for Module {
     fn can_cast(kind: SyntaxKind) -> bool { kind == MODULE }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5178,11 +4854,7 @@ impl AstNode for Name {
     fn can_cast(kind: SyntaxKind) -> bool { kind == NAME }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5214,11 +4886,7 @@ impl AstNode for NameRef {
     fn can_cast(kind: SyntaxKind) -> bool { kind == NAME_REF }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5250,11 +4918,7 @@ impl AstNode for NeverType {
     fn can_cast(kind: SyntaxKind) -> bool { kind == NEVER_TYPE }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5286,11 +4950,7 @@ impl AstNode for OffsetOfExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == OFFSET_OF_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5322,11 +4982,7 @@ impl AstNode for OrPat {
     fn can_cast(kind: SyntaxKind) -> bool { kind == OR_PAT }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5358,11 +5014,7 @@ impl AstNode for Param {
     fn can_cast(kind: SyntaxKind) -> bool { kind == PARAM }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5394,11 +5046,7 @@ impl AstNode for ParamList {
     fn can_cast(kind: SyntaxKind) -> bool { kind == PARAM_LIST }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5430,11 +5078,7 @@ impl AstNode for ParenExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == PAREN_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5466,11 +5110,7 @@ impl AstNode for ParenPat {
     fn can_cast(kind: SyntaxKind) -> bool { kind == PAREN_PAT }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5502,11 +5142,7 @@ impl AstNode for ParenType {
     fn can_cast(kind: SyntaxKind) -> bool { kind == PAREN_TYPE }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5538,11 +5174,7 @@ impl AstNode for ParenthesizedArgList {
     fn can_cast(kind: SyntaxKind) -> bool { kind == PARENTHESIZED_ARG_LIST }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5574,11 +5206,7 @@ impl AstNode for Path {
     fn can_cast(kind: SyntaxKind) -> bool { kind == PATH }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5610,11 +5238,7 @@ impl AstNode for PathExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5646,11 +5270,7 @@ impl AstNode for PathPat {
     fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_PAT }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5682,11 +5302,7 @@ impl AstNode for PathSegment {
     fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_SEGMENT }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5718,11 +5334,7 @@ impl AstNode for PathType {
     fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_TYPE }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5754,11 +5366,7 @@ impl AstNode for PrefixExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == PREFIX_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5790,11 +5398,7 @@ impl AstNode for PtrType {
     fn can_cast(kind: SyntaxKind) -> bool { kind == PTR_TYPE }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5826,11 +5430,7 @@ impl AstNode for RangeExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == RANGE_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5862,11 +5462,7 @@ impl AstNode for RangePat {
     fn can_cast(kind: SyntaxKind) -> bool { kind == RANGE_PAT }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5898,11 +5494,7 @@ impl AstNode for RecordExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5934,11 +5526,7 @@ impl AstNode for RecordExprField {
     fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_EXPR_FIELD }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -5970,11 +5558,7 @@ impl AstNode for RecordExprFieldList {
     fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_EXPR_FIELD_LIST }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6006,11 +5590,7 @@ impl AstNode for RecordField {
     fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_FIELD }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6042,11 +5622,7 @@ impl AstNode for RecordFieldList {
     fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_FIELD_LIST }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6078,11 +5654,7 @@ impl AstNode for RecordPat {
     fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_PAT }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6114,11 +5686,7 @@ impl AstNode for RecordPatField {
     fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_PAT_FIELD }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6150,11 +5718,7 @@ impl AstNode for RecordPatFieldList {
     fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_PAT_FIELD_LIST }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6186,11 +5750,7 @@ impl AstNode for RefExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == REF_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6222,11 +5782,7 @@ impl AstNode for RefPat {
     fn can_cast(kind: SyntaxKind) -> bool { kind == REF_PAT }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6258,11 +5814,7 @@ impl AstNode for RefType {
     fn can_cast(kind: SyntaxKind) -> bool { kind == REF_TYPE }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6294,11 +5846,7 @@ impl AstNode for Rename {
     fn can_cast(kind: SyntaxKind) -> bool { kind == RENAME }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6330,11 +5878,7 @@ impl AstNode for RestPat {
     fn can_cast(kind: SyntaxKind) -> bool { kind == REST_PAT }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6366,11 +5910,7 @@ impl AstNode for RetType {
     fn can_cast(kind: SyntaxKind) -> bool { kind == RET_TYPE }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6402,11 +5942,7 @@ impl AstNode for ReturnExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == RETURN_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6438,11 +5974,7 @@ impl AstNode for ReturnTypeSyntax {
     fn can_cast(kind: SyntaxKind) -> bool { kind == RETURN_TYPE_SYNTAX }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6474,11 +6006,7 @@ impl AstNode for SelfParam {
     fn can_cast(kind: SyntaxKind) -> bool { kind == SELF_PARAM }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6510,11 +6038,7 @@ impl AstNode for SlicePat {
     fn can_cast(kind: SyntaxKind) -> bool { kind == SLICE_PAT }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6546,11 +6070,7 @@ impl AstNode for SliceType {
     fn can_cast(kind: SyntaxKind) -> bool { kind == SLICE_TYPE }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6582,11 +6102,7 @@ impl AstNode for SourceFile {
     fn can_cast(kind: SyntaxKind) -> bool { kind == SOURCE_FILE }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6618,11 +6134,7 @@ impl AstNode for Static {
     fn can_cast(kind: SyntaxKind) -> bool { kind == STATIC }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6654,11 +6166,7 @@ impl AstNode for StmtList {
     fn can_cast(kind: SyntaxKind) -> bool { kind == STMT_LIST }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6690,11 +6198,7 @@ impl AstNode for Struct {
     fn can_cast(kind: SyntaxKind) -> bool { kind == STRUCT }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6726,11 +6230,7 @@ impl AstNode for TokenTree {
     fn can_cast(kind: SyntaxKind) -> bool { kind == TOKEN_TREE }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6762,11 +6262,7 @@ impl AstNode for Trait {
     fn can_cast(kind: SyntaxKind) -> bool { kind == TRAIT }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6798,11 +6294,7 @@ impl AstNode for TryExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == TRY_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6834,11 +6326,7 @@ impl AstNode for TupleExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6870,11 +6358,7 @@ impl AstNode for TupleField {
     fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_FIELD }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6906,11 +6390,7 @@ impl AstNode for TupleFieldList {
     fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_FIELD_LIST }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6942,11 +6422,7 @@ impl AstNode for TuplePat {
     fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_PAT }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -6978,11 +6454,7 @@ impl AstNode for TupleStructPat {
     fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_STRUCT_PAT }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -7014,11 +6486,7 @@ impl AstNode for TupleType {
     fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_TYPE }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -7050,11 +6518,7 @@ impl AstNode for TypeAlias {
     fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_ALIAS }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -7086,11 +6550,7 @@ impl AstNode for TypeAnchor {
     fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_ANCHOR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -7122,11 +6582,7 @@ impl AstNode for TypeArg {
     fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_ARG }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -7158,11 +6614,7 @@ impl AstNode for TypeBound {
     fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_BOUND }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -7194,11 +6646,7 @@ impl AstNode for TypeBoundList {
     fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_BOUND_LIST }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -7230,11 +6678,7 @@ impl AstNode for TypeParam {
     fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_PARAM }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -7266,11 +6710,7 @@ impl AstNode for UnderscoreExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == UNDERSCORE_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -7302,11 +6742,7 @@ impl AstNode for Union {
     fn can_cast(kind: SyntaxKind) -> bool { kind == UNION }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -7338,11 +6774,7 @@ impl AstNode for Use {
     fn can_cast(kind: SyntaxKind) -> bool { kind == USE }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -7374,11 +6806,7 @@ impl AstNode for UseBoundGenericArgs {
     fn can_cast(kind: SyntaxKind) -> bool { kind == USE_BOUND_GENERIC_ARGS }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -7410,11 +6838,7 @@ impl AstNode for UseTree {
     fn can_cast(kind: SyntaxKind) -> bool { kind == USE_TREE }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -7446,11 +6870,7 @@ impl AstNode for UseTreeList {
     fn can_cast(kind: SyntaxKind) -> bool { kind == USE_TREE_LIST }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -7482,11 +6902,7 @@ impl AstNode for Variant {
     fn can_cast(kind: SyntaxKind) -> bool { kind == VARIANT }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -7518,11 +6934,7 @@ impl AstNode for VariantList {
     fn can_cast(kind: SyntaxKind) -> bool { kind == VARIANT_LIST }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -7554,11 +6966,7 @@ impl AstNode for Visibility {
     fn can_cast(kind: SyntaxKind) -> bool { kind == VISIBILITY }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -7590,11 +6998,7 @@ impl AstNode for WhereClause {
     fn can_cast(kind: SyntaxKind) -> bool { kind == WHERE_CLAUSE }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -7626,11 +7030,7 @@ impl AstNode for WherePred {
     fn can_cast(kind: SyntaxKind) -> bool { kind == WHERE_PRED }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -7662,11 +7062,7 @@ impl AstNode for WhileExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == WHILE_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -7698,11 +7094,7 @@ impl AstNode for WildcardPat {
     fn can_cast(kind: SyntaxKind) -> bool { kind == WILDCARD_PAT }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -7734,11 +7126,7 @@ impl AstNode for YeetExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == YEET_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
@@ -7770,11 +7158,7 @@ impl AstNode for YieldExpr {
     fn can_cast(kind: SyntaxKind) -> bool { kind == YIELD_EXPR }
     #[inline]
     fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs
index b2f56c0b1dbf2..3dca0db826301 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/tokens.rs
@@ -1,9 +1,9 @@
 //! Generated by `cargo xtask codegen grammar`, do not edit by hand.
 
 use crate::{
-    ast::AstToken,
     SyntaxKind::{self, *},
     SyntaxToken,
+    ast::AstToken,
 };
 use std::{fmt, hash};
 pub struct Byte {
@@ -17,11 +17,7 @@ impl std::fmt::Display for Byte {
 impl AstToken for Byte {
     fn can_cast(kind: SyntaxKind) -> bool { kind == BYTE }
     fn cast(syntax: SyntaxToken) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     fn syntax(&self) -> &SyntaxToken { &self.syntax }
 }
@@ -51,11 +47,7 @@ impl std::fmt::Display for ByteString {
 impl AstToken for ByteString {
     fn can_cast(kind: SyntaxKind) -> bool { kind == BYTE_STRING }
     fn cast(syntax: SyntaxToken) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     fn syntax(&self) -> &SyntaxToken { &self.syntax }
 }
@@ -85,11 +77,7 @@ impl std::fmt::Display for CString {
 impl AstToken for CString {
     fn can_cast(kind: SyntaxKind) -> bool { kind == C_STRING }
     fn cast(syntax: SyntaxToken) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     fn syntax(&self) -> &SyntaxToken { &self.syntax }
 }
@@ -119,11 +107,7 @@ impl std::fmt::Display for Char {
 impl AstToken for Char {
     fn can_cast(kind: SyntaxKind) -> bool { kind == CHAR }
     fn cast(syntax: SyntaxToken) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     fn syntax(&self) -> &SyntaxToken { &self.syntax }
 }
@@ -153,11 +137,7 @@ impl std::fmt::Display for Comment {
 impl AstToken for Comment {
     fn can_cast(kind: SyntaxKind) -> bool { kind == COMMENT }
     fn cast(syntax: SyntaxToken) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     fn syntax(&self) -> &SyntaxToken { &self.syntax }
 }
@@ -187,11 +167,7 @@ impl std::fmt::Display for FloatNumber {
 impl AstToken for FloatNumber {
     fn can_cast(kind: SyntaxKind) -> bool { kind == FLOAT_NUMBER }
     fn cast(syntax: SyntaxToken) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     fn syntax(&self) -> &SyntaxToken { &self.syntax }
 }
@@ -221,11 +197,7 @@ impl std::fmt::Display for Ident {
 impl AstToken for Ident {
     fn can_cast(kind: SyntaxKind) -> bool { kind == IDENT }
     fn cast(syntax: SyntaxToken) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     fn syntax(&self) -> &SyntaxToken { &self.syntax }
 }
@@ -255,11 +227,7 @@ impl std::fmt::Display for IntNumber {
 impl AstToken for IntNumber {
     fn can_cast(kind: SyntaxKind) -> bool { kind == INT_NUMBER }
     fn cast(syntax: SyntaxToken) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     fn syntax(&self) -> &SyntaxToken { &self.syntax }
 }
@@ -289,11 +257,7 @@ impl std::fmt::Display for String {
 impl AstToken for String {
     fn can_cast(kind: SyntaxKind) -> bool { kind == STRING }
     fn cast(syntax: SyntaxToken) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     fn syntax(&self) -> &SyntaxToken { &self.syntax }
 }
@@ -323,11 +287,7 @@ impl std::fmt::Display for Whitespace {
 impl AstToken for Whitespace {
     fn can_cast(kind: SyntaxKind) -> bool { kind == WHITESPACE }
     fn cast(syntax: SyntaxToken) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
+        if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
     }
     fn syntax(&self) -> &SyntaxToken { &self.syntax }
 }
diff --git a/src/tools/rust-analyzer/rustfmt.toml b/src/tools/rust-analyzer/rustfmt.toml
index 20bf59547b86a..3ee7fdabdc217 100644
--- a/src/tools/rust-analyzer/rustfmt.toml
+++ b/src/tools/rust-analyzer/rustfmt.toml
@@ -1,2 +1,3 @@
+edition = "2024"
 reorder_modules = true
 use_small_heuristics = "Max"

From 617683f8d4d593141ada647debea854cb0d4d5be Mon Sep 17 00:00:00 2001
From: Steven Malis 
Date: Thu, 23 Oct 2025 19:18:18 -0400
Subject: [PATCH 61/76] Remove hir-ty/src/next_solver/mapping.rs

---
 src/tools/rust-analyzer/crates/hir-ty/src/db.rs |  4 ++--
 .../rust-analyzer/crates/hir-ty/src/display.rs  | 17 +++++++----------
 .../rust-analyzer/crates/hir-ty/src/infer.rs    | 12 ++++--------
 .../rust-analyzer/crates/hir-ty/src/lib.rs      | 12 +++---------
 .../crates/hir-ty/src/next_solver.rs            |  1 -
 .../crates/hir-ty/src/next_solver/interner.rs   |  3 +--
 .../crates/hir-ty/src/next_solver/mapping.rs    | 13 -------------
 .../crates/hir-ty/src/next_solver/ty.rs         | 11 ++++-------
 8 files changed, 21 insertions(+), 52 deletions(-)
 delete mode 100644 src/tools/rust-analyzer/crates/hir-ty/src/next_solver/mapping.rs

diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
index 2ef7963322995..9b58abbe4f925 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
@@ -277,7 +277,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
 
     // Interned IDs for solver integration
     #[salsa::interned]
-    fn intern_impl_trait_id(&self, id: ImplTraitId) -> InternedOpaqueTyId;
+    fn intern_impl_trait_id(&self, id: ImplTraitId<'_>) -> InternedOpaqueTyId;
 
     #[salsa::interned]
     fn intern_closure(&self, id: InternedClosure) -> InternedClosureId;
@@ -322,7 +322,7 @@ pub struct InternedConstParamId {
 #[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)]
 #[derive(PartialOrd, Ord)]
 pub struct InternedOpaqueTyId {
-    pub loc: ImplTraitId,
+    pub loc: ImplTraitId<'db>,
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
index e807ce62e8cfd..f8d9add42a8b1 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
@@ -58,7 +58,6 @@ use crate::{
         TraitRef, Ty, TyKind, TypingMode,
         abi::Safety,
         infer::{DbInternerInferExt, traits::ObligationCause},
-        mapping::ChalkToNextSolver,
     },
     primitive,
     utils::{self, detect_variant_from_bytes},
@@ -1126,9 +1125,9 @@ impl<'db> HirDisplay<'db> for Ty<'db> {
                             let datas = db
                                 .return_type_impl_traits(func)
                                 .expect("impl trait id without data");
-                            let data = (*datas).as_ref().map_bound(|rpit| {
-                                &rpit.impl_traits[idx.to_nextsolver(interner)].predicates
-                            });
+                            let data = (*datas)
+                                .as_ref()
+                                .map_bound(|rpit| &rpit.impl_traits[idx].predicates);
                             let bounds =
                                 || data.iter_instantiated_copied(f.interner, ty.args.as_slice());
                             let mut len = bounds().count();
@@ -1358,9 +1357,8 @@ impl<'db> HirDisplay<'db> for Ty<'db> {
                     ImplTraitId::ReturnTypeImplTrait(func, idx) => {
                         let datas =
                             db.return_type_impl_traits(func).expect("impl trait id without data");
-                        let data = (*datas).as_ref().map_bound(|rpit| {
-                            &rpit.impl_traits[idx.to_nextsolver(interner)].predicates
-                        });
+                        let data =
+                            (*datas).as_ref().map_bound(|rpit| &rpit.impl_traits[idx].predicates);
                         let bounds = data
                             .iter_instantiated_copied(interner, alias_ty.args.as_slice())
                             .collect::<Vec<_>>();
@@ -1377,9 +1375,8 @@ impl<'db> HirDisplay<'db> for Ty<'db> {
                     ImplTraitId::TypeAliasImplTrait(alias, idx) => {
                         let datas =
                             db.type_alias_impl_traits(alias).expect("impl trait id without data");
-                        let data = (*datas).as_ref().map_bound(|rpit| {
-                            &rpit.impl_traits[idx.to_nextsolver(interner)].predicates
-                        });
+                        let data =
+                            (*datas).as_ref().map_bound(|rpit| &rpit.impl_traits[idx].predicates);
                         let bounds = data
                             .iter_instantiated_copied(interner, alias_ty.args.as_slice())
                             .collect::<Vec<_>>();
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
index 9891f3f248bd8..21b6e053cc3b7 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
@@ -73,7 +73,6 @@ use crate::{
         abi::Safety,
         fold::fold_tys,
         infer::traits::{Obligation, ObligationCause},
-        mapping::ChalkToNextSolver,
     },
     traits::FnTrait,
     utils::TargetFeatureIsSafeInTarget,
@@ -1228,9 +1227,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
                     if matches!(mode, ImplTraitReplacingMode::TypeAlias) {
                         // RPITs don't have `tait_coercion_table`, so use inserted inference
                         // vars for them.
-                        if let Some(ty) =
-                            self.result.type_of_rpit.get(idx.to_nextsolver(self.interner()))
-                        {
+                        if let Some(ty) = self.result.type_of_rpit.get(idx) {
                             return *ty;
                         }
                         return ty;
@@ -1251,10 +1248,9 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
             let Some(impl_traits) = impl_traits else {
                 return ty;
             };
-            let bounds = (*impl_traits).as_ref().map_bound(|its| {
-                its.impl_traits[idx.to_nextsolver(self.interner())].predicates.as_slice()
-            });
-            let var = match self.result.type_of_rpit.entry(idx.to_nextsolver(self.interner())) {
+            let bounds =
+                (*impl_traits).as_ref().map_bound(|its| its.impl_traits[idx].predicates.as_slice());
+            let var = match self.result.type_of_rpit.entry(idx) {
                 Entry::Occupied(entry) => return *entry.get(),
                 Entry::Vacant(entry) => *entry.insert(self.table.next_ty_var()),
             };
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
index 094a3e5326e95..ecca1ef04da5f 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
@@ -57,7 +57,6 @@ use hir_def::{CallableDefId, TypeOrConstParamId, hir::ExprId, type_ref::Rawness}
 use hir_expand::name::Name;
 use indexmap::{IndexMap, map::Entry};
 use intern::{Symbol, sym};
-use la_arena::Idx;
 use mir::{MirEvalError, VTableMap};
 use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet};
 use rustc_type_ir::{
@@ -332,17 +331,12 @@ impl FnAbi {
 }
 
 #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
-pub enum ImplTraitId {
-    ReturnTypeImplTrait(hir_def::FunctionId, ImplTraitIdx), // FIXME(next-solver): Should be crate::nextsolver::ImplTraitIdx.
-    TypeAliasImplTrait(hir_def::TypeAliasId, ImplTraitIdx),
+pub enum ImplTraitId<'db> {
+    ReturnTypeImplTrait(hir_def::FunctionId, next_solver::ImplTraitIdx<'db>),
+    TypeAliasImplTrait(hir_def::TypeAliasId, next_solver::ImplTraitIdx<'db>),
     AsyncBlockTypeImplTrait(hir_def::DefWithBodyId, ExprId),
 }
 
-#[derive(PartialEq, Eq, Debug, Hash)]
-pub struct ImplTrait {}
-
-pub type ImplTraitIdx = Idx<ImplTrait>;
-
 /// 'Canonicalizes' the `t` by replacing any errors with new variables. Also
 /// ensures there are no unbound variables or inference variables anywhere in
 /// the `t`.
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver.rs
index 1fb9a82ac9e0f..8c52a847d1e91 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver.rs
@@ -11,7 +11,6 @@ pub mod infer;
 pub(crate) mod inspect;
 pub mod interner;
 mod ir_print;
-pub mod mapping;
 pub mod normalize;
 pub mod obligation_ctxt;
 mod opaques;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs
index a509fd893d3c7..06d35ba93d952 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs
@@ -47,7 +47,6 @@ use super::{
     abi::Safety,
     fold::{BoundVarReplacer, BoundVarReplacerDelegate, FnMutDelegate},
     generics::{Generics, generics},
-    mapping::ChalkToNextSolver,
     region::{
         BoundRegion, BoundRegionKind, EarlyParamRegion, LateParamRegion, PlaceholderRegion, Region,
     },
@@ -1883,7 +1882,7 @@ impl<'db> Interner for DbInterner<'db> {
                 match impl_trait_id {
                     crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => {
                         let infer = self.db().infer(func.into());
-                        EarlyBinder::bind(infer.type_of_rpit[idx.to_nextsolver(self)])
+                        EarlyBinder::bind(infer.type_of_rpit[idx])
                     }
                     crate::ImplTraitId::TypeAliasImplTrait(..)
                     | crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/mapping.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/mapping.rs
deleted file mode 100644
index 2b29561393ee7..0000000000000
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/mapping.rs
+++ /dev/null
@@ -1,13 +0,0 @@
-//! Things useful for mapping to/from Chalk and next-trait-solver types.
-
-use crate::next_solver::interner::DbInterner;
-
-pub(crate) trait ChalkToNextSolver<'db, Out> {
-    fn to_nextsolver(&self, interner: DbInterner<'db>) -> Out;
-}
-
-impl<'db> ChalkToNextSolver<'db, crate::lower::ImplTraitIdx<'db>> for crate::ImplTraitIdx {
-    fn to_nextsolver(&self, _interner: DbInterner<'db>) -> crate::lower::ImplTraitIdx<'db> {
-        crate::lower::ImplTraitIdx::from_raw(self.into_raw())
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs
index 3abbd28657467..95ee00d2754ba 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs
@@ -32,7 +32,6 @@ use crate::{
         CoroutineIdWrapper, FnSig, GenericArg, PolyFnSig, Region, TraitRef, TypeAliasIdWrapper,
         abi::Safety,
         interner::InternedWrapperNoDebug,
-        mapping::ChalkToNextSolver,
         util::{CoroutineArgsExt, IntegerTypeExt},
     },
 };
@@ -533,18 +532,16 @@ impl<'db> Ty<'db> {
                 match db.lookup_intern_impl_trait_id(opaque_ty.def_id.expect_opaque_ty()) {
                     ImplTraitId::ReturnTypeImplTrait(func, idx) => {
                         db.return_type_impl_traits(func).map(|it| {
-                            let data = (*it).as_ref().map_bound(|rpit| {
-                                &rpit.impl_traits[idx.to_nextsolver(interner)].predicates
-                            });
+                            let data =
+                                (*it).as_ref().map_bound(|rpit| &rpit.impl_traits[idx].predicates);
                             data.iter_instantiated_copied(interner, opaque_ty.args.as_slice())
                                 .collect()
                         })
                     }
                     ImplTraitId::TypeAliasImplTrait(alias, idx) => {
                         db.type_alias_impl_traits(alias).map(|it| {
-                            let data = (*it).as_ref().map_bound(|rpit| {
-                                &rpit.impl_traits[idx.to_nextsolver(interner)].predicates
-                            });
+                            let data =
+                                (*it).as_ref().map_bound(|rpit| &rpit.impl_traits[idx].predicates);
                             data.iter_instantiated_copied(interner, opaque_ty.args.as_slice())
                                 .collect()
                         })

From c859e76f57711b689aef85faec746d665057b93e Mon Sep 17 00:00:00 2001
From: Chayim Refael Friedman 
Date: Thu, 23 Oct 2025 22:04:37 +0300
Subject: [PATCH 62/76] Represent async blocks as `TyKind::Coroutine`, not as
 opaques

---
 .../crates/hir-ty/src/display.rs              | 88 +++++++++--------
 .../rust-analyzer/crates/hir-ty/src/infer.rs  |  1 -
 .../crates/hir-ty/src/infer/expr.rs           | 33 ++++---
 .../rust-analyzer/crates/hir-ty/src/lib.rs    |  3 +-
 .../hir-ty/src/next_solver/generic_arg.rs     |  1 -
 .../crates/hir-ty/src/next_solver/generics.rs | 26 +----
 .../crates/hir-ty/src/next_solver/interner.rs |  3 +-
 .../crates/hir-ty/src/next_solver/ty.rs       | 37 +++----
 .../crates/hir-ty/src/next_solver/util.rs     | 99 +------------------
 9 files changed, 94 insertions(+), 197 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
index f8d9add42a8b1..c749a3d24a259 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
@@ -48,7 +48,7 @@ use triomphe::Arc;
 
 use crate::{
     CallableDefId, FnAbi, ImplTraitId, MemoryMap, TraitEnvironment, consteval,
-    db::{HirDatabase, InternedClosure},
+    db::{HirDatabase, InternedClosure, InternedCoroutine},
     generics::generics,
     layout::Layout,
     mir::pad16,
@@ -1389,33 +1389,6 @@ impl<'db> HirDisplay<'db> for Ty<'db> {
                             SizedByDefault::Sized { anchor: krate },
                         )?;
                     }
-                    ImplTraitId::AsyncBlockTypeImplTrait(body, ..) => {
-                        let future_trait =
-                            LangItem::Future.resolve_trait(db, body.module(db).krate());
-                        let output = future_trait.and_then(|t| {
-                            t.trait_items(db)
-                                .associated_type_by_name(&Name::new_symbol_root(sym::Output))
-                        });
-                        write!(f, "impl ")?;
-                        if let Some(t) = future_trait {
-                            f.start_location_link(t.into());
-                        }
-                        write!(f, "Future")?;
-                        if future_trait.is_some() {
-                            f.end_location_link();
-                        }
-                        write!(f, "<")?;
-                        if let Some(t) = output {
-                            f.start_location_link(t.into());
-                        }
-                        write!(f, "Output")?;
-                        if output.is_some() {
-                            f.end_location_link();
-                        }
-                        write!(f, " = ")?;
-                        alias_ty.args.type_at(0).hir_fmt(f)?;
-                        write!(f, ">")?;
-                    }
                 }
             }
             TyKind::Closure(id, substs) => {
@@ -1567,23 +1540,56 @@ impl<'db> HirDisplay<'db> for Ty<'db> {
                 }
             }
             TyKind::Infer(..) => write!(f, "_")?,
-            TyKind::Coroutine(_, subst) => {
-                if f.display_kind.is_source_code() {
-                    return Err(HirDisplayError::DisplaySourceCodeError(
-                        DisplaySourceCodeError::Coroutine,
-                    ));
-                }
+            TyKind::Coroutine(coroutine_id, subst) => {
+                let InternedCoroutine(owner, expr_id) = coroutine_id.0.loc(db);
                 let CoroutineArgsParts { resume_ty, yield_ty, return_ty, .. } =
                     subst.split_coroutine_args();
-                write!(f, "|")?;
-                resume_ty.hir_fmt(f)?;
-                write!(f, "|")?;
+                let body = db.body(owner);
+                match &body[expr_id] {
+                    hir_def::hir::Expr::Async { .. } => {
+                        let future_trait =
+                            LangItem::Future.resolve_trait(db, owner.module(db).krate());
+                        let output = future_trait.and_then(|t| {
+                            t.trait_items(db)
+                                .associated_type_by_name(&Name::new_symbol_root(sym::Output))
+                        });
+                        write!(f, "impl ")?;
+                        if let Some(t) = future_trait {
+                            f.start_location_link(t.into());
+                        }
+                        write!(f, "Future")?;
+                        if future_trait.is_some() {
+                            f.end_location_link();
+                        }
+                        write!(f, "<")?;
+                        if let Some(t) = output {
+                            f.start_location_link(t.into());
+                        }
+                        write!(f, "Output")?;
+                        if output.is_some() {
+                            f.end_location_link();
+                        }
+                        write!(f, " = ")?;
+                        return_ty.hir_fmt(f)?;
+                        write!(f, ">")?;
+                    }
+                    _ => {
+                        if f.display_kind.is_source_code() {
+                            return Err(HirDisplayError::DisplaySourceCodeError(
+                                DisplaySourceCodeError::Coroutine,
+                            ));
+                        }
+                        write!(f, "|")?;
+                        resume_ty.hir_fmt(f)?;
+                        write!(f, "|")?;
 
-                write!(f, " yields ")?;
-                yield_ty.hir_fmt(f)?;
+                        write!(f, " yields ")?;
+                        yield_ty.hir_fmt(f)?;
 
-                write!(f, " -> ")?;
-                return_ty.hir_fmt(f)?;
+                        write!(f, " -> ")?;
+                        return_ty.hir_fmt(f)?;
+                    }
+                }
             }
             TyKind::CoroutineWitness(..) => write!(f, "{{coroutine witness}}")?,
             TyKind::Pat(_, _) => write!(f, "{{pat}}")?,
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
index 21b6e053cc3b7..361e66522df65 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
@@ -1243,7 +1243,6 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
                     }
                     (self.db.type_alias_impl_traits(def), idx)
                 }
-                _ => unreachable!(),
             };
             let Some(impl_traits) = impl_traits else {
                 return ty;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
index efb7244ff6375..fd4e374d9c899 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
@@ -18,7 +18,7 @@ use hir_expand::name::Name;
 use intern::sym;
 use rustc_ast_ir::Mutability;
 use rustc_type_ir::{
-    AliasTyKind, InferTy, Interner,
+    CoroutineArgs, CoroutineArgsParts, InferTy, Interner,
     inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike, Ty as _},
 };
 use syntax::ast::RangeOp;
@@ -29,6 +29,7 @@ use crate::{
     IncorrectGenericsLenKind, Rawness, TraitEnvironment,
     autoderef::overloaded_deref_ty,
     consteval,
+    db::InternedCoroutine,
     generics::generics,
     infer::{
         AllowTwoPhase, BreakableKind,
@@ -43,7 +44,7 @@ use crate::{
     },
     method_resolution::{self, VisibleFromModule},
     next_solver::{
-        AliasTy, Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, TraitRef, Ty, TyKind,
+        Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, TraitRef, Ty, TyKind,
         TypeError,
         infer::{
             InferOk,
@@ -1132,18 +1133,26 @@ impl<'db> InferenceContext<'_, 'db> {
         inner_ty: Ty<'db>,
         tgt_expr: ExprId,
     ) -> Ty<'db> {
-        // Use the first type parameter as the output type of future.
-        // existential type AsyncBlockImplTrait<InnerType>: Future<Output = InnerType>
-        let impl_trait_id = crate::ImplTraitId::AsyncBlockTypeImplTrait(self.owner, tgt_expr);
-        let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into();
-        Ty::new_alias(
+        let coroutine_id = InternedCoroutine(self.owner, tgt_expr);
+        let coroutine_id = self.db.intern_coroutine(coroutine_id).into();
+        let parent_args = GenericArgs::identity_for_item(self.interner(), self.generic_def.into());
+        Ty::new_coroutine(
             self.interner(),
-            AliasTyKind::Opaque,
-            AliasTy::new(
+            coroutine_id,
+            CoroutineArgs::new(
                 self.interner(),
-                opaque_ty_id,
-                GenericArgs::new_from_iter(self.interner(), [inner_ty.into()]),
-            ),
+                CoroutineArgsParts {
+                    parent_args,
+                    kind_ty: self.types.unit,
+                    // rustc uses a special lang item type for the resume ty. I don't believe this can cause us problems.
+                    resume_ty: self.types.unit,
+                    yield_ty: self.types.unit,
+                    return_ty: inner_ty,
+                    // FIXME: Infer upvars.
+                    tupled_upvars_ty: self.types.unit,
+                },
+            )
+            .args,
         )
     }
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
index ecca1ef04da5f..2942c0f7a9d36 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
@@ -53,7 +53,7 @@ mod variance;
 
 use std::hash::Hash;
 
-use hir_def::{CallableDefId, TypeOrConstParamId, hir::ExprId, type_ref::Rawness};
+use hir_def::{CallableDefId, TypeOrConstParamId, type_ref::Rawness};
 use hir_expand::name::Name;
 use indexmap::{IndexMap, map::Entry};
 use intern::{Symbol, sym};
@@ -334,7 +334,6 @@ impl FnAbi {
 pub enum ImplTraitId<'db> {
     ReturnTypeImplTrait(hir_def::FunctionId, next_solver::ImplTraitIdx<'db>),
     TypeAliasImplTrait(hir_def::TypeAliasId, next_solver::ImplTraitIdx<'db>),
-    AsyncBlockTypeImplTrait(hir_def::DefWithBodyId, ExprId),
 }
 
 /// 'Canonicalizes' the `t` by replacing any errors with new variables. Also
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generic_arg.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generic_arg.rs
index 24f22bcb0c3e2..90bd44aee86f0 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generic_arg.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generic_arg.rs
@@ -446,7 +446,6 @@ impl<'db> rustc_type_ir::inherent::GenericArgs<DbInterner<'db>> for GenericArgs<
                 signature_parts_ty,
                 tupled_upvars_ty,
                 coroutine_captures_by_ref_ty,
-                _coroutine_witness_ty,
             ] => rustc_type_ir::CoroutineClosureArgsParts {
                 parent_args: GenericArgs::new_from_iter(
                     DbInterner::conjure(),
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generics.rs
index d5a9a6f527bb5..4d164a7e3bc5e 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generics.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/generics.rs
@@ -2,10 +2,7 @@
 
 use hir_def::{
     ConstParamId, GenericDefId, GenericParamId, LifetimeParamId, TypeOrConstParamId, TypeParamId,
-    hir::generics::{
-        GenericParams, LocalTypeOrConstParamId, TypeOrConstParamData, TypeParamData,
-        TypeParamProvenance,
-    },
+    hir::generics::{GenericParams, TypeOrConstParamData},
 };
 
 use crate::{db::HirDatabase, generics::parent_generic_def};
@@ -67,27 +64,6 @@ pub(crate) fn generics(db: &dyn HirDatabase, def: SolverDefId) -> Generics {
                 crate::ImplTraitId::TypeAliasImplTrait(type_alias_id, _) => {
                     (Some(type_alias_id.into()), Vec::new())
                 }
-                crate::ImplTraitId::AsyncBlockTypeImplTrait(_def, _) => {
-                    let param = TypeOrConstParamData::TypeParamData(TypeParamData {
-                        name: None,
-                        default: None,
-                        provenance: TypeParamProvenance::TypeParamList,
-                    });
-                    // Yes, there is a parent but we don't include it in the generics
-                    // FIXME: It seems utterly sensitive to fake a generic param here.
-                    // Also, what a horrible mess!
-                    (
-                        None,
-                        vec![mk_ty(
-                            GenericDefId::FunctionId(salsa::plumbing::FromId::from_id(unsafe {
-                                salsa::Id::from_index(salsa::Id::MAX_U32 - 1)
-                            })),
-                            0,
-                            LocalTypeOrConstParamId::from_raw(la_arena::RawIdx::from_u32(0)),
-                            &param,
-                        )],
-                    )
-                }
             }
         }
         _ => panic!("No generics for {def:?}"),
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs
index 06d35ba93d952..e3c65689d3fd5 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs
@@ -1884,8 +1884,7 @@ impl<'db> Interner for DbInterner<'db> {
                         let infer = self.db().infer(func.into());
                         EarlyBinder::bind(infer.type_of_rpit[idx])
                     }
-                    crate::ImplTraitId::TypeAliasImplTrait(..)
-                    | crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => {
+                    crate::ImplTraitId::TypeAliasImplTrait(..) => {
                         // FIXME(next-solver)
                         EarlyBinder::bind(Ty::new_error(self, ErrorGuaranteed))
                     }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs
index 95ee00d2754ba..1443e2f0b3126 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs
@@ -26,7 +26,7 @@ use rustc_type_ir::{
 
 use crate::{
     ImplTraitId,
-    db::HirDatabase,
+    db::{HirDatabase, InternedCoroutine},
     next_solver::{
         AdtDef, AliasTy, Binder, CallableIdWrapper, Clause, ClauseKind, ClosureIdWrapper, Const,
         CoroutineIdWrapper, FnSig, GenericArg, PolyFnSig, Region, TraitRef, TypeAliasIdWrapper,
@@ -546,23 +546,6 @@ impl<'db> Ty<'db> {
                                 .collect()
                         })
                     }
-                    ImplTraitId::AsyncBlockTypeImplTrait(def, _) => {
-                        let krate = def.module(db).krate();
-                        if let Some(future_trait) = LangItem::Future.resolve_trait(db, krate) {
-                            // This is only used by type walking.
-                            // Parameters will be walked outside, and projection predicate is not used.
-                            // So just provide the Future trait.
-                            let impl_bound = TraitRef::new(
-                                interner,
-                                future_trait.into(),
-                                GenericArgs::new_from_iter(interner, []),
-                            )
-                            .upcast(interner);
-                            Some(vec![impl_bound])
-                        } else {
-                            None
-                        }
-                    }
                 }
             }
             TyKind::Param(param) => {
@@ -592,6 +575,24 @@ impl<'db> Ty<'db> {
                     _ => None,
                 }
             }
+            TyKind::Coroutine(coroutine_id, _args) => {
+                let InternedCoroutine(owner, _) = coroutine_id.0.loc(db);
+                let krate = owner.module(db).krate();
+                if let Some(future_trait) = LangItem::Future.resolve_trait(db, krate) {
+                    // This is only used by type walking.
+                    // Parameters will be walked outside, and projection predicate is not used.
+                    // So just provide the Future trait.
+                    let impl_bound = TraitRef::new(
+                        interner,
+                        future_trait.into(),
+                        GenericArgs::new_from_iter(interner, []),
+                    )
+                    .upcast(interner);
+                    Some(vec![impl_bound])
+                } else {
+                    None
+                }
+            }
             _ => None,
         }
     }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/util.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/util.rs
index bb0d0552c7109..d113f76a327d0 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/util.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/util.rs
@@ -7,11 +7,10 @@ use std::{
 
 use base_db::Crate;
 use hir_def::{BlockId, HasModule, lang_item::LangItem};
-use intern::sym;
 use la_arena::Idx;
 use rustc_abi::{Float, HasDataLayout, Integer, IntegerType, Primitive, ReprOptions};
 use rustc_type_ir::{
-    ConstKind, CoroutineArgs, DebruijnIndex, FloatTy, GenericArgKind, INNERMOST, IntTy, Interner,
+    ConstKind, CoroutineArgs, DebruijnIndex, FloatTy, INNERMOST, IntTy, Interner,
     PredicatePolarity, RegionKind, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeSuperVisitable,
     TypeVisitableExt, TypeVisitor, UintTy, UniverseIndex,
     inherent::{
@@ -32,9 +31,8 @@ use crate::{
 };
 
 use super::{
-    AliasTerm, AliasTy, Binder, BoundRegion, BoundTy, BoundTyKind, BoundVarKind, BoundVarKinds,
-    Clause, ClauseKind, Clauses, Const, DbInterner, EarlyBinder, GenericArgs, Predicate,
-    PredicateKind, ProjectionPredicate, Region, SolverDefId, Term, TraitPredicate, TraitRef, Ty,
+    Binder, BoundRegion, BoundTy, Clause, ClauseKind, Clauses, Const, DbInterner, EarlyBinder,
+    GenericArgs, Predicate, PredicateKind, Region, SolverDefId, TraitPredicate, TraitRef, Ty,
     TyKind,
     fold::{BoundVarReplacer, FnMutDelegate},
 };
@@ -578,98 +576,9 @@ pub fn explicit_item_bounds<'db>(
                     let data = &datas.impl_traits[Idx::from_raw(idx.into_raw())];
                     EarlyBinder::bind(Clauses::new_from_iter(interner, data.predicates.clone()))
                 }
-                crate::ImplTraitId::AsyncBlockTypeImplTrait(..) => {
-                    if let Some((future_trait, future_output)) = LangItem::Future
-                        .resolve_trait(db, interner.krate.expect("Must have interner.krate"))
-                        .and_then(|trait_| {
-                            let alias = trait_.trait_items(db).associated_type_by_name(
-                                &hir_expand::name::Name::new_symbol_root(sym::Output.clone()),
-                            )?;
-                            Some((trait_, alias))
-                        })
-                    {
-                        let args = GenericArgs::identity_for_item(interner, def_id);
-                        let out = args.as_slice()[0];
-                        let mut predicates = vec![];
-
-                        let item_ty = Ty::new_alias(
-                            interner,
-                            rustc_type_ir::AliasTyKind::Opaque,
-                            AliasTy::new_from_args(interner, def_id, args),
-                        );
-
-                        let kind = PredicateKind::Clause(ClauseKind::Trait(TraitPredicate {
-                            polarity: rustc_type_ir::PredicatePolarity::Positive,
-                            trait_ref: TraitRef::new_from_args(
-                                interner,
-                                future_trait.into(),
-                                GenericArgs::new_from_iter(interner, [item_ty.into()]),
-                            ),
-                        }));
-                        predicates.push(Clause(Predicate::new(
-                            interner,
-                            Binder::bind_with_vars(
-                                kind,
-                                BoundVarKinds::new_from_iter(
-                                    interner,
-                                    [BoundVarKind::Ty(BoundTyKind::Anon)],
-                                ),
-                            ),
-                        )));
-                        let sized_trait = LangItem::Sized
-                            .resolve_trait(db, interner.krate.expect("Must have interner.krate"));
-                        if let Some(sized_trait_) = sized_trait {
-                            let kind = PredicateKind::Clause(ClauseKind::Trait(TraitPredicate {
-                                polarity: rustc_type_ir::PredicatePolarity::Positive,
-                                trait_ref: TraitRef::new_from_args(
-                                    interner,
-                                    sized_trait_.into(),
-                                    GenericArgs::new_from_iter(interner, [item_ty.into()]),
-                                ),
-                            }));
-                            predicates.push(Clause(Predicate::new(
-                                interner,
-                                Binder::bind_with_vars(
-                                    kind,
-                                    BoundVarKinds::new_from_iter(
-                                        interner,
-                                        [BoundVarKind::Ty(BoundTyKind::Anon)],
-                                    ),
-                                ),
-                            )));
-                        }
-                        let kind =
-                            PredicateKind::Clause(ClauseKind::Projection(ProjectionPredicate {
-                                projection_term: AliasTerm::new_from_args(
-                                    interner,
-                                    future_output.into(),
-                                    GenericArgs::new_from_iter(interner, [item_ty.into()]),
-                                ),
-                                term: match out.kind() {
-                                    GenericArgKind::Lifetime(_lt) => panic!(),
-                                    GenericArgKind::Type(ty) => Term::Ty(ty),
-                                    GenericArgKind::Const(const_) => Term::Const(const_),
-                                },
-                            }));
-                        predicates.push(Clause(Predicate::new(
-                            interner,
-                            Binder::bind_with_vars(
-                                kind,
-                                BoundVarKinds::new_from_iter(
-                                    interner,
-                                    [BoundVarKind::Ty(BoundTyKind::Anon)],
-                                ),
-                            ),
-                        )));
-                        EarlyBinder::bind(Clauses::new_from_iter(interner, predicates))
-                    } else {
-                        // If failed to find Symbol’s value as variable is void: Future::Output, return empty bounds as fallback.
-                        EarlyBinder::bind(Clauses::new_from_iter(interner, []))
-                    }
-                }
             }
         }
-        _ => panic!("Unexpected GeneridDefId"),
+        _ => panic!("Unexpected GenericDefId"),
     }
 }
 

From 3a47c64664d2ee97e9f5c6d76c87d4fe60cc1bd0 Mon Sep 17 00:00:00 2001
From: Chayim Refael Friedman 
Date: Fri, 24 Oct 2025 01:11:50 +0300
Subject: [PATCH 63/76] Lower async closures to `TyKind::CoroutineClosure`

Instead of `TyKind::Closure`.

Note: the same `InternedCoroutineId` is used both for the *async closure* and for the *async block it returns*. When used in `TyKind::CoroutineClosure`, it represents the closure; when used in `TyKind::Coroutine`, it represents the async block. The generic args differ between the two, though.

Also noteworthy is that we distinguish between the different kinds of coroutines (general coroutines, async coroutines, and eventually gen coroutines too) via the expression that produced them (stored in the `InternedCoroutineId`), as sketched below. It might be worth introducing a `CoroutineKind` field on `InternedCoroutineId`, although this is not done in this PR.
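
A minimal sketch of that classification, following the `display.rs` hunk in this patch and assuming the accessors it uses (`loc`, `body`, indexing by `ExprId`); `CoroutineSource` and `coroutine_source` are hypothetical names used here for illustration only:

```rust
// Illustrative only: classify an interned coroutine by the expression that
// produced it, mirroring the `match` added to `display.rs` in this patch.
use crate::db::{HirDatabase, InternedCoroutine, InternedCoroutineId};
use hir_def::hir::{ClosureKind, Expr};

enum CoroutineSource {
    AsyncBlock,   // `async { .. }`, displayed as `impl Future<Output = ..>`
    AsyncClosure, // the coroutine returned by an `async |..| ..`
    Plain,        // an ordinary (`yield`-style) coroutine closure body
}

fn coroutine_source(db: &dyn HirDatabase, id: InternedCoroutineId) -> CoroutineSource {
    // The interned id records the owning body and the producing expression.
    let InternedCoroutine(owner, expr_id) = id.loc(db);
    let body = db.body(owner);
    match &body[expr_id] {
        Expr::Async { .. } => CoroutineSource::AsyncBlock,
        Expr::Closure { closure_kind: ClosureKind::Async, .. } => CoroutineSource::AsyncClosure,
        _ => CoroutineSource::Plain,
    }
}
```

When the same id appears in `TyKind::CoroutineClosure` it stands for the closure itself, so callers also need to consult the surrounding `TyKind`, not just the interned id.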
---
 .../crates/hir-ty/src/display.rs              | 92 +++++++++++++++--
 .../crates/hir-ty/src/infer/closure.rs        | 79 ++++++++++++---
 .../crates/hir-ty/src/next_solver/interner.rs | 72 ++++++++++----
 .../crates/hir-ty/src/next_solver/ty.rs       | 42 +++++++-
 .../crates/hir-ty/src/tests/simple.rs         |  4 +-
 .../crates/hir-ty/src/tests/traits.rs         | 12 +--
 src/tools/rust-analyzer/crates/hir/src/lib.rs | 98 ++++++++++++-------
 .../src/handlers/convert_closure_to_fn.rs     |  1 -
 8 files changed, 314 insertions(+), 86 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
index c749a3d24a259..dd1b212d4c294 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
@@ -38,7 +38,8 @@ use rustc_apfloat::{
 use rustc_ast_ir::FloatTy;
 use rustc_hash::FxHashSet;
 use rustc_type_ir::{
-    AliasTyKind, BoundVarIndexKind, CoroutineArgsParts, RegionKind, Upcast,
+    AliasTyKind, BoundVarIndexKind, CoroutineArgsParts, CoroutineClosureArgsParts, RegionKind,
+    Upcast,
     inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike, Term as _, Ty as _, Tys as _},
 };
 use smallvec::SmallVec;
@@ -1444,14 +1445,83 @@ impl<'db> HirDisplay<'db> for Ty<'db> {
                     }
                     if f.closure_style == ClosureStyle::RANotation || !sig.output().is_unit() {
                         write!(f, " -> ")?;
-                        // FIXME: We display `AsyncFn` as `-> impl Future`, but this is hard to fix because
-                        // we don't have a trait environment here, required to normalize `::Output`.
                         sig.output().hir_fmt(f)?;
                     }
                 } else {
                     write!(f, "{{closure}}")?;
                 }
             }
+            TyKind::CoroutineClosure(id, args) => {
+                let id = id.0;
+                if f.display_kind.is_source_code() {
+                    if !f.display_kind.allows_opaque() {
+                        return Err(HirDisplayError::DisplaySourceCodeError(
+                            DisplaySourceCodeError::OpaqueType,
+                        ));
+                    } else if f.closure_style != ClosureStyle::ImplFn {
+                        never!("Only `impl Fn` is valid for displaying closures in source code");
+                    }
+                }
+                match f.closure_style {
+                    ClosureStyle::Hide => return write!(f, "{TYPE_HINT_TRUNCATION}"),
+                    ClosureStyle::ClosureWithId => {
+                        return write!(
+                            f,
+                            "{{async closure#{:?}}}",
+                            salsa::plumbing::AsId::as_id(&id).index()
+                        );
+                    }
+                    ClosureStyle::ClosureWithSubst => {
+                        write!(
+                            f,
+                            "{{async closure#{:?}}}",
+                            salsa::plumbing::AsId::as_id(&id).index()
+                        )?;
+                        return hir_fmt_generics(f, args.as_slice(), None, None);
+                    }
+                    _ => (),
+                }
+                let CoroutineClosureArgsParts { closure_kind_ty, signature_parts_ty, .. } =
+                    args.split_coroutine_closure_args();
+                let kind = closure_kind_ty.to_opt_closure_kind().unwrap();
+                let kind = match kind {
+                    rustc_type_ir::ClosureKind::Fn => "AsyncFn",
+                    rustc_type_ir::ClosureKind::FnMut => "AsyncFnMut",
+                    rustc_type_ir::ClosureKind::FnOnce => "AsyncFnOnce",
+                };
+                let TyKind::FnPtr(coroutine_sig, _) = signature_parts_ty.kind() else {
+                    unreachable!("invalid coroutine closure signature");
+                };
+                let coroutine_sig = coroutine_sig.skip_binder();
+                let coroutine_inputs = coroutine_sig.inputs();
+                let TyKind::Tuple(coroutine_inputs) = coroutine_inputs.as_slice()[1].kind() else {
+                    unreachable!("invalid coroutine closure signature");
+                };
+                let TyKind::Tuple(coroutine_output) = coroutine_sig.output().kind() else {
+                    unreachable!("invalid coroutine closure signature");
+                };
+                let coroutine_output = coroutine_output.as_slice()[1];
+                match f.closure_style {
+                    ClosureStyle::ImplFn => write!(f, "impl {kind}(")?,
+                    ClosureStyle::RANotation => write!(f, "async |")?,
+                    _ => unreachable!(),
+                }
+                if coroutine_inputs.is_empty() {
+                } else if f.should_truncate() {
+                    write!(f, "{TYPE_HINT_TRUNCATION}")?;
+                } else {
+                    f.write_joined(coroutine_inputs, ", ")?;
+                };
+                match f.closure_style {
+                    ClosureStyle::ImplFn => write!(f, ")")?,
+                    ClosureStyle::RANotation => write!(f, "|")?,
+                    _ => unreachable!(),
+                }
+                if f.closure_style == ClosureStyle::RANotation || !coroutine_output.is_unit() {
+                    write!(f, " -> ")?;
+                    coroutine_output.hir_fmt(f)?;
+                }
+            }
             TyKind::Placeholder(_) => write!(f, "{{placeholder}}")?,
             TyKind::Param(param) => {
                 // FIXME: We should not access `param.id`, it should be removed, and we should know the
@@ -1545,8 +1615,13 @@ impl<'db> HirDisplay<'db> for Ty<'db> {
                 let CoroutineArgsParts { resume_ty, yield_ty, return_ty, .. } =
                     subst.split_coroutine_args();
                 let body = db.body(owner);
-                match &body[expr_id] {
-                    hir_def::hir::Expr::Async { .. } => {
+                let expr = &body[expr_id];
+                match expr {
+                    hir_def::hir::Expr::Closure {
+                        closure_kind: hir_def::hir::ClosureKind::Async,
+                        ..
+                    }
+                    | hir_def::hir::Expr::Async { .. } => {
                         let future_trait =
                             LangItem::Future.resolve_trait(db, owner.module(db).krate());
                         let output = future_trait.and_then(|t| {
@@ -1573,7 +1648,10 @@ impl<'db> HirDisplay<'db> for Ty<'db> {
                         return_ty.hir_fmt(f)?;
                         write!(f, ">")?;
                     }
-                    _ => {
+                    hir_def::hir::Expr::Closure {
+                        closure_kind: hir_def::hir::ClosureKind::Coroutine(..),
+                        ..
+                    } => {
                         if f.display_kind.is_source_code() {
                             return Err(HirDisplayError::DisplaySourceCodeError(
                                 DisplaySourceCodeError::Coroutine,
@@ -1589,12 +1667,12 @@ impl<'db> HirDisplay<'db> for Ty<'db> {
                         write!(f, " -> ")?;
                         return_ty.hir_fmt(f)?;
                     }
+                    _ => panic!("invalid expr for coroutine: {expr:?}"),
                 }
             }
             TyKind::CoroutineWitness(..) => write!(f, "{{coroutine witness}}")?,
             TyKind::Pat(_, _) => write!(f, "{{pat}}")?,
             TyKind::UnsafeBinder(_) => write!(f, "{{unsafe binder}}")?,
-            TyKind::CoroutineClosure(_, _) => write!(f, "{{coroutine closure}}")?,
             TyKind::Alias(_, _) => write!(f, "{{alias}}")?,
         }
         Ok(())
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
index 3dc277023a325..06f8307eb0ab9 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
@@ -11,8 +11,9 @@ use hir_def::{
     type_ref::TypeRefId,
 };
 use rustc_type_ir::{
-    ClosureArgs, ClosureArgsParts, CoroutineArgs, CoroutineArgsParts, Interner, TypeSuperVisitable,
-    TypeVisitable, TypeVisitableExt, TypeVisitor,
+    ClosureArgs, ClosureArgsParts, CoroutineArgs, CoroutineArgsParts, CoroutineClosureArgs,
+    CoroutineClosureArgsParts, Interner, TypeSuperVisitable, TypeVisitable, TypeVisitableExt,
+    TypeVisitor,
     inherent::{BoundExistentialPredicates, GenericArgs as _, IntoKind, SliceLike, Ty as _},
 };
 use tracing::debug;
@@ -22,8 +23,9 @@ use crate::{
     db::{InternedClosure, InternedCoroutine},
     infer::{BreakableKind, Diverges, coerce::CoerceMany},
     next_solver::{
-        AliasTy, Binder, ClauseKind, DbInterner, ErrorGuaranteed, FnSig, GenericArgs, PolyFnSig,
-        PolyProjectionPredicate, Predicate, PredicateKind, SolverDefId, Ty, TyKind,
+        AliasTy, Binder, BoundRegionKind, BoundVarKind, BoundVarKinds, ClauseKind, DbInterner,
+        ErrorGuaranteed, FnSig, GenericArgs, PolyFnSig, PolyProjectionPredicate, Predicate,
+        PredicateKind, SolverDefId, Ty, TyKind,
         abi::Safety,
         infer::{
             BoundRegionConversionTime, InferOk, InferResult,
@@ -72,6 +74,8 @@ impl<'db> InferenceContext<'_, 'db> {
         let sig_ty = Ty::new_fn_ptr(interner, bound_sig);
 
         let parent_args = GenericArgs::identity_for_item(interner, self.generic_def.into());
+        // FIXME: Make this an infer var and infer it later.
+        let tupled_upvars_ty = self.types.unit;
         let (id, ty, resume_yield_tys) = match closure_kind {
             ClosureKind::Coroutine(_) => {
                 let yield_ty = self.table.next_ty_var();
@@ -80,11 +84,11 @@ impl<'db> InferenceContext<'_, 'db> {
                 // FIXME: Infer the upvars later.
                 let parts = CoroutineArgsParts {
                     parent_args,
-                    kind_ty: Ty::new_unit(interner),
+                    kind_ty: self.types.unit,
                     resume_ty,
                     yield_ty,
                     return_ty: body_ret_ty,
-                    tupled_upvars_ty: Ty::new_unit(interner),
+                    tupled_upvars_ty,
                 };
 
                 let coroutine_id =
@@ -97,9 +101,7 @@ impl<'db> InferenceContext<'_, 'db> {
 
                 (None, coroutine_ty, Some((resume_ty, yield_ty)))
             }
-            // FIXME(next-solver): `ClosureKind::Async` should really be a separate arm that creates a `CoroutineClosure`.
-            // But for now we treat it as a closure.
-            ClosureKind::Closure | ClosureKind::Async => {
+            ClosureKind::Closure => {
                 let closure_id = self.db.intern_closure(InternedClosure(self.owner, tgt_expr));
                 match expected_kind {
                     Some(kind) => {
@@ -117,7 +119,7 @@ impl<'db> InferenceContext<'_, 'db> {
                     }
                     None => {}
                 };
-                // FIXME: Infer the kind and the upvars later when needed.
+                // FIXME: Infer the kind later if needed.
                 let parts = ClosureArgsParts {
                     parent_args,
                     closure_kind_ty: Ty::from_closure_kind(
@@ -125,7 +127,7 @@ impl<'db> InferenceContext<'_, 'db> {
                         expected_kind.unwrap_or(rustc_type_ir::ClosureKind::Fn),
                     ),
                     closure_sig_as_fn_ptr_ty: sig_ty,
-                    tupled_upvars_ty: Ty::new_unit(interner),
+                    tupled_upvars_ty,
                 };
                 let closure_ty = Ty::new_closure(
                     interner,
@@ -136,6 +138,61 @@ impl<'db> InferenceContext<'_, 'db> {
                 self.add_current_closure_dependency(closure_id);
                 (Some(closure_id), closure_ty, None)
             }
+            ClosureKind::Async => {
+                // async closures always return the type ascribed after the `->` (if present),
+                // and yield `()`.
+                let bound_return_ty = bound_sig.skip_binder().output();
+                let bound_yield_ty = self.types.unit;
+                // rustc uses a special lang item type for the resume ty. I don't believe this can cause us problems.
+                let resume_ty = self.types.unit;
+
+                // FIXME: Infer the kind later if needed.
+                let closure_kind_ty = Ty::from_closure_kind(
+                    interner,
+                    expected_kind.unwrap_or(rustc_type_ir::ClosureKind::Fn),
+                );
+
+                // FIXME: Infer captures later.
+                // `for<'env> fn() -> ()`, for no captures.
+                let coroutine_captures_by_ref_ty = Ty::new_fn_ptr(
+                    interner,
+                    Binder::bind_with_vars(
+                        interner.mk_fn_sig([], self.types.unit, false, Safety::Safe, FnAbi::Rust),
+                        BoundVarKinds::new_from_iter(
+                            interner,
+                            [BoundVarKind::Region(BoundRegionKind::ClosureEnv)],
+                        ),
+                    ),
+                );
+                let closure_args = CoroutineClosureArgs::new(
+                    interner,
+                    CoroutineClosureArgsParts {
+                        parent_args,
+                        closure_kind_ty,
+                        signature_parts_ty: Ty::new_fn_ptr(
+                            interner,
+                            bound_sig.map_bound(|sig| {
+                                interner.mk_fn_sig(
+                                    [
+                                        resume_ty,
+                                        Ty::new_tup_from_iter(interner, sig.inputs().iter()),
+                                    ],
+                                    Ty::new_tup(interner, &[bound_yield_ty, bound_return_ty]),
+                                    sig.c_variadic,
+                                    sig.safety,
+                                    sig.abi,
+                                )
+                            }),
+                        ),
+                        tupled_upvars_ty,
+                        coroutine_captures_by_ref_ty,
+                    },
+                );
+
+                let coroutine_id =
+                    self.db.intern_coroutine(InternedCoroutine(self.owner, tgt_expr)).into();
+                (None, Ty::new_coroutine_closure(interner, coroutine_id, closure_args.args), None)
+            }
         };
 
         // Now go through the argument patterns
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs
index e3c65689d3fd5..081865a99e5ce 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/interner.rs
@@ -17,8 +17,8 @@ use rustc_abi::{ReprFlags, ReprOptions};
 use rustc_hash::FxHashSet;
 use rustc_index::bit_set::DenseBitSet;
 use rustc_type_ir::{
-    AliasTermKind, AliasTyKind, BoundVar, CollectAndApply, DebruijnIndex, EarlyBinder,
-    FlagComputation, Flags, GenericArgKind, ImplPolarity, InferTy, Interner, TraitRef,
+    AliasTermKind, AliasTyKind, BoundVar, CollectAndApply, CoroutineWitnessTypes, DebruijnIndex,
+    EarlyBinder, FlagComputation, Flags, GenericArgKind, ImplPolarity, InferTy, Interner, TraitRef,
     TypeVisitableExt, UniverseIndex, Upcast, Variance,
     elaborate::elaborate,
     error::TypeError,
@@ -29,7 +29,7 @@ use rustc_type_ir::{
 
 use crate::{
     FnAbi,
-    db::HirDatabase,
+    db::{HirDatabase, InternedCoroutine},
     method_resolution::{ALL_FLOAT_FPS, ALL_INT_FPS, TyFingerprint},
     next_solver::{
         AdtIdWrapper, BoundConst, CallableIdWrapper, CanonicalVarKind, ClosureIdWrapper,
@@ -1205,12 +1205,28 @@ impl<'db> Interner for DbInterner<'db> {
         self.db().callable_item_signature(def_id.0)
     }
 
-    fn coroutine_movability(self, _def_id: Self::CoroutineId) -> rustc_ast_ir::Movability {
-        unimplemented!()
+    fn coroutine_movability(self, def_id: Self::CoroutineId) -> rustc_ast_ir::Movability {
+        // FIXME: Make this a query? I don't believe this can be accessed from bodies other than
+        // the current infer query, except with revealed opaques - is it rare enough to not matter?
+        let InternedCoroutine(owner, expr_id) = def_id.0.loc(self.db);
+        let body = self.db.body(owner);
+        let expr = &body[expr_id];
+        match *expr {
+            hir_def::hir::Expr::Closure { closure_kind, .. } => match closure_kind {
+                hir_def::hir::ClosureKind::Coroutine(movability) => match movability {
+                    hir_def::hir::Movability::Static => rustc_ast_ir::Movability::Static,
+                    hir_def::hir::Movability::Movable => rustc_ast_ir::Movability::Movable,
+                },
+                hir_def::hir::ClosureKind::Async => rustc_ast_ir::Movability::Static,
+                _ => panic!("unexpected expression for a coroutine: {expr:?}"),
+            },
+            hir_def::hir::Expr::Async { .. } => rustc_ast_ir::Movability::Static,
+            _ => panic!("unexpected expression for a coroutine: {expr:?}"),
+        }
     }
 
-    fn coroutine_for_closure(self, _def_id: Self::CoroutineId) -> Self::CoroutineId {
-        unimplemented!()
+    fn coroutine_for_closure(self, def_id: Self::CoroutineClosureId) -> Self::CoroutineId {
+        def_id
     }
 
     fn generics_require_sized_self(self, def_id: Self::DefId) -> bool {
@@ -1725,23 +1741,39 @@ impl<'db> Interner for DbInterner<'db> {
         panic!("Bug encountered in next-trait-solver: {}", msg.to_string())
     }
 
-    fn is_general_coroutine(self, _coroutine_def_id: Self::CoroutineId) -> bool {
-        // FIXME(next-solver)
-        true
+    fn is_general_coroutine(self, def_id: Self::CoroutineId) -> bool {
+        // FIXME: Make this a query? I don't believe this can be accessed from bodies other than
+        // the current infer query, except with revealed opaques - is it rare enough to not matter?
+        let InternedCoroutine(owner, expr_id) = def_id.0.loc(self.db);
+        let body = self.db.body(owner);
+        matches!(
+            body[expr_id],
+            hir_def::hir::Expr::Closure {
+                closure_kind: hir_def::hir::ClosureKind::Coroutine(_),
+                ..
+            }
+        )
     }
 
-    fn coroutine_is_async(self, _coroutine_def_id: Self::CoroutineId) -> bool {
-        // FIXME(next-solver)
-        true
+    fn coroutine_is_async(self, def_id: Self::CoroutineId) -> bool {
+        // FIXME: Make this a query? I don't believe this can be accessed from bodies other than
+        // the current infer query, except with revealed opaques - is it rare enough to not matter?
+        let InternedCoroutine(owner, expr_id) = def_id.0.loc(self.db);
+        let body = self.db.body(owner);
+        matches!(
+            body[expr_id],
+            hir_def::hir::Expr::Closure { closure_kind: hir_def::hir::ClosureKind::Async, .. }
+                | hir_def::hir::Expr::Async { .. }
+        )
     }
 
     fn coroutine_is_gen(self, _coroutine_def_id: Self::CoroutineId) -> bool {
-        // FIXME(next-solver)
+        // We don't handle gen coroutines yet.
         false
     }
 
     fn coroutine_is_async_gen(self, _coroutine_def_id: Self::CoroutineId) -> bool {
-        // FIXME(next-solver)
+        // We don't handle gen coroutines yet.
         false
     }
 
@@ -1897,10 +1929,12 @@ impl<'db> Interner for DbInterner<'db> {
     fn coroutine_hidden_types(
         self,
         _def_id: Self::CoroutineId,
-    ) -> EarlyBinder>>
-    {
-        // FIXME(next-solver)
-        unimplemented!()
+    ) -> EarlyBinder>> {
+        // FIXME: Actually implement this.
+        EarlyBinder::bind(Binder::dummy(CoroutineWitnessTypes {
+            types: Tys::default(),
+            assumptions: RegionAssumptions::default(),
+        }))
     }
 
     fn is_default_trait(self, def_id: Self::TraitId) -> bool {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs
index 1443e2f0b3126..b8406fecda315 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/ty.rs
@@ -11,10 +11,10 @@ use hir_def::{TraitId, type_ref::Rawness};
 use rustc_abi::{Float, Integer, Size};
 use rustc_ast_ir::{Mutability, try_visit, visit::VisitorResult};
 use rustc_type_ir::{
-    AliasTyKind, BoundVar, BoundVarIndexKind, ClosureKind, DebruijnIndex, FlagComputation, Flags,
-    FloatTy, FloatVid, InferTy, IntTy, IntVid, Interner, TyVid, TypeFoldable, TypeSuperFoldable,
-    TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor, UintTy, Upcast,
-    WithCachedTypeInfo,
+    AliasTyKind, BoundVar, BoundVarIndexKind, ClosureKind, CoroutineArgs, CoroutineArgsParts,
+    DebruijnIndex, FlagComputation, Flags, FloatTy, FloatVid, InferTy, IntTy, IntVid, Interner,
+    TyVid, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitableExt,
+    TypeVisitor, UintTy, Upcast, WithCachedTypeInfo,
     inherent::{
         AdtDef as _, BoundExistentialPredicates, BoundVarLike, Const as _, GenericArgs as _,
         IntoKind, ParamLike, PlaceholderLike, Safety as _, SliceLike, Ty as _,
@@ -404,6 +404,40 @@ impl<'db> Ty<'db> {
                 .split_closure_args_untupled()
                 .closure_sig_as_fn_ptr_ty
                 .callable_sig(interner),
+            TyKind::CoroutineClosure(coroutine_id, args) => {
+                Some(args.as_coroutine_closure().coroutine_closure_sig().map_bound(|sig| {
+                    let unit_ty = Ty::new_unit(interner);
+                    let return_ty = Ty::new_coroutine(
+                        interner,
+                        coroutine_id,
+                        CoroutineArgs::new(
+                            interner,
+                            CoroutineArgsParts {
+                                parent_args: args.as_coroutine_closure().parent_args(),
+                                kind_ty: unit_ty,
+                                resume_ty: unit_ty,
+                                yield_ty: unit_ty,
+                                return_ty: sig.return_ty,
+                                // FIXME: Deduce this from the coroutine closure's upvars.
+                                tupled_upvars_ty: unit_ty,
+                            },
+                        )
+                        .args,
+                    );
+                    FnSig {
+                        inputs_and_output: Tys::new_from_iter(
+                            interner,
+                            sig.tupled_inputs_ty
+                                .tuple_fields()
+                                .iter()
+                                .chain(std::iter::once(return_ty)),
+                        ),
+                        c_variadic: sig.c_variadic,
+                        safety: sig.safety,
+                        abi: sig.abi,
+                    }
+                }))
+            }
             _ => None,
         }
     }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
index 38af7cb7248f4..c2392b36babaf 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
@@ -3856,9 +3856,9 @@ fn main() {
             74..75 'f': F
             80..82 '{}': ()
             94..191 '{     ... }); }': ()
-            100..113 'async_closure': fn async_closure(impl FnOnce(i32))
+            100..113 'async_closure': fn async_closure(impl AsyncFnOnce(i32))
             100..147 'async_...    })': ()
-            114..146 'async ...     }': impl FnOnce(i32)
+            114..146 'async ...     }': impl AsyncFnOnce(i32)
             121..124 'arg': i32
             126..146 '{     ...     }': ()
             136..139 'arg': i32
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
index 0cf723e8514d8..f72ca22fd2292 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
@@ -85,7 +85,6 @@ async fn test() {
 }
 
 #[test]
-#[ignore = "FIXME(next-solver): fix async closures"]
 fn infer_async_closure() {
     check_types(
         r#"
@@ -93,7 +92,7 @@ fn infer_async_closure() {
 async fn test() {
     let f = async move |x: i32| x + 42;
     f;
-//  ^ impl Fn(i32) -> impl Future
+//  ^ impl AsyncFn(i32) -> i32
     let a = f(4);
     a;
 //  ^ impl Future
@@ -102,7 +101,7 @@ async fn test() {
 //  ^ i32
     let f = async move || 42;
     f;
-//  ^ impl Fn() -> impl Future
+//  ^ impl AsyncFn() -> i32
     let a = f();
     a;
 //  ^ impl Future
@@ -119,7 +118,7 @@ async fn test() {
     };
     let _: Option = c().await;
     c;
-//  ^ impl Fn() -> impl Future>
+//  ^ impl AsyncFn() -> Option
 }
 "#,
     );
@@ -4930,7 +4929,6 @@ fn main() {
 
 #[test]
 fn async_fn_return_type() {
-    // FIXME(next-solver): Async closures are lowered as closures currently. We should fix that.
     check_infer(
         r#"
 //- minicore: async_fn
@@ -4948,9 +4946,9 @@ fn main() {
             46..53 'loop {}': !
             51..53 '{}': ()
             67..97 '{     ...()); }': ()
-            73..76 'foo': fn foo(impl Fn())
+            73..76 'foo': fn foo(impl AsyncFn())
             73..94 'foo(as...|| ())': ()
-            77..93 'async ... || ()': impl Fn()
+            77..93 'async ... || ()': impl AsyncFn()
             91..93 '()': ()
         "#]],
     );
diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs
index 9418903123179..2bb2f80ecc05b 100644
--- a/src/tools/rust-analyzer/crates/hir/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs
@@ -75,7 +75,7 @@ use hir_ty::{
     TraitEnvironment, TyDefId, TyLoweringDiagnostic, ValueTyDefId, all_super_traits, autoderef,
     check_orphan_rules,
     consteval::try_const_usize,
-    db::InternedClosureId,
+    db::{InternedClosureId, InternedCoroutineId},
     diagnostics::BodyValidationDiagnostic,
     direct_super_traits, known_const_to_ast,
     layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding},
@@ -92,7 +92,7 @@ use itertools::Itertools;
 use rustc_hash::FxHashSet;
 use rustc_type_ir::{
     AliasTyKind, TypeSuperVisitable, TypeVisitable, TypeVisitor,
-    inherent::{AdtDef, IntoKind, SliceLike, Term as _, Ty as _},
+    inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike, Term as _, Ty as _},
 };
 use smallvec::SmallVec;
 use span::{AstIdNode, Edition, FileId};
@@ -4558,16 +4558,27 @@ impl<'db> TraitRef<'db> {
     }
 }
 
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+enum AnyClosureId {
+    ClosureId(InternedClosureId),
+    CoroutineClosureId(InternedCoroutineId),
+}
+
 #[derive(Clone, Debug, PartialEq, Eq, Hash)]
 pub struct Closure<'db> {
-    id: InternedClosureId,
+    id: AnyClosureId,
     subst: GenericArgs<'db>,
 }
 
 impl<'db> Closure<'db> {
     fn as_ty(&self, db: &'db dyn HirDatabase) -> Ty<'db> {
         let interner = DbInterner::new_with(db, None, None);
-        Ty::new_closure(interner, self.id.into(), self.subst)
+        match self.id {
+            AnyClosureId::ClosureId(id) => Ty::new_closure(interner, id.into(), self.subst),
+            AnyClosureId::CoroutineClosureId(id) => {
+                Ty::new_coroutine_closure(interner, id.into(), self.subst)
+            }
+        }
     }
 
     pub fn display_with_id(&self, db: &dyn HirDatabase, display_target: DisplayTarget) -> String {
@@ -4585,20 +4596,28 @@ impl<'db> Closure<'db> {
     }
 
     pub fn captured_items(&self, db: &'db dyn HirDatabase) -> Vec> {
-        let owner = db.lookup_intern_closure(self.id).0;
+        let AnyClosureId::ClosureId(id) = self.id else {
+            // FIXME: Infer coroutine closures' captures.
+            return Vec::new();
+        };
+        let owner = db.lookup_intern_closure(id).0;
         let infer = db.infer(owner);
-        let info = infer.closure_info(self.id);
+        let info = infer.closure_info(id);
         info.0
             .iter()
             .cloned()
-            .map(|capture| ClosureCapture { owner, closure: self.id, capture })
+            .map(|capture| ClosureCapture { owner, closure: id, capture })
             .collect()
     }
 
     pub fn capture_types(&self, db: &'db dyn HirDatabase) -> Vec> {
-        let owner = db.lookup_intern_closure(self.id).0;
+        let AnyClosureId::ClosureId(id) = self.id else {
+            // FIXME: Infer coroutine closures' captures.
+            return Vec::new();
+        };
+        let owner = db.lookup_intern_closure(id).0;
         let infer = db.infer(owner);
-        let (captures, _) = infer.closure_info(self.id);
+        let (captures, _) = infer.closure_info(id);
         let env = db.trait_environment_for_body(owner);
         captures
             .iter()
@@ -4607,10 +4626,22 @@ impl<'db> Closure<'db> {
     }
 
     pub fn fn_trait(&self, db: &dyn HirDatabase) -> FnTrait {
-        let owner = db.lookup_intern_closure(self.id).0;
-        let infer = db.infer(owner);
-        let info = infer.closure_info(self.id);
-        info.1
+        match self.id {
+            AnyClosureId::ClosureId(id) => {
+                let owner = db.lookup_intern_closure(id).0;
+                let infer = db.infer(owner);
+                let info = infer.closure_info(id);
+                info.1
+            }
+            AnyClosureId::CoroutineClosureId(_id) => {
+                // FIXME: Infer kind for coroutine closures.
+                match self.subst.as_coroutine_closure().kind() {
+                    rustc_type_ir::ClosureKind::Fn => FnTrait::AsyncFn,
+                    rustc_type_ir::ClosureKind::FnMut => FnTrait::AsyncFnMut,
+                    rustc_type_ir::ClosureKind::FnOnce => FnTrait::AsyncFnOnce,
+                }
+            }
+        }
     }
 }
 
@@ -5124,28 +5155,14 @@ impl<'db> Type<'db> {
         let interner = DbInterner::new_with(db, None, None);
         let callee = match self.ty.kind() {
             TyKind::Closure(id, subst) => Callee::Closure(id.0, subst),
+            TyKind::CoroutineClosure(id, subst) => Callee::CoroutineClosure(id.0, subst),
             TyKind::FnPtr(..) => Callee::FnPtr,
             TyKind::FnDef(id, _) => Callee::Def(id.0),
-            kind => {
-                // This will happen when it implements fn or fn mut, since we add an autoborrow adjustment
-                let (ty, kind) = if let TyKind::Ref(_, ty, _) = kind {
-                    (ty, ty.kind())
-                } else {
-                    (self.ty, kind)
-                };
-                if let TyKind::Closure(closure, subst) = kind {
-                    let sig = subst
-                        .split_closure_args_untupled()
-                        .closure_sig_as_fn_ptr_ty
-                        .callable_sig(interner)?;
-                    return Some(Callable {
-                        ty: self.clone(),
-                        sig,
-                        callee: Callee::Closure(closure.0, subst),
-                        is_bound_method: false,
-                    });
-                }
-                let (fn_trait, sig) = hir_ty::callable_sig_from_fn_trait(ty, self.env.clone(), db)?;
+            // This will happen when it implements fn or fn mut, since we add an autoborrow adjustment
+            TyKind::Ref(_, inner_ty, _) => return self.derived(inner_ty).as_callable(db),
+            _ => {
+                let (fn_trait, sig) =
+                    hir_ty::callable_sig_from_fn_trait(self.ty, self.env.clone(), db)?;
                 return Some(Callable {
                     ty: self.clone(),
                     sig,
@@ -5165,7 +5182,12 @@ impl<'db> Type<'db> {
 
     pub fn as_closure(&self) -> Option> {
         match self.ty.kind() {
-            TyKind::Closure(id, subst) => Some(Closure { id: id.0, subst }),
+            TyKind::Closure(id, subst) => {
+                Some(Closure { id: AnyClosureId::ClosureId(id.0), subst })
+            }
+            TyKind::CoroutineClosure(id, subst) => {
+                Some(Closure { id: AnyClosureId::CoroutineClosureId(id.0), subst })
+            }
             _ => None,
         }
     }
@@ -5824,6 +5846,7 @@ pub struct Callable<'db> {
 enum Callee<'db> {
     Def(CallableDefId),
     Closure(InternedClosureId, GenericArgs<'db>),
+    CoroutineClosure(InternedCoroutineId, GenericArgs<'db>),
     FnPtr,
     FnImpl(FnTrait),
 }
@@ -5845,7 +5868,12 @@ impl<'db> Callable<'db> {
             Callee::Def(CallableDefId::EnumVariantId(it)) => {
                 CallableKind::TupleEnumVariant(it.into())
             }
-            Callee::Closure(id, ref subst) => CallableKind::Closure(Closure { id, subst: *subst }),
+            Callee::Closure(id, subst) => {
+                CallableKind::Closure(Closure { id: AnyClosureId::ClosureId(id), subst })
+            }
+            Callee::CoroutineClosure(id, subst) => {
+                CallableKind::Closure(Closure { id: AnyClosureId::CoroutineClosureId(id), subst })
+            }
             Callee::FnPtr => CallableKind::FnPtr,
             Callee::FnImpl(fn_) => CallableKind::FnImpl(fn_),
         }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs
index 2cda6d6f1c0a4..ca142332d97e7 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs
@@ -805,7 +805,6 @@ impl A {
         );
     }
 
-    #[ignore = "FIXME(next-solver): Fix async closures"]
     #[test]
     fn replaces_async_closure_with_async_fn() {
         check_assist(

From 76b7c79e326a2917fe3e04ffbaac0fde61d6b4dd Mon Sep 17 00:00:00 2001
From: A4-Tacks 
Date: Fri, 12 Sep 2025 20:55:14 +0800
Subject: [PATCH 64/76] Support else completion for more expressions

- Support `else` completion in `ArrayExpr`, `ReturnExpr`, `PrefixExpr`, etc.
- Support `else` completion after a `MatchArm` expression

Before this PR, the `else` branch could not be completed in most expression positions.

Example
---
```rust
fn foo() -> [i32; 1] {
    [if true {
        2
    } $0]
}
```
->
```rust
fn foo() -> [i32; 1] {
    [if true {
        2
    } else {
        $0
    }]
}
```

---

```rust
fn foo() -> i32 {
    match () {
        () => if true {
            2
        } $0
    }
}
```
->
```rust
fn foo() -> i32 {
    match () {
        () => if true {
            2
        } else {
            $0
        }
    }
}
```
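
---

One more of the newly supported positions, mirroring the added `PrefixExpr` test; the
"after" state is illustrative and assumes the usual `else` snippet expansion:

```rust
fn foo() -> i32 {
    -if true {
        2
    } $0
}
```
->
```rust
fn foo() -> i32 {
    -if true {
        2
    } else {
        $0
    }
}
```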
---
 .../ide-completion/src/completions/pattern.rs |   5 +
 .../crates/ide-completion/src/context.rs      |   1 +
 .../ide-completion/src/context/analysis.rs    |  19 +-
 .../ide-completion/src/tests/expression.rs    | 292 ++++++++++++++++++
 4 files changed, 312 insertions(+), 5 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs
index 0ce81d02b4096..dcddc24890ac4 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/pattern.rs
@@ -42,6 +42,11 @@ pub(crate) fn complete_pattern(
         }
     }
 
+    if pattern_ctx.after_if_expr {
+        add_keyword("else", "else {\n    $0\n}");
+        add_keyword("else if", "else if $1 {\n    $0\n}");
+    }
+
     if pattern_ctx.record_pat.is_some() {
         return;
     }
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
index 91f19f6b4370c..2f166b7184511 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
@@ -279,6 +279,7 @@ pub(crate) struct PatternContext {
     pub(crate) param_ctx: Option,
     pub(crate) has_type_ascription: bool,
     pub(crate) should_suggest_name: bool,
+    pub(crate) after_if_expr: bool,
     pub(crate) parent_pat: Option,
     pub(crate) ref_token: Option,
     pub(crate) mut_token: Option,
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
index d39bff1577f3c..42772ef2d04f9 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
@@ -999,10 +999,6 @@ fn classify_name_ref<'db>(
             }
         }
     };
-    let after_if_expr = |node: SyntaxNode| {
-        let prev_expr = prev_expr(node);
-        matches!(prev_expr, Some(ast::Expr::IfExpr(_)))
-    };
     let after_incomplete_let = |node: SyntaxNode| {
         prev_expr(node).and_then(|it| it.syntax().parent()).and_then(ast::LetStmt::cast)
     };
@@ -1242,7 +1238,7 @@ fn classify_name_ref<'db>(
         let it = expr.syntax();
         let in_block_expr = is_in_block(it);
         let (in_loop_body, innermost_breakable) = is_in_breakable(it).unzip();
-        let after_if_expr = after_if_expr(it.clone());
+        let after_if_expr = is_after_if_expr(it.clone());
         let ref_expr_parent =
             path.as_single_name_ref().and_then(|_| it.parent()).and_then(ast::RefExpr::cast);
         let after_amp = non_trivia_sibling(it.clone().into(), Direction::Prev)
@@ -1763,6 +1759,7 @@ fn pattern_context_for(
         param_ctx,
         has_type_ascription,
         should_suggest_name,
+        after_if_expr: is_after_if_expr(pat.syntax().clone()),
         parent_pat: pat.syntax().parent().and_then(ast::Pat::cast),
         mut_token,
         ref_token,
@@ -1933,6 +1930,18 @@ fn has_in_newline_expr_first(node: &SyntaxNode) -> bool {
     }
 }
 
+fn is_after_if_expr(node: SyntaxNode) -> bool {
+    let node = match node.parent().and_then(Either::::cast) {
+        Some(stmt) => stmt.syntax().clone(),
+        None => node,
+    };
+    let prev_sibling =
+        non_trivia_sibling(node.into(), Direction::Prev).and_then(NodeOrToken::into_node);
+    iter::successors(prev_sibling, |it| it.last_child_or_token()?.into_node())
+        .find_map(ast::IfExpr::cast)
+        .is_some()
+}
+
 fn next_non_trivia_token(e: impl Into) -> Option {
     let mut token = match e.into() {
         SyntaxElement::Node(n) => n.last_token()?,
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
index 09af635f01ca1..67c84f42c1ae5 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
@@ -1869,6 +1869,298 @@ fn foo() { let x = if foo {} $0 else if true {} else {}; }
             sn ppd
         "#]],
     );
+    check(
+        r#"
+fn foo() { [if foo {} $0]}
+"#,
+        expect![[r#"
+            fn foo()  fn()
+            bt u32     u32
+            kw async
+            kw const
+            kw crate::
+            kw else
+            kw else if
+            kw enum
+            kw extern
+            kw false
+            kw fn
+            kw for
+            kw if
+            kw if let
+            kw impl
+            kw impl for
+            kw let
+            kw letm
+            kw loop
+            kw match
+            kw mod
+            kw return
+            kw self::
+            kw static
+            kw struct
+            kw trait
+            kw true
+            kw type
+            kw union
+            kw unsafe
+            kw use
+            kw while
+            kw while let
+            sn macro_rules
+            sn pd
+            sn ppd
+        "#]],
+    );
+    check(
+        r#"
+fn foo() { [if foo {} el$0]}
+"#,
+        expect![[r#"
+            fn foo()  fn()
+            bt u32     u32
+            kw async
+            kw const
+            kw crate::
+            kw else
+            kw else if
+            kw enum
+            kw extern
+            kw false
+            kw fn
+            kw for
+            kw if
+            kw if let
+            kw impl
+            kw impl for
+            kw let
+            kw letm
+            kw loop
+            kw match
+            kw mod
+            kw return
+            kw self::
+            kw static
+            kw struct
+            kw trait
+            kw true
+            kw type
+            kw union
+            kw unsafe
+            kw use
+            kw while
+            kw while let
+            sn macro_rules
+            sn pd
+            sn ppd
+        "#]],
+    );
+    check(
+        r#"
+fn foo() { 2 + if foo {} $0 }
+"#,
+        expect![[r#"
+            fn foo()  fn()
+            bt u32     u32
+            kw async
+            kw const
+            kw crate::
+            kw else
+            kw else if
+            kw enum
+            kw extern
+            kw false
+            kw fn
+            kw for
+            kw if
+            kw if let
+            kw impl
+            kw impl for
+            kw let
+            kw letm
+            kw loop
+            kw match
+            kw mod
+            kw return
+            kw self::
+            kw static
+            kw struct
+            kw trait
+            kw true
+            kw type
+            kw union
+            kw unsafe
+            kw use
+            kw while
+            kw while let
+            sn macro_rules
+            sn pd
+            sn ppd
+        "#]],
+    );
+    check(
+        r#"
+fn foo() { -if foo {} $0 }
+"#,
+        expect![[r#"
+            fn foo()  fn()
+            bt u32     u32
+            kw async
+            kw const
+            kw crate::
+            kw else
+            kw else if
+            kw enum
+            kw extern
+            kw false
+            kw fn
+            kw for
+            kw if
+            kw if let
+            kw impl
+            kw impl for
+            kw let
+            kw letm
+            kw loop
+            kw match
+            kw mod
+            kw return
+            kw self::
+            kw static
+            kw struct
+            kw trait
+            kw true
+            kw type
+            kw union
+            kw unsafe
+            kw use
+            kw while
+            kw while let
+            sn macro_rules
+            sn pd
+            sn ppd
+        "#]],
+    );
+    check(
+        r#"
+fn foo() { &mut if foo {} $0 }
+"#,
+        expect![[r#"
+            fn foo()  fn()
+            bt u32     u32
+            kw async
+            kw const
+            kw crate::
+            kw else
+            kw else if
+            kw enum
+            kw extern
+            kw false
+            kw fn
+            kw for
+            kw if
+            kw if let
+            kw impl
+            kw impl for
+            kw let
+            kw letm
+            kw loop
+            kw match
+            kw mod
+            kw return
+            kw self::
+            kw static
+            kw struct
+            kw trait
+            kw true
+            kw type
+            kw union
+            kw unsafe
+            kw use
+            kw while
+            kw while let
+            sn macro_rules
+            sn pd
+            sn ppd
+        "#]],
+    );
+    check(
+        r#"
+fn foo() { return if foo {} $0 }
+"#,
+        expect![[r#"
+            fn foo()  fn()
+            bt u32     u32
+            kw async
+            kw const
+            kw crate::
+            kw else
+            kw else if
+            kw enum
+            kw extern
+            kw false
+            kw fn
+            kw for
+            kw if
+            kw if let
+            kw impl
+            kw impl for
+            kw let
+            kw letm
+            kw loop
+            kw match
+            kw mod
+            kw return
+            kw self::
+            kw static
+            kw struct
+            kw trait
+            kw true
+            kw type
+            kw union
+            kw unsafe
+            kw use
+            kw while
+            kw while let
+            sn macro_rules
+            sn pd
+            sn ppd
+        "#]],
+    );
+    check(
+        r#"
+fn foo() { match () { () => if foo {} $0 } }
+"#,
+        expect![[r#"
+            kw else
+            kw else if
+            kw mut
+            kw ref
+        "#]],
+    );
+    check(
+        r#"
+fn foo() { match () { () => if foo {} $0, } }
+"#,
+        expect![[r#"
+            kw else
+            kw else if
+            kw mut
+            kw ref
+        "#]],
+    );
+    check(
+        r#"
+fn foo() { match () { () => if foo {} $0, _ => (), } }
+"#,
+        expect![[r#"
+            kw else
+            kw else if
+            kw mut
+            kw ref
+        "#]],
+    );
+    // FIXME: support else completion after ast::RecordExprField
 }
 
 #[test]

From 1a8055d77789ad04c688b8800394d8995d83f3e2 Mon Sep 17 00:00:00 2001
From: A4-Tacks 
Date: Fri, 24 Oct 2025 13:32:54 +0800
Subject: [PATCH 65/76] minor: fix track_caller for ide-completion test utils

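For context, a minimal sketch (not from this codebase) of why the `let`-`else` form
matters for `#[track_caller]`: a `panic!` inside the closure passed to `unwrap_or_else`
reports the closure's own source location, because closures do not inherit
`#[track_caller]`, whereas a `let`-`else` panics directly in the annotated function and
so reports the caller's location:

```rust
#[track_caller]
fn lookup(items: &[i32], needle: i32) -> i32 {
    // Panic location: this line inside the helper, not the caller.
    *items
        .iter()
        .find(|&&it| it == needle)
        .unwrap_or_else(|| panic!("can't find {needle}"))
}

#[track_caller]
fn lookup_let_else(items: &[i32], needle: i32) -> i32 {
    // Panic location: the caller of `lookup_let_else`, as intended.
    let Some(&it) = items.iter().find(|&&it| it == needle) else {
        panic!("can't find {needle}")
    };
    it
}
```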
---
 .../rust-analyzer/crates/ide-completion/src/tests.rs     | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
index b32a895457268..83606d21f522b 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
@@ -246,11 +246,10 @@ pub(crate) fn check_edit_with_config(
     let (db, position) = position(ra_fixture_before);
     let completions: Vec =
         hir::attach_db(&db, || crate::completions(&db, &config, position, None).unwrap());
-    let (completion,) = completions
-        .iter()
-        .filter(|it| it.lookup() == what)
-        .collect_tuple()
-        .unwrap_or_else(|| panic!("can't find {what:?} completion in {completions:#?}"));
+    let Some((completion,)) = completions.iter().filter(|it| it.lookup() == what).collect_tuple()
+    else {
+        panic!("can't find {what:?} completion in {completions:#?}")
+    };
     let mut actual = db.file_text(position.file_id).text(&db).to_string();
 
     let mut combined_edit = completion.text_edit.clone();

From 6d2271696a6654458c38dd5130bd8cddcb170dde Mon Sep 17 00:00:00 2001
From: A4-Tacks 
Date: Fri, 24 Oct 2025 18:02:30 +0800
Subject: [PATCH 66/76] minor: Update ide-assist docs for add_braces

---
 .../ide-assists/src/handlers/add_braces.rs    | 16 +++++++++++++++-
 .../crates/ide-assists/src/tests/generated.rs | 19 +++++++++++++++++++
 2 files changed, 34 insertions(+), 1 deletion(-)

diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_braces.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_braces.rs
index f5bbe8dda8c5d..99ee50fa5848f 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_braces.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_braces.rs
@@ -9,7 +9,7 @@ use crate::{AssistContext, AssistId, Assists};
 
 // Assist: add_braces
 //
-// Adds braces to closure bodies and match arm expressions.
+// Adds braces to closure bodies, match arm expressions and assignment bodies.
 //
 // ```
 // fn foo(n: i32) -> i32 {
@@ -30,6 +30,20 @@ use crate::{AssistContext, AssistId, Assists};
 //     }
 // }
 // ```
+// ---
+// ```
+// fn foo(n: i32) -> i32 {
+//     let x =$0 n + 2;
+// }
+// ```
+// ->
+// ```
+// fn foo(n: i32) -> i32 {
+//     let x = {
+//         n + 2
+//     };
+// }
+// ```
 pub(crate) fn add_braces(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     let (expr_type, expr) = get_replacement_node(ctx)?;
 
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
index c7ae44124f238..e8582aa19f9fc 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
@@ -27,6 +27,25 @@ fn foo(n: i32) -> i32 {
     )
 }
 
+#[test]
+fn doctest_add_braces_1() {
+    check_doc_test(
+        "add_braces",
+        r#####"
+fn foo(n: i32) -> i32 {
+    let x =$0 n + 2;
+}
+"#####,
+        r#####"
+fn foo(n: i32) -> i32 {
+    let x = {
+        n + 2
+    };
+}
+"#####,
+    )
+}
+
 #[test]
 fn doctest_add_explicit_enum_discriminant() {
     check_doc_test(

From 52b62c8ec3ea4106b7cd926c75311e9c4f2fa43b Mon Sep 17 00:00:00 2001
From: Chayim Refael Friedman 
Date: Fri, 24 Oct 2025 14:08:01 +0300
Subject: [PATCH 67/76] Remove no-longer-necessary conversion

---
 src/tools/rust-analyzer/crates/hir-ty/src/lower.rs | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
index 6f7ca4829d52c..a181ae0157ccd 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
@@ -475,10 +475,9 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
                             .opaque_type_data
                             .alloc(ImplTrait { predicates: Vec::default() });
 
-                        // FIXME(next-solver): this from_raw/into_raw dance isn't nice, but it's minimal
                         let impl_trait_id = origin.either(
-                            |f| ImplTraitId::ReturnTypeImplTrait(f, Idx::from_raw(idx.into_raw())),
-                            |a| ImplTraitId::TypeAliasImplTrait(a, Idx::from_raw(idx.into_raw())),
+                            |f| ImplTraitId::ReturnTypeImplTrait(f, idx),
+                            |a| ImplTraitId::TypeAliasImplTrait(a, idx),
                         );
                         let opaque_ty_id: SolverDefId =
                             self.db.intern_impl_trait_id(impl_trait_id).into();

From 15557506eb90cb9ea3d0d8b0835d284fb4bbd9a5 Mon Sep 17 00:00:00 2001
From: Shoyu Vanilla 
Date: Fri, 24 Oct 2025 22:31:47 +0900
Subject: [PATCH 68/76] fix: Fix a bug in inhabitedness checks for arrays

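With the fix, the element type of a non-empty array goes back through the visitor's own
`visit_with` instead of only `super_visit_with`, so the element type itself (not just its
generic arguments) is checked for uninhabitedness. A minimal sketch of the kind of code
this affects (names are illustrative, assuming the usual non-exhaustive-let diagnostic):

```rust
enum Never {}

struct Wrap {
    // A non-empty array of an uninhabited type is itself uninhabited.
    inner: [Never; 1],
}

fn unwrap(res: Result<i32, Wrap>) -> i32 {
    // `Wrap` is uninhabited, so the `Err` case is impossible and this
    // single-pattern `let` should not be flagged as non-exhaustive.
    let Ok(value) = res;
    value
}
```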
---
 .../crates/hir-ty/src/inhabitedness.rs        |  2 +-
 .../src/handlers/non_exhaustive_let.rs        | 52 +++++++++++++++++++
 2 files changed, 53 insertions(+), 1 deletion(-)

diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs b/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs
index 8aed2608d6cdb..5e742bba3ebe3 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs
@@ -90,7 +90,7 @@ impl<'db> TypeVisitor> for UninhabitedFrom<'_, 'db> {
             TyKind::Tuple(..) => ty.super_visit_with(self),
             TyKind::Array(item_ty, len) => match try_const_usize(self.infcx.interner.db, len) {
                 Some(0) | None => CONTINUE_OPAQUELY_INHABITED,
-                Some(1..) => item_ty.super_visit_with(self),
+                Some(1..) => item_ty.visit_with(self),
             },
             _ => CONTINUE_OPAQUELY_INHABITED,
         };
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs
index e31367f3b14e7..c86ecd2f03b93 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs
@@ -152,6 +152,58 @@ impl Deref for Foo {
 fn test(x: Foo<(i32, bool)>) {
     let (_a, _b): &(i32, bool) = &x;
 }
+"#,
+        );
+    }
+
+    #[test]
+    fn uninhabited_variants() {
+        check_diagnostics(
+            r#"
+//- minicore: result
+enum Infallible {}
+
+trait Foo {
+    type Bar;
+}
+
+struct Wrapper {
+    error: T,
+}
+
+struct FooWrapper {
+    error: T::Bar,
+}
+
+fn foo>(result: Result) -> T {
+    let Ok(ok) = result;
+    ok
+}
+
+fn bar>(result: Result) -> T {
+    let Ok(ok) = result;
+    ok
+}
+
+fn baz>(result: Result>) -> T {
+    let Ok(ok) = result;
+    ok
+}
+
+fn qux>(result: Result>) -> T {
+    let Ok(ok) = result;
+    ok
+}
+
+fn quux>(result: Result) -> T {
+    let Ok(ok) = result;
+    ok
+}
+
+fn corge>(result: Result) -> T {
+    let Ok(ok) = result;
+    ok
+}
 "#,
         );
     }

From 4e3fe6dad59576e8f11e7eb138f6b5396c18c450 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= 
Date: Fri, 24 Oct 2025 17:10:23 +0300
Subject: [PATCH 69/76] Fix typo

---
 .../rust-analyzer/crates/ide-completion/src/context/analysis.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
index 42772ef2d04f9..b3d9ff0046102 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
@@ -1907,7 +1907,7 @@ fn is_in_block(node: &SyntaxNode) -> bool {
         .unwrap_or(false)
 }
 
-/// Similar to `has_parens`, heuristic sensing incomplete statement before ambigiguous `Expr`
+/// Similar to `has_parens`, heuristic sensing incomplete statement before ambiguous `Expr`
 ///
 /// Heuristic:
 ///

From 72217cb30453a7da073960b4257b379daf8875c5 Mon Sep 17 00:00:00 2001
From: Daniel Paoliello 
Date: Fri, 24 Oct 2025 09:09:37 -0700
Subject: [PATCH 70/76] Bump dependencies to remove indirect dependencies on
 windows-sys 0.52 and 0.59

---
 src/tools/rust-analyzer/Cargo.lock | 36 ++++++++----------------------
 1 file changed, 9 insertions(+), 27 deletions(-)

diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock
index b557b10e5c77f..535833d9e2a95 100644
--- a/src/tools/rust-analyzer/Cargo.lock
+++ b/src/tools/rust-analyzer/Cargo.lock
@@ -796,11 +796,11 @@ dependencies = [
 
 [[package]]
 name = "home"
-version = "0.5.11"
+version = "0.5.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf"
+checksum = "cc627f471c528ff0c4a49e1d5e60450c8f6461dd6d10ba9dcd3a61d3dff7728d"
 dependencies = [
- "windows-sys 0.59.0",
+ "windows-sys 0.61.0",
 ]
 
 [[package]]
@@ -1385,14 +1385,14 @@ dependencies = [
 
 [[package]]
 name = "mio"
-version = "1.0.4"
+version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c"
+checksum = "69d83b0086dc8ecf3ce9ae2874b2d1290252e2a30720bea58a5c6639b0092873"
 dependencies = [
  "libc",
  "log",
  "wasi",
- "windows-sys 0.59.0",
+ "windows-sys 0.61.0",
 ]
 
 [[package]]
@@ -1448,11 +1448,11 @@ checksum = "5e0826a989adedc2a244799e823aece04662b66609d96af8dff7ac6df9a8925d"
 
 [[package]]
 name = "nu-ansi-term"
-version = "0.50.1"
+version = "0.50.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d4a28e057d01f97e61255210fcff094d74ed0466038633e95017f5beb68e4399"
+checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5"
 dependencies = [
- "windows-sys 0.52.0",
+ "windows-sys 0.60.2",
 ]
 
 [[package]]
@@ -2973,24 +2973,6 @@ dependencies = [
  "windows-link 0.1.3",
 ]
 
-[[package]]
-name = "windows-sys"
-version = "0.52.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
-dependencies = [
- "windows-targets 0.52.6",
-]
-
-[[package]]
-name = "windows-sys"
-version = "0.59.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
-dependencies = [
- "windows-targets 0.52.6",
-]
-
 [[package]]
 name = "windows-sys"
 version = "0.60.2"

From 2ab3580300cebc323c5ac96b17e6a4a24a0a7022 Mon Sep 17 00:00:00 2001
From: yukang 
Date: Sat, 25 Oct 2025 09:14:35 +0800
Subject: [PATCH 71/76] Use tracing error when receiving a compiler message for an
 unknown package

---
 .../crates/project-model/src/build_dependencies.rs              | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs b/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs
index 5eda5af3ace0b..3a682d5a4d834 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs
@@ -142,7 +142,7 @@ impl WorkspaceBuildScripts {
                 if let Some(&(package, workspace)) = by_id.get(package) {
                     cb(&workspaces[workspace][package].name, &mut res[workspace].outputs[package]);
                 } else {
-                    never!("Received compiler message for unknown package: {}", package);
+                    tracing::error!("Received compiler message for unknown package: {}", package);
                 }
             },
             progress,

From e0fd5743f9293e74843d09446f15a9616bbfea3d Mon Sep 17 00:00:00 2001
From: A4-Tacks 
Date: Sat, 27 Sep 2025 16:33:05 +0800
Subject: [PATCH 72/76] Fix untyped syntax tree usage and casts for
 convert_to_guarded_return

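As before, the assist bails out when the `then` branch is just an early exit; the new
`is_early_block` check expresses that with typed AST instead of raw syntax casts. An
illustrative case (not from the test suite) where the assist remains not applicable:

```rust
fn process(values: &[i32]) {
    for &value in values {
        if value < 0 {
            // The then-branch is already an early exit, so converting this
            // `if` into a guarded return would gain nothing; the assist bails.
            continue;
        }
    }
}
```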
---
 .../src/handlers/convert_to_guarded_return.rs | 26 ++++++++++---------
 1 file changed, 14 insertions(+), 12 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
index ae13f83fbc345..2a0ed4dc9e927 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
@@ -75,8 +75,8 @@ fn if_expr_to_guarded_return(
 
     let let_chains = flat_let_chain(cond);
 
-    let then_block = if_expr.then_branch()?;
-    let then_block = then_block.stmt_list()?;
+    let then_branch = if_expr.then_branch()?;
+    let then_block = then_branch.stmt_list()?;
 
     let parent_block = if_expr.syntax().parent()?.ancestors().find_map(ast::BlockExpr::cast)?;
 
@@ -84,17 +84,8 @@ fn if_expr_to_guarded_return(
         return None;
     }
 
-    // FIXME: This relies on untyped syntax tree and casts to much. It should be
-    // rewritten to use strongly-typed APIs.
-
     // check for early return and continue
-    let first_in_then_block = then_block.syntax().first_child()?;
-    if ast::ReturnExpr::can_cast(first_in_then_block.kind())
-        || ast::ContinueExpr::can_cast(first_in_then_block.kind())
-        || first_in_then_block
-            .children()
-            .any(|x| ast::ReturnExpr::can_cast(x.kind()) || ast::ContinueExpr::can_cast(x.kind()))
-    {
+    if is_early_block(&then_block) || is_never_block(&ctx.sema, &then_branch) {
         return None;
     }
 
@@ -284,6 +275,17 @@ fn clean_stmt_block(block: &ast::BlockExpr) -> ast::BlockExpr {
     }
 }
 
+fn is_early_block(then_block: &ast::StmtList) -> bool {
+    let is_early_expr =
+        |expr| matches!(expr, ast::Expr::ReturnExpr(_) | ast::Expr::ContinueExpr(_));
+    let into_expr = |stmt| match stmt {
+        ast::Stmt::ExprStmt(expr_stmt) => expr_stmt.expr(),
+        _ => None,
+    };
+    then_block.tail_expr().is_some_and(is_early_expr)
+        || then_block.statements().filter_map(into_expr).any(is_early_expr)
+}
+
 #[cfg(test)]
 mod tests {
     use crate::tests::{check_assist, check_assist_not_applicable};

From 5fda166acfc7c7c4916b42d9a6810dce0e5b905d Mon Sep 17 00:00:00 2001
From: A4-Tacks 
Date: Sat, 25 Oct 2025 15:24:37 +0800
Subject: [PATCH 73/76] Fix `let` not completing before an expression in a condition

Example
---
```rust
fn f() {
    if $0foo.bar() {}
}
```

**Before this PR**

"let" not in completion list

**After this PR**

```rust
fn f() {
    if let $1 = $0foo.bar() {}
}
```
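
The same now works when the cursor sits directly before a plain identifier, as covered by
the added tests:

```rust
fn f() {
    if $0x {}
}
```
->
```rust
fn f() {
    if let $1 = $0x {}
}
```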
---
 .../crates/ide-completion/src/completions/postfix.rs        | 2 ++
 .../crates/ide-completion/src/tests/expression.rs           | 6 ++++++
 2 files changed, 8 insertions(+)

diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs
index 8c2bb961c5866..ab3f619fd7f57 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs
@@ -460,6 +460,8 @@ pub(crate) fn is_in_condition(it: &ast::Expr) -> bool {
                 ast::MatchGuard(guard) => guard.condition()? == *it,
                 ast::BinExpr(bin_expr) => (bin_expr.op_token()?.kind() == T![&&])
                     .then(|| is_in_condition(&bin_expr.into()))?,
+                ast::Expr(expr) => (expr.syntax().text_range().start() == it.syntax().text_range().start())
+                    .then(|| is_in_condition(&expr))?,
                 _ => return None,
             } })
         })
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
index 67c84f42c1ae5..4033aa5d9c5ef 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
@@ -3268,6 +3268,12 @@ fn foo() -> (i32, i32) {
 #[test]
 fn let_in_condition() {
     check_edit("let", r#"fn f() { if $0 {} }"#, r#"fn f() { if let $1 = $0 {} }"#);
+    check_edit("let", r#"fn f() { if $0x {} }"#, r#"fn f() { if let $1 = $0x {} }"#);
+    check_edit(
+        "let",
+        r#"fn f() { if $0foo.bar() {} }"#,
+        r#"fn f() { if let $1 = $0foo.bar() {} }"#,
+    );
 }
 
 #[test]

From 95e2b8444a700d8a6c88ddb2860f74c2f5176648 Mon Sep 17 00:00:00 2001
From: A4-Tacks 
Date: Sun, 26 Oct 2025 19:55:09 +0800
Subject: [PATCH 74/76] Fix assist not being applicable in let-chains for
 replace_is_method_with_if_let_method

Example
---
```rust
fn main() {
    let x = Some(1);
    let cond = true;
    if cond && x.is_som$0e() {}
}
```

**Before this PR**

Assist not applicable

**After this PR**

```rust
fn main() {
    let x = Some(1);
    let cond = true;
    if cond && let Some(${0:x1}) = x {}
}
```
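
The chain link under the cursor can also be on the left of the `&&` chain; from the added
tests:

```rust
fn main() {
    let x = Some(1);
    let cond = true;
    if x.is_som$0e() && cond {}
}
```
->
```rust
fn main() {
    let x = Some(1);
    let cond = true;
    if let Some(${0:x1}) = x && cond {}
}
```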
---
 .../replace_is_method_with_if_let_method.rs   | 63 +++++++++++++++++--
 .../crates/ide-assists/src/utils.rs           | 22 +++++++
 2 files changed, 81 insertions(+), 4 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs
index f507cae1bb0de..c57fd4d439dc6 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs
@@ -1,7 +1,7 @@
 use ide_db::syntax_helpers::suggest_name;
 use syntax::ast::{self, AstNode, syntax_factory::SyntaxFactory};
 
-use crate::{AssistContext, AssistId, Assists};
+use crate::{AssistContext, AssistId, Assists, utils::cover_let_chain};
 
 // Assist: replace_is_some_with_if_let_some
 //
@@ -27,13 +27,11 @@ pub(crate) fn replace_is_method_with_if_let_method(
     let if_expr = ctx.find_node_at_offset::<ast::IfExpr>()?;
 
     let cond = if_expr.condition()?;
+    let cond = cover_let_chain(cond, ctx.selection_trimmed())?;
     let call_expr = match cond {
         ast::Expr::MethodCallExpr(call) => call,
         _ => return None,
     };
-    if ctx.offset() > if_expr.then_branch()?.stmt_list()?.l_curly_token()?.text_range().end() {
-        return None;
-    }
 
     let name_ref = call_expr.name_ref()?;
     match name_ref.text().as_str() {
@@ -195,6 +193,63 @@ fn main() {
         );
     }
 
+    #[test]
+    fn replace_is_some_with_if_let_some_in_let_chain() {
+        check_assist(
+            replace_is_method_with_if_let_method,
+            r#"
+fn main() {
+    let x = Some(1);
+    let cond = true;
+    if cond && x.is_som$0e() {}
+}
+"#,
+            r#"
+fn main() {
+    let x = Some(1);
+    let cond = true;
+    if cond && let Some(${0:x1}) = x {}
+}
+"#,
+        );
+
+        check_assist(
+            replace_is_method_with_if_let_method,
+            r#"
+fn main() {
+    let x = Some(1);
+    let cond = true;
+    if x.is_som$0e() && cond {}
+}
+"#,
+            r#"
+fn main() {
+    let x = Some(1);
+    let cond = true;
+    if let Some(${0:x1}) = x && cond {}
+}
+"#,
+        );
+
+        check_assist(
+            replace_is_method_with_if_let_method,
+            r#"
+fn main() {
+    let x = Some(1);
+    let cond = true;
+    if cond && x.is_som$0e() && cond {}
+}
+"#,
+            r#"
+fn main() {
+    let x = Some(1);
+    let cond = true;
+    if cond && let Some(${0:x1}) = x && cond {}
+}
+"#,
+        );
+    }
+
     #[test]
     fn replace_is_some_with_if_let_some_not_applicable_after_l_curly() {
         check_assist_not_applicable(
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
index 5a3c5a39dac79..e43516f6b9635 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
@@ -1133,6 +1133,28 @@ pub(crate) fn tt_from_syntax(node: SyntaxNode) -> Vec
+
+pub(crate) fn cover_let_chain(mut expr: ast::Expr, range: TextRange) -> Option<ast::Expr> {
+    if !expr.syntax().text_range().contains_range(range) {
+        return None;
+    }
+    loop {
+        let (chain_expr, rest) = if let ast::Expr::BinExpr(bin_expr) = &expr
+            && bin_expr.op_kind() == Some(ast::BinaryOp::LogicOp(ast::LogicOp::And))
+        {
+            (bin_expr.rhs(), bin_expr.lhs())
+        } else {
+            (Some(expr), None)
+        };
+
+        if let Some(chain_expr) = chain_expr
+            && chain_expr.syntax().text_range().contains_range(range)
+        {
+            break Some(chain_expr);
+        }
+        expr = rest?;
+    }
+}
+
 pub fn is_body_const(sema: &Semantics<'_, RootDatabase>, expr: &ast::Expr) -> bool {
     let mut is_const = true;
     preorder_expr(expr, &mut |ev| {

From ce94044ff9be11792342f47af98501a35755d884 Mon Sep 17 00:00:00 2001
From: Michael Gruenewald 
Date: Sun, 26 Oct 2025 13:22:52 +0100
Subject: [PATCH 75/76] Don't add cargo to requiresServerReloadOpts

---
 src/tools/rust-analyzer/editors/code/src/config.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/tools/rust-analyzer/editors/code/src/config.ts b/src/tools/rust-analyzer/editors/code/src/config.ts
index c0a1b3f02e36a..5dc2c419efa86 100644
--- a/src/tools/rust-analyzer/editors/code/src/config.ts
+++ b/src/tools/rust-analyzer/editors/code/src/config.ts
@@ -31,7 +31,7 @@ export class Config {
     workspaceState: vscode.Memento;
 
     private readonly rootSection = "rust-analyzer";
-    private readonly requiresServerReloadOpts = ["cargo", "server", "files", "showSyntaxTree"].map(
+    private readonly requiresServerReloadOpts = ["server", "files", "showSyntaxTree"].map(
         (opt) => `${this.rootSection}.${opt}`,
     );
 

From 85b7d646cd9b2deb6e65b3bd4aa045a71a6415b4 Mon Sep 17 00:00:00 2001
From: Shoyu Vanilla 
Date: Mon, 27 Oct 2025 02:18:52 +0900
Subject: [PATCH 76/76] Add regression tests for some fixed `A-ty` issues

---
 .../hir-ty/src/tests/regression/new_solver.rs | 52 +++++++++++++++
 .../src/handlers/mismatched_arg_count.rs      | 23 +++++++
 .../crates/ide/src/goto_definition.rs         | 64 +++++++++++++++++++
 3 files changed, 139 insertions(+)

diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression/new_solver.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression/new_solver.rs
index 5983ec7647900..f8b73cd50551b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression/new_solver.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression/new_solver.rs
@@ -472,3 +472,55 @@ where
 "#,
     );
 }
+
+#[test]
+fn regression_16282() {
+    check_infer(
+        r#"
+//- minicore: coerce_unsized, dispatch_from_dyn
+trait MapLookup<K> {
+    type MapValue;
+}
+
+impl<K> MapLookup<K> for K {
+    type MapValue = K;
+}
+
+trait Map: MapLookup<<Self as Map>::Key> {
+    type Key;
+}
+
+impl<K> Map for K {
+    type Key = K;
+}
+
+
+fn main() {
+    let _ = &()
+        as &dyn Map<Key=u32,MapValue=u32>;
+}
+"#,
+        expect![[r#"
+            210..272 '{     ...32>; }': ()
+            220..221 '_': &'? (dyn Map + '?)
+            224..227 '&()': &'? ()
+            224..269 '&()   ...e=u32>': &'? (dyn Map + 'static)
+            225..227 '()': ()
+        "#]],
+    );
+}
+
+#[test]
+fn regression_18692() {
+    check_no_mismatches(
+        r#"
+//- minicore: coerce_unsized, dispatch_from_dyn, send
+trait Trait: Send {}
+
+fn f(_: *const (dyn Trait + Send)) {}
+fn g(it: *const (dyn Trait)) {
+    f(it);
+}
+"#,
+    );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
index 25c1e633ba3b7..4ed71f0d3fb82 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
@@ -485,6 +485,29 @@ fn foo((): (), (): ()) {
     foo(1);
       // ^ error: expected 2 arguments, found 1
 }
+"#,
+        );
+    }
+
+    #[test]
+    fn regression_17233() {
+        check_diagnostics(
+            r#"
+pub trait A {
+    type X: B;
+}
+pub trait B: A {
+    fn confused_name(self, _: i32);
+}
+
+pub struct Foo;
+impl Foo {
+    pub fn confused_name(&self) {}
+}
+
+pub fn repro() {
+    Foo.confused_name();
+}
 "#,
         );
     }
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
index e335989ab2b07..0ee9795af5802 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
@@ -4015,4 +4015,68 @@ fn bar() {
         "##,
         );
     }
+
+    #[test]
+    fn regression_20038() {
+        check(
+            r#"
+//- minicore: clone, fn
+struct Map(Fut, F);
+
+struct InspectFn(F);
+
+trait FnOnce1 {
+    type Output;
+}
+
+trait Future1 {
+    type Output;
+}
+
+trait FusedFuture1: Future1 {
+    fn is_terminated(&self) -> bool;
+     //^^^^^^^^^^^^^
+}
+
+impl FnOnce1 for T
+where
+    T: FnOnce(A) -> R,
+{
+    type Output = R;
+}
+
+impl FnOnce1 for InspectFn
+where
+    F: for<'a> FnOnce1<&'a A, Output = ()>,
+{
+    type Output = A;
+}
+
+impl Future1 for Map
+where
+    Fut: Future1,
+    F: FnOnce1,
+{
+    type Output = T;
+}
+
+impl FusedFuture1 for Map
+where
+    Fut: Future1,
+    F: FnOnce1,
+{
+    fn is_terminated(&self) -> bool {
+        false
+    }
+}
+
+fn overflows(inner: &Map>)
+where
+    Map>: FusedFuture1
+{
+    let _x = inner.is_terminated$0();
+}
+"#,
+        )
+    }
 }